lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
tests/rsa_tests.rs
NikVolf/ring
0c0f7c47112f9ff3e1b8d8d4427d8246fcdc0794
#![forbid( anonymous_parameters, box_pointers, legacy_directory_ownership, missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts, unsafe_code, unstable_features, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, variant_size_differences, warnings, )] extern crate ring; extern crate untrusted; use ring::{der, error, signature, test}; #[cfg(feature = "rsa_signing")] use ring::rand; #[cfg(feature = "rsa_signing")] #[test] fn rsa_from_pkcs8_test() { test::from_file("tests/rsa_from_pkcs8_tests.txt", |section, test_case| { assert_eq!(section, ""); let input = test_case.consume_bytes("Input"); let input = untrusted::Input::from(&input); let error = test_case.consume_optional_string("Error"); assert_eq!(signature::RSAKeyPair::from_pkcs8(input).is_ok(), error.is_none()); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_signature_rsa_pkcs1_sign() { let rng = rand::SystemRandom::new(); test::from_file("tests/rsa_pkcs1_sign_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PKCS1_SHA256, "SHA384" => &signature::RSA_PKCS1_SHA384, "SHA512" => &signature::RSA_PKCS1_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let private_key = test_case.consume_bytes("Key"); let msg = test_case.consume_bytes("Msg"); let expected = test_case.consume_bytes("Sig"); let result = test_case.consume_string("Result"); let private_key = untrusted::Input::from(&private_key); let key_pair = signature::RSAKeyPair::from_der(private_key); if result == "Fail-Invalid-Key" { assert!(key_pair.is_err()); return Ok(()); } let key_pair = key_pair.unwrap(); let key_pair = std::sync::Arc::new(key_pair); let mut signing_state = signature::RSASigningState::new(key_pair).unwrap(); let mut actual = vec![0u8; signing_state.key_pair().public_modulus_len()]; 
signing_state.sign(alg, &rng, &msg, actual.as_mut_slice()).unwrap(); assert_eq!(actual.as_slice() == &expected[..], result == "Pass"); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_signature_rsa_pss_sign() { struct DeterministicSalt<'a> { salt: &'a [u8], rng: &'a rand::SecureRandom } impl<'a> rand::SecureRandom for DeterministicSalt<'a> { fn fill(&self, dest: &mut [u8]) -> Result<(), error::Unspecified> { let dest_len = dest.len(); if dest_len != self.salt.len() { self.rng.fill(dest)?; } else { dest.copy_from_slice(&self.salt); } Ok(()) } } let rng = rand::SystemRandom::new(); test::from_file("tests/rsa_pss_sign_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PSS_SHA256, "SHA384" => &signature::RSA_PSS_SHA384, "SHA512" => &signature::RSA_PSS_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let result = test_case.consume_string("Result"); let private_key = test_case.consume_bytes("Key"); let private_key = untrusted::Input::from(&private_key); let key_pair = signature::RSAKeyPair::from_der(private_key); if key_pair.is_err() && result == "Fail-Invalid-Key" { return Ok(()); } let key_pair = key_pair.unwrap(); let key_pair = std::sync::Arc::new(key_pair); let msg = test_case.consume_bytes("Msg"); let salt = test_case.consume_bytes("Salt"); let expected = test_case.consume_bytes("Sig"); let new_rng = DeterministicSalt { salt: &salt, rng: &rng }; let mut signing_state = signature::RSASigningState::new(key_pair).unwrap(); let mut actual = vec![0u8; signing_state.key_pair().public_modulus_len()]; signing_state.sign(alg, &new_rng, &msg, actual.as_mut_slice())?; assert_eq!(actual.as_slice() == &expected[..], result == "Pass"); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_rsa_key_pair_traits() { test::compile_time_assert_send::<signature::RSAKeyPair>(); 
test::compile_time_assert_sync::<signature::RSAKeyPair>(); test::compile_time_assert_debug::<signature::RSAKeyPair>(); test::compile_time_assert_send::<signature::RSASigningState>(); } #[test] fn test_signature_rsa_pkcs1_verify() { test::from_file("tests/rsa_pkcs1_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA1" => &signature::RSA_PKCS1_2048_8192_SHA1, "SHA256" => &signature::RSA_PKCS1_2048_8192_SHA256, "SHA384" => &signature::RSA_PKCS1_2048_8192_SHA384, "SHA512" => &signature::RSA_PKCS1_2048_8192_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let public_key = test_case.consume_bytes("Key"); let public_key = untrusted::Input::from(&public_key); assert!(public_key.read_all(error::Unspecified, |input| { der::nested(input, der::Tag::Sequence, error::Unspecified, |input| { let _ = der::positive_integer(input)?; let _ = der::positive_integer(input)?; Ok(()) }) }).is_ok()); let msg = test_case.consume_bytes("Msg"); let msg = untrusted::Input::from(&msg); let sig = test_case.consume_bytes("Sig"); let sig = untrusted::Input::from(&sig); let expected_result = test_case.consume_string("Result"); let actual_result = signature::verify(alg, public_key, msg, sig); assert_eq!(actual_result.is_ok(), expected_result == "P"); Ok(()) }); } #[test] fn test_signature_rsa_pss_verify() { test::from_file("tests/rsa_pss_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PSS_2048_8192_SHA256, "SHA384" => &signature::RSA_PSS_2048_8192_SHA384, "SHA512" => &signature::RSA_PSS_2048_8192_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let public_key = test_case.consume_bytes("Key"); let public_key = untrusted::Input::from(&public_key); assert!(public_key.read_all(error::Unspecified, |input| { 
der::nested(input, der::Tag::Sequence, error::Unspecified, |input| { let _ = der::positive_integer(input)?; let _ = der::positive_integer(input)?; Ok(()) }) }).is_ok()); let msg = test_case.consume_bytes("Msg"); let msg = untrusted::Input::from(&msg); let sig = test_case.consume_bytes("Sig"); let sig = untrusted::Input::from(&sig); let expected_result = test_case.consume_string("Result"); let actual_result = signature::verify(alg, public_key, msg, sig); assert_eq!(actual_result.is_ok(), expected_result == "P"); Ok(()) }); } #[test] fn test_signature_rsa_primitive_verification() { test::from_file("tests/rsa_primitive_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let n = test_case.consume_bytes("n"); let e = test_case.consume_bytes("e"); let msg = test_case.consume_bytes("Msg"); let sig = test_case.consume_bytes("Sig"); let expected = test_case.consume_string("Result"); let result = signature::primitive::verify_rsa( &signature::RSA_PKCS1_2048_8192_SHA256, (untrusted::Input::from(&n), untrusted::Input::from(&e)), untrusted::Input::from(&msg), untrusted::Input::from(&sig)); assert_eq!(result.is_ok(), expected == "Pass"); Ok(()) }) }
#![forbid( anonymous_parameters, box_pointers, legacy_directory_ownership, missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts, unsafe_code, unstable_features, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, variant_size_differences, warnings, )] extern crate ring; extern crate untrusted; use ring::{der, error, signature, test}; #[cfg(feature = "rsa_signing")] use ring::rand; #[cfg(feature = "rsa_signing")] #[test] fn rsa_from_pkcs8_test() { test::from_file("tests/rsa_from_pkcs8_tests.txt", |section, test_case| { assert_eq!(section, ""); let input = test_case.consume_bytes("Input"); let input = untrusted::Input::from(&input); let error = test_case.consume_optional_string("Error"); assert_eq!(signature::RSAKeyPair::from_pkcs8(input).is_ok(), error.is_none()); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_signature_rsa_pkcs1_sign() { let rng = rand::SystemRandom::new(); test::from_file("tests/rsa_pkcs1_sign_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PKCS1_SHA256, "SHA384" => &signature::RSA_PKCS1_SHA384, "SHA512" => &signature::RSA_PKCS1_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let private_key = test_case.consume_bytes("Key"); let msg = test_case.consume_bytes("Msg"); let expected = test_case.consume_bytes("Sig"); let result = test_case.consume_string("Result"); let private_key = untrusted::Input::from(&private_key); let key_pair = signature::RSAKeyPair::from_der(private_key); if result == "Fail-Invalid-Key" { assert!(key_pair.is_err()); return Ok(()); } let key_pair = key_pair.unwrap(); let key_pair = std::sync::Arc::new(key_pair); let mut signing_state = signature::RSASigningState::new(key_pair).unwrap(); let mut actual = vec![0u8; signing_state.key_pair().public_modulus_len()]; 
signing_state.sign(alg, &rng, &msg, actual.as_mut_slice()).unwrap(); assert_eq!(actual.as_slice() == &expected[..], result == "Pass"); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_signature_rsa_pss_sign() { struct DeterministicSalt<'a> { salt: &'a [u8], rng: &'a rand::SecureRandom } impl<'a> rand::SecureRandom for DeterministicSalt<'a> { fn fill(&self, dest: &mut [u8]) -> Result<(), error::Unspecified> { let dest_len = dest.len(); if dest_len != self.salt.len() { self.rng.fill(dest)?; } else { dest.copy_from_slice(&self.salt); } Ok(()) } } let rng = rand::SystemRandom::new(); test::from_file("tests/rsa_pss_sign_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest");
let result = test_case.consume_string("Result"); let private_key = test_case.consume_bytes("Key"); let private_key = untrusted::Input::from(&private_key); let key_pair = signature::RSAKeyPair::from_der(private_key); if key_pair.is_err() && result == "Fail-Invalid-Key" { return Ok(()); } let key_pair = key_pair.unwrap(); let key_pair = std::sync::Arc::new(key_pair); let msg = test_case.consume_bytes("Msg"); let salt = test_case.consume_bytes("Salt"); let expected = test_case.consume_bytes("Sig"); let new_rng = DeterministicSalt { salt: &salt, rng: &rng }; let mut signing_state = signature::RSASigningState::new(key_pair).unwrap(); let mut actual = vec![0u8; signing_state.key_pair().public_modulus_len()]; signing_state.sign(alg, &new_rng, &msg, actual.as_mut_slice())?; assert_eq!(actual.as_slice() == &expected[..], result == "Pass"); Ok(()) }); } #[cfg(feature = "rsa_signing")] #[test] fn test_rsa_key_pair_traits() { test::compile_time_assert_send::<signature::RSAKeyPair>(); test::compile_time_assert_sync::<signature::RSAKeyPair>(); test::compile_time_assert_debug::<signature::RSAKeyPair>(); test::compile_time_assert_send::<signature::RSASigningState>(); } #[test] fn test_signature_rsa_pkcs1_verify() { test::from_file("tests/rsa_pkcs1_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA1" => &signature::RSA_PKCS1_2048_8192_SHA1, "SHA256" => &signature::RSA_PKCS1_2048_8192_SHA256, "SHA384" => &signature::RSA_PKCS1_2048_8192_SHA384, "SHA512" => &signature::RSA_PKCS1_2048_8192_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let public_key = test_case.consume_bytes("Key"); let public_key = untrusted::Input::from(&public_key); assert!(public_key.read_all(error::Unspecified, |input| { der::nested(input, der::Tag::Sequence, error::Unspecified, |input| { let _ = der::positive_integer(input)?; let _ = der::positive_integer(input)?; Ok(()) }) 
}).is_ok()); let msg = test_case.consume_bytes("Msg"); let msg = untrusted::Input::from(&msg); let sig = test_case.consume_bytes("Sig"); let sig = untrusted::Input::from(&sig); let expected_result = test_case.consume_string("Result"); let actual_result = signature::verify(alg, public_key, msg, sig); assert_eq!(actual_result.is_ok(), expected_result == "P"); Ok(()) }); } #[test] fn test_signature_rsa_pss_verify() { test::from_file("tests/rsa_pss_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let digest_name = test_case.consume_string("Digest"); let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PSS_2048_8192_SHA256, "SHA384" => &signature::RSA_PSS_2048_8192_SHA384, "SHA512" => &signature::RSA_PSS_2048_8192_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } }; let public_key = test_case.consume_bytes("Key"); let public_key = untrusted::Input::from(&public_key); assert!(public_key.read_all(error::Unspecified, |input| { der::nested(input, der::Tag::Sequence, error::Unspecified, |input| { let _ = der::positive_integer(input)?; let _ = der::positive_integer(input)?; Ok(()) }) }).is_ok()); let msg = test_case.consume_bytes("Msg"); let msg = untrusted::Input::from(&msg); let sig = test_case.consume_bytes("Sig"); let sig = untrusted::Input::from(&sig); let expected_result = test_case.consume_string("Result"); let actual_result = signature::verify(alg, public_key, msg, sig); assert_eq!(actual_result.is_ok(), expected_result == "P"); Ok(()) }); } #[test] fn test_signature_rsa_primitive_verification() { test::from_file("tests/rsa_primitive_verify_tests.txt", |section, test_case| { assert_eq!(section, ""); let n = test_case.consume_bytes("n"); let e = test_case.consume_bytes("e"); let msg = test_case.consume_bytes("Msg"); let sig = test_case.consume_bytes("Sig"); let expected = test_case.consume_string("Result"); let result = signature::primitive::verify_rsa( &signature::RSA_PKCS1_2048_8192_SHA256, (untrusted::Input::from(&n), 
untrusted::Input::from(&e)), untrusted::Input::from(&msg), untrusted::Input::from(&sig)); assert_eq!(result.is_ok(), expected == "Pass"); Ok(()) }) }
let alg = match digest_name.as_ref() { "SHA256" => &signature::RSA_PSS_SHA256, "SHA384" => &signature::RSA_PSS_SHA384, "SHA512" => &signature::RSA_PSS_SHA512, _ => { panic!("Unsupported digest: {}", digest_name) } };
assignment_statement
[ { "content": "#[inline]\n\npub fn sign(key_pair: &KeyPair, rng: &rand::SecureRandom, msg: untrusted::Input)\n\n -> Result<Signature, error::Unspecified> {\n\n key_pair.inner.sign(rng, msg)\n\n}\n\n\n", "file_path": "src/signature.rs", "rank": 0, "score": 364956.10236816294 }, { "content": "// Mask-generating function MGF1 as described in\n\n// https://tools.ietf.org/html/rfc3447#appendix-B.2.1.\n\nfn mgf1(digest_alg: &'static digest::Algorithm, seed: &[u8], mask: &mut [u8])\n\n -> Result<(), error::Unspecified> {\n\n let digest_len = digest_alg.output_len;\n\n\n\n // Maximum counter value is the value of (mask_len / digest_len) rounded up.\n\n let ctr_max = (mask.len() - 1) / digest_len;\n\n assert!(ctr_max <= u32::max_value() as usize);\n\n for (i, mask_chunk) in mask.chunks_mut(digest_len).enumerate() {\n\n let mut ctx = digest::Context::new(digest_alg);\n\n ctx.update(seed);\n\n ctx.update(&polyfill::slice::be_u8_from_u32(i as u32));\n\n let digest = ctx.finish();\n\n let mask_chunk_len = mask_chunk.len();\n\n mask_chunk.copy_from_slice(&digest.as_ref()[..mask_chunk_len]);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rsa/padding.rs", "rank": 1, "score": 340885.6572794318 }, { "content": "fn eddsa_digest(signature_r: &[u8], public_key: &[u8], msg: &[u8])\n\n -> digest::Digest {\n\n let mut ctx = digest::Context::new(&digest::SHA512);\n\n ctx.update(signature_r);\n\n ctx.update(public_key);\n\n ctx.update(msg);\n\n ctx.finish()\n\n}\n\n\n", "file_path": "src/ec/curve25519/ed25519.rs", "rank": 2, "score": 336908.686226866 }, { "content": "fn nonnegative_integer<'a>(input: &mut untrusted::Reader<'a>, min_value: u8)\n\n -> Result<untrusted::Input<'a>, error::Unspecified> {\n\n // Verify that |input|, which has had any leading zero stripped off, is the\n\n // encoding of a value of at least |min_value|.\n\n fn check_minimum(input: untrusted::Input, min_value: u8)\n\n -> Result<(), error::Unspecified> {\n\n input.read_all(error::Unspecified, |input| {\n\n let 
first_byte = input.read_byte()?;\n\n if input.at_end() && first_byte < min_value {\n\n return Err(error::Unspecified);\n\n }\n\n let _ = input.skip_to_end();\n\n Ok(())\n\n })\n\n }\n\n\n\n let value = expect_tag_and_get_value(input, Tag::Integer)?;\n\n\n\n value.read_all(error::Unspecified, |input| {\n\n // Empty encodings are not allowed.\n", "file_path": "src/der.rs", "rank": 3, "score": 321440.3274845469 }, { "content": "#[inline]\n\npub fn key_pair_from_pkcs8(alg: &'static SigningAlgorithm, input: untrusted::Input)\n\n -> Result<KeyPair, error::Unspecified>\n\n{\n\n alg.from_pkcs8(input)\n\n}\n\n\n\n/// Returns a signature of the given data using the given key. The signing may or may\n\n/// not use `rng`, depending on the `key_pair's algorithm.\n", "file_path": "src/signature.rs", "rank": 4, "score": 320275.56293690996 }, { "content": "/// Verify the signature `signature` of message `msg` with the public key\n\n/// `public_key` using the algorithm `alg`.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Verify a RSA PKCS#1 signature that uses the SHA-256 digest\n\n///\n\n/// ```\n\n/// extern crate ring;\n\n/// extern crate untrusted;\n\n///\n\n/// use ring::signature;\n\n///\n\n/// enum Error {\n\n/// InvalidSignature,\n\n/// }\n\n///\n\n/// # #[cfg(feature = \"use_heap\")]\n\n/// fn verify_rsa_pkcs1_sha256(public_key: untrusted::Input,\n\n/// msg: untrusted::Input, sig: untrusted::Input)\n\n/// -> Result<(), Error> {\n\n/// signature::verify(&signature::RSA_PKCS1_2048_8192_SHA256, public_key,\n\n/// msg, sig).map_err(|_| Error::InvalidSignature)\n\n/// }\n\n/// # fn main() { }\n\n/// ```\n\npub fn verify(alg: &VerificationAlgorithm, public_key: untrusted::Input,\n\n msg: untrusted::Input, signature: untrusted::Input)\n\n -> Result<(), error::Unspecified> {\n\n init::init_once();\n\n alg.verify(public_key, msg, signature)\n\n}\n", "file_path": "src/signature.rs", "rank": 5, "score": 309358.99000600557 }, { "content": "pub fn expect_tag_and_get_value<'a>(input: &mut 
untrusted::Reader<'a>,\n\n tag: Tag)\n\n -> Result<untrusted::Input<'a>,\n\n error::Unspecified> {\n\n let (actual_tag, inner) = read_tag_and_get_value(input)?;\n\n if (tag as usize) != (actual_tag as usize) {\n\n return Err(error::Unspecified);\n\n }\n\n Ok(inner)\n\n}\n\n\n", "file_path": "src/der.rs", "rank": 6, "score": 304832.43423657847 }, { "content": "fn from_hex_digit(d: u8) -> Result<u8, String> {\n\n if d >= b'0' && d <= b'9' {\n\n Ok(d - b'0')\n\n } else if d >= b'a' && d <= b'f' {\n\n Ok(d - b'a' + 10u8)\n\n } else if d >= b'A' && d <= b'F' {\n\n Ok(d - b'A' + 10u8)\n\n } else {\n\n Err(format!(\"Invalid hex digit '{}'\", d as char))\n\n }\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 7, "score": 300287.2532796794 }, { "content": "// Implement padding procedure per EMSA-PKCS1-v1_5,\n\n// https://tools.ietf.org/html/rfc3447#section-9.2. This is used by both\n\n// verification and signing so it needs to be able to handle moduli of the\n\n// minimum and maximum sizes for both operations.\n\nfn pkcs1_encode(pkcs1: &PKCS1, m_hash: &digest::Digest, m_out: &mut [u8]) {\n\n let em = m_out;\n\n\n\n let digest_len =\n\n pkcs1.digestinfo_prefix.len() + pkcs1.digest_alg.output_len;\n\n\n\n // The specification requires at least 8 bytes of padding. 
Since we\n\n // disallow keys smaller than 2048 bits, this should always be true.\n\n assert!(em.len() >= digest_len + 11);\n\n let pad_len = em.len() - digest_len - 3;\n\n em[0] = 0;\n\n em[1] = 1;\n\n for i in 0..pad_len {\n\n em[2 + i] = 0xff;\n\n }\n\n em[2 + pad_len] = 0;\n\n\n\n let (digest_prefix, digest_dst) = em[3 + pad_len..]\n\n .split_at_mut(pkcs1.digestinfo_prefix.len());\n\n digest_prefix.copy_from_slice(pkcs1.digestinfo_prefix);\n", "file_path": "src/rsa/padding.rs", "rank": 8, "score": 293403.3142981216 }, { "content": "pub fn sign(key: Key, msg: &[u8], tag: &mut Tag) {\n\n let mut ctx = SigningContext::from_key(key);\n\n ctx.update(msg);\n\n ctx.sign(tag)\n\n}\n\n\n", "file_path": "src/poly1305.rs", "rank": 9, "score": 273404.7956438996 }, { "content": "fn hmac_test_case_inner(digest_alg: &'static digest::Algorithm,\n\n key_value: &[u8], input: &[u8], output: &[u8],\n\n is_ok: bool) -> Result<(), error::Unspecified> {\n\n\n\n let s_key = hmac::SigningKey::new(digest_alg, key_value);\n\n let v_key = hmac::VerificationKey::new(digest_alg, key_value);\n\n\n\n // One-shot API.\n\n {\n\n let signature = hmac::sign(&s_key, input);\n\n assert_eq!(is_ok, signature.as_ref() == output);\n\n assert_eq!(is_ok, hmac::verify(&v_key, input, output).is_ok());\n\n }\n\n\n\n // Multi-part API, one single part.\n\n {\n\n let mut s_ctx = hmac::SigningContext::with_key(&s_key);\n\n s_ctx.update(input);\n\n let signature = s_ctx.sign();\n\n assert_eq!(is_ok, signature.as_ref() == output);\n", "file_path": "tests/hmac_tests.rs", "rank": 10, "score": 272600.51373965415 }, { "content": "#[inline]\n\npub fn small_nonnegative_integer(input: &mut untrusted::Reader)\n\n -> Result<u8, error::Unspecified> {\n\n let value = nonnegative_integer(input, 0)?;\n\n value.read_all(error::Unspecified, |input| {\n\n let r = input.read_byte()?;\n\n Ok(r)\n\n })\n\n}\n\n\n\n/// Parses a positive DER integer, returning the big-endian-encoded value, sans\n\n/// any leading zero byte.\n", 
"file_path": "src/der.rs", "rank": 11, "score": 269667.2437194438 }, { "content": "fn parse_test_case(current_section: &mut String, lines: &mut FileLines)\n\n -> Option<TestCase> {\n\n let mut attributes = Vec::new();\n\n\n\n let mut is_first_line = true;\n\n loop {\n\n let line = match lines.next() {\n\n None => None,\n\n Some(result) => Some(result.unwrap()),\n\n };\n\n\n\n if cfg!(feature = \"test_logging\") {\n\n if let Some(text) = &line {\n\n println!(\"Line: {}\", text);\n\n }\n\n }\n\n\n\n match line {\n\n // If we get to EOF when we're not in the middle of a test case,\n\n // then we're done.\n", "file_path": "src/test.rs", "rank": 12, "score": 267555.8335639606 }, { "content": "#[inline]\n\npub fn positive_integer<'a>(input: &mut untrusted::Reader<'a>)\n\n -> Result<untrusted::Input<'a>, error::Unspecified> {\n\n nonnegative_integer(input, 1)\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use error;\n\n use super::*;\n\n use untrusted;\n\n\n\n fn with_good_i<F, R>(value: &[u8], f: F)\n\n where F: FnOnce(&mut untrusted::Reader)\n\n -> Result<R, error::Unspecified> {\n\n let r = untrusted::Input::from(value).read_all(error::Unspecified, f);\n\n assert!(r.is_ok());\n\n }\n\n\n\n fn with_bad_i<F, R>(value: &[u8], f: F)\n", "file_path": "src/der.rs", "rank": 13, "score": 264154.0501401555 }, { "content": "fn x25519_(private_key: &[u8], public_key: &[u8])\n\n -> Result<Vec<u8>, error::Unspecified> {\n\n let rng = test::rand::FixedSliceRandom { bytes: private_key };\n\n let private_key =\n\n agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng)?;\n\n let public_key = untrusted::Input::from(public_key);\n\n agreement::agree_ephemeral(private_key, &agreement::X25519, public_key,\n\n error::Unspecified, |agreed_value| {\n\n Ok(Vec::from(agreed_value))\n\n })\n\n}\n\n\n", "file_path": "tests/agreement_tests.rs", "rank": 14, "score": 262198.1253910661 }, { "content": "fn x25519(private_key: &[u8], public_key: &[u8]) -> Vec<u8> {\n\n x25519_(private_key, 
public_key).unwrap()\n\n}\n\n\n", "file_path": "tests/agreement_tests.rs", "rank": 15, "score": 259139.0570216378 }, { "content": "pub fn bit_string_with_no_unused_bits<'a>(input: &mut untrusted::Reader<'a>)\n\n -> Result<untrusted::Input<'a>, error::Unspecified> {\n\n nested(input, Tag::BitString, error::Unspecified, |value| {\n\n let unused_bits_at_end =\n\n value.read_byte().map_err(|_| error::Unspecified)?;\n\n if unused_bits_at_end != 0 {\n\n return Err(error::Unspecified);\n\n }\n\n Ok(value.skip_to_end())\n\n })\n\n}\n\n\n", "file_path": "src/der.rs", "rank": 16, "score": 256719.63079153007 }, { "content": "pub fn read_tag_and_get_value<'a>(input: &mut untrusted::Reader<'a>)\n\n -> Result<(u8, untrusted::Input<'a>),\n\n error::Unspecified> {\n\n let tag = input.read_byte()?;\n\n if (tag & 0x1F) == 0x1F {\n\n return Err(error::Unspecified); // High tag number form is not allowed.\n\n }\n\n\n\n // If the high order bit of the first byte is set to zero then the length\n\n // is encoded in the seven remaining bits of that byte. Otherwise, those\n\n // seven bits represent the number of bytes used to encode the length.\n\n let length = match input.read_byte()? {\n\n n if (n & 0x80) == 0 => n as usize,\n\n 0x81 => {\n\n let second_byte = input.read_byte()?;\n\n if second_byte < 128 {\n\n return Err(error::Unspecified); // Not the canonical encoding.\n\n }\n\n second_byte as usize\n\n },\n", "file_path": "src/der.rs", "rank": 17, "score": 256719.63079153007 }, { "content": "/// Decode an string of hex digits into a sequence of bytes. 
The input must\n\n/// have an even number of digits.\n\npub fn from_hex(hex_str: &str) -> Result<Vec<u8>, String> {\n\n if hex_str.len() % 2 != 0 {\n\n return Err(\n\n String::from(\"Hex string does not have an even number of digits\"));\n\n }\n\n\n\n let mut result = Vec::with_capacity(hex_str.len() / 2);\n\n for digits in hex_str.as_bytes().chunks(2) {\n\n let hi = from_hex_digit(digits[0])?;\n\n let lo = from_hex_digit(digits[1])?;\n\n result.push((hi * 0x10) | lo);\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 18, "score": 256458.32421999535 }, { "content": "fn sha256_format_output(input: &State) -> Output {\n\n let input = &polyfill::slice::u64_as_u32(input)[..8];\n\n [u32x2!(input[0].to_be(), input[1].to_be()),\n\n u32x2!(input[2].to_be(), input[3].to_be()),\n\n u32x2!(input[4].to_be(), input[5].to_be()),\n\n u32x2!(input[6].to_be(), input[7].to_be()),\n\n 0,\n\n 0,\n\n 0,\n\n 0]\n\n}\n\n\n", "file_path": "src/digest/mod.rs", "rank": 19, "score": 256423.48195840378 }, { "content": "fn sha512_format_output(input: &State) -> Output {\n\n [input[0].to_be(),\n\n input[1].to_be(),\n\n input[2].to_be(),\n\n input[3].to_be(),\n\n input[4].to_be(),\n\n input[5].to_be(),\n\n input[6].to_be(),\n\n input[7].to_be()]\n\n}\n\n\n\n/// The length of the output of SHA-1, in bytes.\n\npub const SHA1_OUTPUT_LEN: usize = sha1::OUTPUT_LEN;\n\n\n\n/// The length of the output of SHA-256, in bytes.\n\npub const SHA256_OUTPUT_LEN: usize = 256 / 8;\n\n\n\n/// The length of the output of SHA-384, in bytes.\n\npub const SHA384_OUTPUT_LEN: usize = 384 / 8;\n\n\n", "file_path": "src/digest/mod.rs", "rank": 20, "score": 256272.76881707538 }, { "content": "fn block_data_order_safe(state: &mut State, blocks: &[[u8; BLOCK_LEN]]) {\n\n let state = polyfill::slice::u64_as_u32_mut(state);\n\n let state = polyfill::slice::as_wrapping_mut(state);\n\n let state = &mut state[..CHAINING_WORDS];\n\n let state = slice_as_array_ref_mut!(state, CHAINING_WORDS).unwrap();\n\n\n\n 
let mut w: [W32; 80] = [Wrapping(0); 80];\n\n for block in blocks {\n\n for t in 0..16 {\n\n let word = slice_as_array_ref!(&block[t * 4..][..4], 4).unwrap();\n\n w[t] = Wrapping(polyfill::slice::u32_from_be_u8(word))\n\n }\n\n for t in 16..80 {\n\n let wt = w[t - 3] ^ w[t - 8] ^ w[t - 14] ^ w[t - 16];\n\n w[t] = polyfill::wrapping_rotate_left_u32(wt, 1);\n\n }\n\n\n\n let mut a = state[0];\n\n let mut b = state[1];\n\n let mut c = state[2];\n", "file_path": "src/digest/sha1.rs", "rank": 21, "score": 250573.57286705583 }, { "content": "/// Returns the digest of `data` using the given digest algorithm.\n\n///\n\n/// C analog: `EVP_Digest`\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # #[cfg(feature = \"use_heap\")]\n\n/// # fn main() {\n\n/// use ring::{digest, test};\n\n///\n\n/// let expected_hex =\n\n/// \"09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b\";\n\n/// let expected: Vec<u8> = test::from_hex(expected_hex).unwrap();\n\n/// let actual = digest::digest(&digest::SHA256, b\"hello, world\");\n\n///\n\n/// assert_eq!(&expected, &actual.as_ref());\n\n/// # }\n\n///\n\n/// # #[cfg(not(feature = \"use_heap\"))]\n\n/// # fn main() { }\n\n/// ```\n\npub fn digest(algorithm: &'static Algorithm, data: &[u8]) -> Digest {\n\n let mut ctx = Context::new(algorithm);\n\n ctx.update(data);\n\n ctx.finish()\n\n}\n\n\n\n/// A calculated digest value.\n\n///\n\n/// Use `as_ref` to get the value as a `&[u8]`.\n\n#[derive(Clone, Copy)]\n\npub struct Digest {\n\n value: Output,\n\n algorithm: &'static Algorithm,\n\n}\n\n\n\nimpl Digest {\n\n /// The algorithm that was used to calculate the digest value.\n\n #[inline(always)]\n\n pub fn algorithm(&self) -> &'static Algorithm { self.algorithm }\n\n}\n", "file_path": "src/digest/mod.rs", "rank": 24, "score": 237970.18574038032 }, { "content": "fn aes_gcm_init(ctx_buf: &mut [u8], key: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n bssl::map_result(unsafe {\n\n GFp_aes_gcm_init(ctx_buf.as_mut_ptr(), 
ctx_buf.len(), key.as_ptr(),\n\n key.len())\n\n })\n\n}\n\n\n", "file_path": "src/aead/aes_gcm.rs", "rank": 25, "score": 236637.58400782762 }, { "content": "fn x25519_public_from_private(public_out: &mut [u8],\n\n private_key: &ec::PrivateKey)\n\n -> Result<(), error::Unspecified> {\n\n let public_out = slice_as_array_ref_mut!(public_out, PUBLIC_KEY_LEN)?;\n\n\n\n // XXX: This shouldn't require dynamic checks, but rustc can't slice an\n\n // array reference to a shorter array reference. TODO(perf): Fix this.\n\n let private_key =\n\n slice_as_array_ref!(&private_key.bytes[..PRIVATE_KEY_LEN],\n\n PRIVATE_KEY_LEN)?;\n\n unsafe {\n\n GFp_x25519_public_from_private(public_out, private_key);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ec/curve25519/x25519.rs", "rank": 26, "score": 232797.5113724877 }, { "content": "/// Calculate the digest of `msg` using the digest algorithm `digest_alg`. Then\n\n/// convert the digest to a scalar in the range [0, n) as described in\n\n/// NIST's FIPS 186-4 Section 4.2. Note that this is one of the few cases where\n\n/// a `Scalar` is allowed to have the value zero.\n\n///\n\n/// NIST's FIPS 186-4 4.2 says \"When the length of the output of the hash\n\n/// function is greater than N (i.e., the bit length of q), then the leftmost N\n\n/// bits of the hash function output block shall be used in any calculation\n\n/// using the hash function output during the generation or verification of a\n\n/// digital signature.\"\n\n///\n\n/// \"Leftmost N bits\" means \"N most significant bits\" because we interpret the\n\n/// digest as a bit-endian encoded integer.\n\n///\n\n/// The NSA guide instead vaguely suggests that we should convert the digest\n\n/// value to an integer and then reduce it mod `n`. However, real-world\n\n/// implementations (e.g. 
`digest_to_bn` in OpenSSL and `hashToInt` in Go) do\n\n/// what FIPS 186-4 says to do, not what the NSA guide suggests.\n\n///\n\n/// Why shifting the value right by at most one bit is sufficient: P-256's `n`\n\n/// has its 256th bit set; i.e. 2**255 < n < 2**256. Once we've truncated the\n\n/// digest to 256 bits and converted it to an integer, it will have a value\n\n/// less than 2**256. If the value is larger than `n` then shifting it one bit\n\n/// right will give a value less than 2**255, which is less than `n`. The\n\n/// analogous argument applies for P-384. However, it does *not* apply in\n\n/// general; for example, it doesn't apply to P-521.\n\npub fn digest_scalar(ops: &ScalarOps, msg: &digest::Digest) -> Scalar {\n\n digest_scalar_(ops, msg.as_ref())\n\n}\n\n\n", "file_path": "src/ec/suite_b/ecdsa/digest_scalar.rs", "rank": 27, "score": 231123.28407719126 }, { "content": "fn parse_public_key(input: untrusted::Input)\n\n -> Result<(untrusted::Input, untrusted::Input),\n\n error::Unspecified> {\n\n input.read_all(error::Unspecified, |input| {\n\n der::nested(input, der::Tag::Sequence, error::Unspecified, |input| {\n\n let n = der::positive_integer(input)?;\n\n let e = der::positive_integer(input)?;\n\n Ok((n, e))\n\n })\n\n })\n\n}\n\n\n\n// Type-level representation of an RSA public modulus *n*. 
See\n\n// `super::bigint`'s modulue-level documentation.\n\n#[derive(Copy, Clone)]\n\npub enum N {}\n\n\n\npub mod verification;\n\n\n\n#[cfg(feature = \"rsa_signing\")]\n\npub mod signing;\n\n\n\nmod bigint;\n", "file_path": "src/rsa/mod.rs", "rank": 28, "score": 230214.25390665486 }, { "content": "// This is a separate function solely so that we can test specific digest\n\n// values like all-zero values and values larger than `n`.\n\nfn digest_scalar_(ops: &ScalarOps, digest: &[u8]) -> Scalar {\n\n let cops = ops.common;\n\n let num_limbs = cops.num_limbs;\n\n let digest = if digest.len() > num_limbs * LIMB_BYTES {\n\n &digest[..(num_limbs * LIMB_BYTES)]\n\n } else {\n\n digest\n\n };\n\n\n\n scalar_parse_big_endian_partially_reduced_variable_consttime(\n\n cops, AllowZero::Yes, untrusted::Input::from(digest)).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {digest, test};\n\n use super::digest_scalar_;\n\n use ec::suite_b::ops::*;\n\n use untrusted;\n\n\n", "file_path": "src/ec/suite_b/ecdsa/digest_scalar.rs", "rank": 29, "score": 229487.3776521394 }, { "content": "/// Copies |key| into |ctx_buf|.\n\npub fn chacha20_poly1305_init(ctx_buf: &mut [u8], key: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n ctx_buf[..key.len()].copy_from_slice(key);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/aead/chacha20_poly1305.rs", "rank": 30, "score": 229271.93661589827 }, { "content": "// TODO: investigate taking decoder as a reference to reduce generated code\n\n// size.\n\npub fn nested<'a, F, R, E: Copy>(input: &mut untrusted::Reader<'a>, tag: Tag,\n\n error: E, decoder: F) -> Result<R, E>\n\n where F : FnOnce(&mut untrusted::Reader<'a>)\n\n -> Result<R, E> {\n\n let inner = expect_tag_and_get_value(input, tag).map_err(|_| error)?;\n\n inner.read_all(error, decoder)\n\n}\n\n\n", "file_path": "src/der.rs", "rank": 31, "score": 228836.24500105588 }, { "content": "pub fn map_result(bssl_result: c::int) -> Result<(), error::Unspecified> {\n\n match bssl_result {\n\n 1 
=> Ok(()),\n\n _ => Err(error::Unspecified),\n\n }\n\n}\n\n\n\n// Adapt a BoringSSL test suite to a Rust test.\n\n//\n\n// The BoringSSL test suite is broken up into multiple files. Originally, they\n\n// were all executables with their own `main` functions. Those main functions\n\n// have been replaced with uniquely-named functions so that they can all be\n\n// linked into the same executable.\n\n#[cfg(test)]\n\nmacro_rules! bssl_test {\n\n ( $fn_name:ident, $bssl_test_main_fn_name:ident ) => {\n\n #[test]\n\n fn $fn_name() {\n\n use $crate::{c, init};\n\n extern {\n", "file_path": "src/bssl.rs", "rank": 32, "score": 227414.85395882098 }, { "content": "fn pss_digest(digest_alg: &'static digest::Algorithm, m_hash: &digest::Digest,\n\n salt: &[u8]) -> digest::Digest {\n\n // Fixed prefix.\n\n const PREFIX_ZEROS: [u8; 8] = [0u8; 8];\n\n\n\n // Encoding step 5 and 6, Verification step 12 and 13.\n\n let mut ctx = digest::Context::new(digest_alg);\n\n ctx.update(&PREFIX_ZEROS);\n\n ctx.update(m_hash.as_ref());\n\n ctx.update(salt);\n\n ctx.finish()\n\n}\n\n\n\nmacro_rules! 
rsa_pss_padding {\n\n ( $PADDING_ALGORITHM:ident, $digest_alg:expr, $doc_str:expr ) => {\n\n #[doc=$doc_str]\n\n /// Feature: `rsa_signing`.\n\n pub static $PADDING_ALGORITHM: PSS = PSS {\n\n digest_alg: $digest_alg,\n\n };\n", "file_path": "src/rsa/padding.rs", "rank": 33, "score": 224058.55053145663 }, { "content": "#[test]\n\nfn signature_impl_test() {\n\n test::compile_time_assert_debug::<signature::KeyPair>();\n\n test::compile_time_assert_send::<signature::KeyPair>();\n\n\n\n test::compile_time_assert_clone::<signature::Signature>();\n\n test::compile_time_assert_copy::<signature::Signature>();\n\n test::compile_time_assert_send::<signature::Signature>();\n\n test::compile_time_assert_sync::<signature::Signature>();\n\n}\n", "file_path": "tests/signature_tests.rs", "rank": 34, "score": 223695.15098756965 }, { "content": "#[test]\n\nfn digest_test_fmt() {\n\n assert_eq!(\"SHA1:b7e23ec29af22b0b4e41da31e868d57226121c84\",\n\n &format!(\"{:?}\",\n\n digest::digest(&digest::SHA1, b\"hello, world\")));\n\n assert_eq!(\"SHA256:09ca7e4eaa6e8ae9c7d261167129184883644d\\\n\n 07dfba7cbfbc4c8a2e08360d5b\",\n\n &format!(\"{:?}\",\n\n digest::digest(&digest::SHA256, b\"hello, world\")));\n\n assert_eq!(\"SHA384:1fcdb6059ce05172a26bbe2a3ccc88ed5a8cd5\\\n\n fc53edfd9053304d429296a6da23b1cd9e5c9ed3bb34f0\\\n\n 0418a70cdb7e\",\n\n &format!(\"{:?}\",\n\n digest::digest(&digest::SHA384, b\"hello, world\")));\n\n assert_eq!(\"SHA512:8710339dcb6814d0d9d2290ef422285c9322b7\\\n\n 163951f9a0ca8f883d3305286f44139aa374848e4174f5\\\n\n aada663027e4548637b6d19894aec4fb6c46a139fbf9\",\n\n &format!(\"{:?}\",\n\n digest::digest(&digest::SHA512, b\"hello, world\")));\n\n\n\n assert_eq!(\"SHA512_256:11f2c88c04f0a9c3d0970894ad2472505e\\\n\n 0bc6e8c7ec46b5211cd1fa3e253e62\",\n\n &format!(\"{:?}\",\n\n digest::digest(&digest::SHA512_256, b\"hello, world\")));\n\n}\n", "file_path": "tests/digest_tests.rs", "rank": 35, "score": 223639.40132562985 }, { "content": "fn h(s: &str) -> Vec<u8> {\n\n 
match test::from_hex(s) {\n\n Ok(v) => v,\n\n Err(msg) => {\n\n panic!(\"{} in {}\", msg, s);\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/agreement_tests.rs", "rank": 36, "score": 222468.1566049351 }, { "content": "/// Fills `out` with the output of the HKDF-Expand operation for the given\n\n/// inputs.\n\n///\n\n/// `prk` should be the return value of an earlier call to `extract`.\n\n///\n\n/// | Parameter | RFC 5869 Term\n\n/// |------------|--------------\n\n/// | prk | PRK\n\n/// | info | info\n\n/// | out | OKM (Output Keying Material)\n\n/// | out.len() | L (Length of output keying material in bytes)\n\n///\n\n/// # Panics\n\n///\n\n/// `expand` panics if the requested output length is larger than 255 times the\n\n/// size of the digest algorithm, i.e. if\n\n/// `out.len() > 255 * salt.digest_algorithm().output_len`. This is the limit\n\n/// imposed by the HKDF specification, and is necessary to prevent overflow of\n\n/// the 8-bit iteration counter in the expansion step.\n\npub fn expand(prk: &hmac::SigningKey, info: &[u8], out: &mut [u8]) {\n\n let digest_alg = prk.digest_algorithm();\n\n assert!(out.len() <= 255 * digest_alg.output_len);\n\n assert!(digest_alg.block_len >= digest_alg.output_len);\n\n\n\n let mut ctx = hmac::SigningContext::with_key(prk);\n\n\n\n let mut n = 1u8;\n\n let mut pos = 0;\n\n loop {\n\n ctx.update(info);\n\n ctx.update(&[n]);\n\n\n\n let t = ctx.sign();\n\n\n\n // Append `t` to the output.\n\n let to_copy = if out.len() - pos < digest_alg.output_len {\n\n out.len() - pos\n\n } else {\n\n digest_alg.output_len\n", "file_path": "src/hkdf.rs", "rank": 37, "score": 221910.64723347698 }, { "content": "#[test]\n\nfn digest_misc() {\n\n test::from_file(\"tests/digest_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let digest_alg = test_case.consume_digest_alg(\"Hash\").unwrap();\n\n let input = test_case.consume_bytes(\"Input\");\n\n let repeat = test_case.consume_usize(\"Repeat\");\n\n let expected = 
test_case.consume_bytes(\"Output\");\n\n\n\n let mut ctx = digest::Context::new(digest_alg);\n\n let mut data = Vec::new();\n\n for _ in 0..repeat {\n\n ctx.update(&input);\n\n data.extend(&input);\n\n }\n\n let actual_from_chunks = ctx.finish();\n\n assert_eq!(&expected, &actual_from_chunks.as_ref());\n\n\n\n let actual_from_one_shot = digest::digest(digest_alg, &data);\n\n assert_eq!(&expected, &actual_from_one_shot.as_ref());\n\n\n", "file_path": "tests/digest_tests.rs", "rank": 38, "score": 221351.75200227136 }, { "content": "pub fn signature_from_bytes(bytes: &[u8]) -> Signature {\n\n let mut r = Signature {\n\n value: [0; MAX_LEN],\n\n len: bytes.len(),\n\n };\n\n r.value[..bytes.len()].copy_from_slice(bytes);\n\n r\n\n}\n\n\n\n/// The longest signature is an ASN.1 P-384 signature where *r* and *s* are of\n\n/// maximum length with the leading high bit set on each. Then each component\n\n/// will have a tag, a one-byte length, and a one-byte “I'm not negative”\n\n/// prefix, and the outer sequence will have a two-byte length.\n\npub const MAX_LEN: usize = 1/*tag:SEQUENCE*/ + 2/*len*/ +\n\n (2 * (1/*tag:INTEGER*/ + 1/*len*/ + 1/*zero*/ + ec::SCALAR_MAX_BYTES));\n", "file_path": "src/signature_impl.rs", "rank": 39, "score": 219774.83441767155 }, { "content": "pub fn big_endian_from_limbs(limbs: &[Limb], out: &mut [u8]) {\n\n let num_limbs = limbs.len();\n\n let out_len = out.len();\n\n assert_eq!(out_len, num_limbs * LIMB_BYTES);\n\n for i in 0..num_limbs {\n\n let mut limb = limbs[i];\n\n for j in 0..LIMB_BYTES {\n\n out[((num_limbs - i - 1) * LIMB_BYTES) + (LIMB_BYTES - j - 1)] =\n\n (limb & 0xff) as u8;\n\n limb >>= 8;\n\n }\n\n }\n\n}\n\n\n\nextern {\n\n #[cfg(feature = \"use_heap\")]\n\n fn LIMBS_are_even(a: *const Limb, num_limbs: c::size_t) -> LimbMask;\n\n fn LIMBS_are_zero(a: *const Limb, num_limbs: c::size_t) -> LimbMask;\n\n #[cfg(any(test, feature = \"rsa_signing\"))]\n\n fn LIMBS_equal_limb(a: *const Limb, b: Limb, num_limbs: c::size_t)\n", 
"file_path": "src/limb.rs", "rank": 40, "score": 215457.31933881587 }, { "content": "/// Fills `out` with the output of the HKDF Extract-and-Expand operation for\n\n/// the given inputs.\n\n///\n\n/// `extract_and_expand` is exactly equivalent to:\n\n///\n\n/// ```\n\n/// # use ring::{hkdf, hmac};\n\n/// # fn foo(salt: &hmac::SigningKey, secret: &[u8], info: &[u8],\n\n/// # out: &mut [u8]) {\n\n/// let prk = hkdf::extract(salt, secret);\n\n/// hkdf::expand(&prk, info, out)\n\n/// # }\n\n/// ```\n\n///\n\n/// See the documentation for `extract` and `expand` for details.\n\n///\n\n/// # Panics\n\n///\n\n/// `extract_and_expand` panics if `expand` panics.\n\npub fn extract_and_expand(salt: &hmac::SigningKey, secret: &[u8],\n\n info: &[u8], out: &mut [u8]) {\n\n let prk = extract(salt, secret);\n\n expand(&prk, info, out)\n\n}\n\n\n", "file_path": "src/hkdf.rs", "rank": 41, "score": 212953.53220988347 }, { "content": "#[test]\n\nfn test_signature_ed25519() {\n\n test::from_file(\"tests/ed25519_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let seed = test_case.consume_bytes(\"SEED\");\n\n assert_eq!(32, seed.len());\n\n let seed = untrusted::Input::from(&seed);\n\n\n\n let public_key = test_case.consume_bytes(\"PUB\");\n\n assert_eq!(32, public_key.len());\n\n let public_key = untrusted::Input::from(&public_key);\n\n\n\n let msg = test_case.consume_bytes(\"MESSAGE\");\n\n\n\n let expected_sig = test_case.consume_bytes(\"SIG\");\n\n\n\n {\n\n let key_pair = Ed25519KeyPair::from_seed_and_public_key(\n\n seed, public_key).unwrap();\n\n let actual_sig = key_pair.sign(&msg);\n\n assert_eq!(&expected_sig[..], actual_sig.as_ref());\n", "file_path": "tests/ed25519_tests.rs", "rank": 42, "score": 212172.1545474123 }, { "content": "#[test]\n\nfn test_fmt_algorithm() {\n\n assert_eq!(\"SHA1\", &format!(\"{:?}\", digest::SHA1));\n\n assert_eq!(\"SHA256\", &format!(\"{:?}\", digest::SHA256));\n\n assert_eq!(\"SHA384\", &format!(\"{:?}\", 
digest::SHA384));\n\n assert_eq!(\"SHA512\", &format!(\"{:?}\", digest::SHA512));\n\n assert_eq!(\"SHA512_256\", &format!(\"{:?}\", digest::SHA512_256));\n\n}\n\n\n", "file_path": "tests/digest_tests.rs", "rank": 43, "score": 212126.36297930678 }, { "content": "pub fn verify(key: Key, msg: &[u8], tag: &Tag)\n\n -> Result<(), error::Unspecified> {\n\n let mut calculated_tag = [0u8; TAG_LEN];\n\n sign(key, msg, &mut calculated_tag);\n\n constant_time::verify_slices_are_equal(&calculated_tag[..], tag)\n\n}\n\n\n", "file_path": "src/poly1305.rs", "rank": 44, "score": 211292.61429888633 }, { "content": "fn unwrap_pkcs8(version: pkcs8::Version, input: untrusted::Input)\n\n -> Result<(untrusted::Input, Option<untrusted::Input>),\n\n error::Unspecified> {\n\n let (private_key, public_key) =\n\n pkcs8::unwrap_key(&PKCS8_TEMPLATE, version, input)?;\n\n let private_key = private_key.read_all(error::Unspecified, |input| {\n\n der::expect_tag_and_get_value(input, der::Tag::OctetString)\n\n })?;\n\n Ok((private_key, public_key))\n\n}\n\n\n\n/// Verification of [Ed25519] signatures.\n\n///\n\n/// Ed25519 uses SHA-512 as the digest algorithm.\n\n///\n\n/// [Ed25519]: https://ed25519.cr.yp.to/\n\npub static ED25519: EdDSAParameters = EdDSAParameters {};\n\n\n\nimpl signature::VerificationAlgorithm for EdDSAParameters {\n\n fn verify(&self, public_key: untrusted::Input, msg: untrusted::Input,\n", "file_path": "src/ec/curve25519/ed25519.rs", "rank": 45, "score": 210959.70490826512 }, { "content": "#[inline(always)]\n\npub fn verify(key: &VerificationKey, data: &[u8], signature: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n verify_with_own_key(&key.wrapped, data, signature)\n\n}\n\n\n", "file_path": "src/hmac.rs", "rank": 46, "score": 210023.92944622962 }, { "content": "pub fn from_hex(hex_str: &str) -> Result<Vec<u8>, String> {\n\n if hex_str.len() % 2 != 0 {\n\n return Err(\n\n String::from(\"Hex string does not have an even number of digits\"));\n\n }\n\n\n\n fn 
from_hex_digit(d: u8) -> Result<u8, String> {\n\n if d >= b'0' && d <= b'9' {\n\n Ok(d - b'0')\n\n } else if d >= b'a' && d <= b'f' {\n\n Ok(d - b'a' + 10u8)\n\n } else if d >= b'A' && d <= b'F' {\n\n Ok(d - b'A' + 10u8)\n\n } else {\n\n Err(format!(\"Invalid hex digit '{}'\", d as char))\n\n }\n\n }\n\n\n\n let mut result = Vec::with_capacity(hex_str.len() / 2);\n\n for digits in hex_str.as_bytes().chunks(2) {\n\n let hi = from_hex_digit(digits[0])?;\n\n let lo = from_hex_digit(digits[1])?;\n\n result.push((hi * 0x10) | lo);\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "examples/checkdigest.rs", "rank": 47, "score": 208572.083532902 }, { "content": "/// Calculates the HMAC of `data` using the signing key `key`, and verifies\n\n/// whether the resultant value equals `signature`, in one step.\n\n///\n\n/// This is logically equivalent to, but more efficient than, constructing a\n\n/// `VerificationKey` with the same value as `key` and then using `verify`.\n\n///\n\n/// The verification will be done in constant time to prevent timing attacks.\n\n///\n\n/// C analog: `HMAC_Init` + `HMAC_Update` + `HMAC_Final` + `CRYPTO_memcmp`\n\npub fn verify_with_own_key(key: &SigningKey, data: &[u8], signature: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n constant_time::verify_slices_are_equal(sign(key, data).as_ref(), signature)\n\n}\n\n\n\n/// Returns the recommended key length for HMAC using the given digest\n\n/// algorithm.\n\n///\n\n/// The value returned is the chaining length of the digest function,\n\n/// `digest_alg.chaining_len`. This is 32 bytes (256 bits) for SHA-256, and\n\n/// 64 bytes (512 bits) for SHA-384 and SHA-512.\n\n///\n\n/// This recommendation is based on [NIST SP 800-107], Section 5.3.4: Security\n\n/// Effect of the HMAC Key. 
The chaining length of the digest algorithm,\n\n/// instead of its block length, is used to be consistent with the key lengths\n\n/// chosen for TLS for SHA-256 (see [RFC 5246, Appendix C]) and most other\n\n/// protocols.\n\n///\n\n/// [NIST SP 800-107]:\n\n/// http://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-107r1.pdf\n\n/// [RFC 5246, Appendix C]:\n\n/// https://tools.ietf.org/html/rfc5246#appendix-C\n", "file_path": "src/hmac.rs", "rank": 48, "score": 207579.5903801032 }, { "content": "fn x25519_ecdh(out: &mut [u8], my_private_key: &ec::PrivateKey,\n\n peer_public_key: untrusted::Input)\n\n -> Result<(), error::Unspecified> {\n\n let out = slice_as_array_ref_mut!(out, SHARED_SECRET_LEN)?;\n\n\n\n // XXX: This shouldn't require dynamic checks, but rustc can't slice an\n\n // array reference to a shorter array reference. TODO(perf): Fix this.\n\n let my_private_key =\n\n slice_as_array_ref!(&my_private_key.bytes[..PRIVATE_KEY_LEN],\n\n PRIVATE_KEY_LEN)?;\n\n let peer_public_key =\n\n slice_as_array_ref!(peer_public_key.as_slice_less_safe(),\n\n PUBLIC_KEY_LEN)?;\n\n\n\n unsafe {\n\n GFp_x25519_scalar_mult(out, my_private_key, peer_public_key);\n\n }\n\n\n\n let zeros: SharedSecret = [0; SHARED_SECRET_LEN];\n\n if constant_time::verify_slices_are_equal(out, &zeros).is_ok() {\n\n // All-zero output results when the input is a point of small order.\n\n return Err(error::Unspecified);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nconst ELEM_AND_SCALAR_LEN: usize = ops::ELEM_LEN;\n\n\n", "file_path": "src/ec/curve25519/x25519.rs", "rank": 49, "score": 207555.20045653154 }, { "content": "#[test]\n\nfn signature_ecdsa_sign_fixed_test() {\n\n test::from_file(\"tests/ecdsa_sign_fixed_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let digest_name = test_case.consume_string(\"Digest\");\n\n\n\n let msg = test_case.consume_bytes(\"Msg\");\n\n let msg = 
untrusted::Input::from(&msg);\n\n\n\n let d = test_case.consume_bytes(\"d\");\n\n let d = untrusted::Input::from(&d);\n\n\n\n let q = test_case.consume_bytes(\"Q\");\n\n let q = untrusted::Input::from(&q);\n\n\n\n let k = test_case.consume_bytes(\"k\");\n\n\n\n let expected_result = test_case.consume_bytes(\"Sig\");\n\n\n", "file_path": "tests/ecdsa_tests.rs", "rank": 51, "score": 206775.9841431182 }, { "content": "#[test]\n\nfn signature_ecdsa_verify_fixed_test() {\n\n test::from_file(\"tests/ecdsa_verify_fixed_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let digest_name = test_case.consume_string(\"Digest\");\n\n\n\n let msg = test_case.consume_bytes(\"Msg\");\n\n let msg = untrusted::Input::from(&msg);\n\n\n\n let public_key = test_case.consume_bytes(\"Q\");\n\n let public_key = untrusted::Input::from(&public_key);\n\n\n\n let sig = test_case.consume_bytes(\"Sig\");\n\n let sig = untrusted::Input::from(&sig);\n\n\n\n let expected_result = test_case.consume_string(\"Result\");\n\n\n\n let alg = match (curve_name.as_str(), digest_name.as_str()) {\n\n (\"P-256\", \"SHA256\") => &signature::ECDSA_P256_SHA256_FIXED,\n", "file_path": "tests/ecdsa_tests.rs", "rank": 52, "score": 206775.9841431182 }, { "content": "#[test]\n\nfn signature_ecdsa_sign_asn1_test() {\n\n test::from_file(\"tests/ecdsa_sign_asn1_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let digest_name = test_case.consume_string(\"Digest\");\n\n\n\n let msg = test_case.consume_bytes(\"Msg\");\n\n let msg = untrusted::Input::from(&msg);\n\n\n\n let d = test_case.consume_bytes(\"d\");\n\n let d = untrusted::Input::from(&d);\n\n\n\n let q = test_case.consume_bytes(\"Q\");\n\n let q = untrusted::Input::from(&q);\n\n\n\n let k = test_case.consume_bytes(\"k\");\n\n\n\n let expected_result = test_case.consume_bytes(\"Sig\");\n\n\n", 
"file_path": "tests/ecdsa_tests.rs", "rank": 53, "score": 206775.9841431182 }, { "content": "#[test]\n\nfn signature_ecdsa_verify_asn1_test() {\n\n test::from_file(\"tests/ecdsa_verify_asn1_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let digest_name = test_case.consume_string(\"Digest\");\n\n\n\n let msg = test_case.consume_bytes(\"Msg\");\n\n let msg = untrusted::Input::from(&msg);\n\n\n\n let public_key = test_case.consume_bytes(\"Q\");\n\n let public_key = untrusted::Input::from(&public_key);\n\n\n\n let sig = test_case.consume_bytes(\"Sig\");\n\n let sig = untrusted::Input::from(&sig);\n\n\n\n let expected_result = test_case.consume_string(\"Result\");\n\n\n\n let alg = match (curve_name.as_str(), digest_name.as_str()) {\n\n (\"P-256\", \"SHA256\") => &signature::ECDSA_P256_SHA256_ASN1,\n", "file_path": "tests/ecdsa_tests.rs", "rank": 56, "score": 206775.9841431182 }, { "content": "fn derive_block(secret: &hmac::SigningKey, iterations: u32, salt: &[u8],\n\n idx: u32, out: &mut [u8]) {\n\n let mut ctx = hmac::SigningContext::with_key(secret);\n\n ctx.update(salt);\n\n ctx.update(&polyfill::slice::be_u8_from_u32(idx));\n\n\n\n let mut u = ctx.sign();\n\n\n\n let mut remaining = iterations;\n\n loop {\n\n for i in 0..out.len() {\n\n out[i] ^= u.as_ref()[i];\n\n }\n\n\n\n if remaining == 1 {\n\n break;\n\n }\n\n remaining -= 1;\n\n\n\n u = hmac::sign(secret, u.as_ref());\n\n }\n\n}\n\n\n", "file_path": "src/pbkdf2.rs", "rank": 57, "score": 205945.31891305003 }, { "content": "#[inline]\n\npub fn recommended_key_len(digest_alg: &digest::Algorithm) -> usize {\n\n digest_alg.chaining_len\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {digest, hmac, rand, test};\n\n\n\n // Make sure that `SigningKey::generate` and `verify_with_own_key` aren't\n\n // completely wacky.\n\n #[test]\n\n pub fn hmac_signing_key_coverage() {\n\n let mut rng = rand::SystemRandom::new();\n\n\n\n const 
HELLO_WORLD_GOOD: &'static [u8] = b\"hello, world\";\n\n const HELLO_WORLD_BAD: &'static [u8] = b\"hello, worle\";\n\n\n\n for d in &digest::test_util::ALL_ALGORITHMS {\n\n {\n", "file_path": "src/hmac.rs", "rank": 58, "score": 205507.63259625767 }, { "content": "#[inline]\n\nfn poly1305_update_padded_16(ctx: &mut poly1305::SigningContext, data: &[u8]) {\n\n ctx.update(data);\n\n if data.len() % 16 != 0 {\n\n static PADDING: [u8; 16] = [0u8; 16];\n\n ctx.update(&PADDING[..PADDING.len() - (data.len() % 16)])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn max_input_len_test() {\n\n // Errata 4858 at https://www.rfc-editor.org/errata_search.php?rfc=7539.\n\n assert_eq!(super::CHACHA20_POLY1305.max_input_len, 274_877_906_880u64);\n\n }\n\n}\n", "file_path": "src/aead/chacha20_poly1305.rs", "rank": 59, "score": 205107.12470427272 }, { "content": "pub fn public_from_private(ops: &PrivateKeyOps, public_out: &mut [u8],\n\n my_private_key: &ec::PrivateKey)\n\n -> Result<(), error::Unspecified> {\n\n let elem_and_scalar_bytes = ops.common.num_limbs * LIMB_BYTES;\n\n debug_assert_eq!(public_out.len(), 1 + (2 * elem_and_scalar_bytes));\n\n let my_private_key = private_key_as_scalar(ops, my_private_key);\n\n let my_public_key = ops.point_mul_base(&my_private_key);\n\n public_out[0] = 4; // Uncompressed encoding.\n\n let (x_out, y_out) =\n\n (&mut public_out[1..]).split_at_mut(elem_and_scalar_bytes);\n\n\n\n // `big_endian_affine_from_jacobian` verifies that the point is not at\n\n // infinity and is on the curve.\n\n big_endian_affine_from_jacobian(ops, Some(x_out), Some(y_out),\n\n &my_public_key)\n\n}\n\n\n", "file_path": "src/ec/suite_b/private_key.rs", "rank": 60, "score": 202743.96752265806 }, { "content": "#[inline]\n\npub fn chacha20_xor_in_place(key: &Key, counter: &Counter, in_out: &mut [u8]) {\n\n chacha20_xor_inner(key, counter, in_out.as_ptr(), in_out.len(),\n\n in_out.as_mut_ptr());\n\n}\n\n\n", "file_path": "src/chacha.rs", "rank": 61, "score": 
201950.37300368864 }, { "content": "/// Parses a public key encoded in uncompressed form. The key is validated\n\n/// using the ECC Partial Public-Key Validation Routine from\n\n/// [NIST SP 800-56A, revision 2] Section 5.6.2.3.3, the NSA's\n\n/// \"Suite B Implementer's Guide to NIST SP 800-56A,\" Appendix B.3, and the\n\n/// NSA's \"Suite B Implementer's Guide to FIPS 186-3 (ECDSA),\" Appendix A.3.\n\n///\n\n/// [NIST SP 800-56A, revision 2]:\n\n/// http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-56Ar2.pdf\n\npub fn parse_uncompressed_point(ops: &PublicKeyOps, input: untrusted::Input)\n\n -> Result<(Elem<R>, Elem<R>), error::Unspecified> {\n\n // NIST SP 800-56A Step 1: \"Verify that Q is not the point at infinity.\n\n // This can be done by inspection if the point is entered in the standard\n\n // affine representation.\" (We do it by inspection since we only accept\n\n // the affine representation.)\n\n let (x, y) = input.read_all(error::Unspecified, |input| {\n\n // The encoding must be 4, which is the encoding for \"uncompressed\".\n\n let encoding = input.read_byte()?;\n\n if encoding != 4 {\n\n return Err(error::Unspecified);\n\n }\n\n\n\n // NIST SP 800-56A Step 2: \"Verify that xQ and yQ are integers in the\n\n // interval [0, p-1] in the case that q is an odd prime p[.]\"\n\n let x = ops.elem_parse(input)?;\n\n let y = ops.elem_parse(input)?;\n\n Ok((x, y))\n\n })?;\n\n\n", "file_path": "src/ec/suite_b/public_key.rs", "rank": 62, "score": 201879.7628742715 }, { "content": "/// The HKDF-Extract operation.\n\n///\n\n/// | Parameter | RFC 5869 Term\n\n/// |---------------------------|--------------\n\n/// | `salt.digest_algorithm()` | Hash\n\n/// | `secret` | IKM (Input Keying Material)\n\n/// | [return value] | PRK\n\npub fn extract(salt: &hmac::SigningKey, secret: &[u8]) -> hmac::SigningKey {\n\n // The spec says that if no salt is provided then a key of\n\n // `digest_alg.output_len` bytes of zeros is used. 
But, HMAC keys are\n\n // already zero-padded to the block length, which is larger than the output\n\n // length of the extract step (the length of the digest). Consequently, the\n\n // `SigningKey` constructor will automatically do the right thing for a\n\n // zero-length string.\n\n let prk = hmac::sign(salt, secret);\n\n hmac::SigningKey::new(salt.digest_algorithm(), prk.as_ref())\n\n}\n\n\n", "file_path": "src/hkdf.rs", "rank": 63, "score": 199605.19886258437 }, { "content": "#[inline]\n\npub fn chacha20_xor_inner(key: &Key, counter: &Counter, input: *const u8,\n\n in_out_len: usize, output: *mut u8) {\n\n debug_assert!(core::mem::align_of_val(key) >= 4);\n\n debug_assert!(core::mem::align_of_val(counter) >= 4);\n\n unsafe {\n\n GFp_ChaCha20_ctr32(output, input, in_out_len, key, counter);\n\n }\n\n}\n\n\n\npub type Counter = [u32; 4];\n\n\n", "file_path": "src/chacha.rs", "rank": 64, "score": 199505.8046046747 }, { "content": "/// Verifies that a previously-derived (e.g., using `derive`) PBKDF2 value\n\n/// matches the PBKDF2 value derived from the other inputs.\n\n///\n\n/// The comparison is done in constant time to prevent timing attacks. 
The\n\n/// comparison will fail if `previously_derived` is empty (has a length of\n\n/// zero).\n\n///\n\n/// | Parameter | RFC 2898 Section 5.2 Term\n\n/// |----------------------------|--------------------------------------------\n\n/// | digest_alg | PRF (HMAC with the given digest algorithm).\n\n/// | `iterations` | c (iteration count)\n\n/// | `salt` | S (salt)\n\n/// | `secret` | P (password)\n\n/// | `previously_derived` | dk (derived key)\n\n/// | `previously_derived.len()` | dkLen (derived key length)\n\n///\n\n/// C analog: `PKCS5_PBKDF2_HMAC` + `CRYPTO_memcmp`\n\n///\n\n/// # Panics\n\n///\n\n/// `verify` panics if `iterations < 1`.\n\n///\n\n/// `verify` panics if `out.len()` is larger than (2**32 - 1) * the digest\n\n/// algorithm's output length, per the PBKDF2 specification.\n\npub fn verify(digest_alg: &'static digest::Algorithm, iterations: u32,\n\n salt: &[u8], secret: &[u8], previously_derived: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n if previously_derived.is_empty() {\n\n return Err(error::Unspecified);\n\n }\n\n\n\n let mut derived_buf = [0u8; digest::MAX_OUTPUT_LEN];\n\n\n\n let output_len = digest_alg.output_len;\n\n let secret = hmac::SigningKey::new(digest_alg, secret);\n\n let mut idx: u32 = 0;\n\n\n\n let mut matches = 1;\n\n\n\n for previously_derived_chunk in previously_derived.chunks(output_len) {\n\n idx = idx.checked_add(1).expect(\"derived key too long\");\n\n\n\n let derived_chunk = &mut derived_buf[..previously_derived_chunk.len()];\n\n polyfill::slice::fill(derived_chunk, 0);\n", "file_path": "src/pbkdf2.rs", "rank": 65, "score": 199380.72131425713 }, { "content": "/// Fills `out` with the key derived using PBKDF2 with the given inputs.\n\n///\n\n/// Do not use `derive` as part of verifying a secret; use `verify` instead, to\n\n/// minimize the effectiveness of timing attacks.\n\n///\n\n/// `out.len()` must be no larger than the digest length * (2**32 - 1), per the\n\n/// PBKDF2 specification.\n\n///\n\n/// | Parameter 
| RFC 2898 Section 5.2 Term\n\n/// |-------------|-------------------------------------------\n\n/// | digest_alg | PRF (HMAC with the given digest algorithm)\n\n/// | iterations | c (iteration count)\n\n/// | salt | S (salt)\n\n/// | secret | P (password)\n\n/// | out | dk (derived key)\n\n/// | out.len() | dkLen (derived key length)\n\n///\n\n/// C analog: `PKCS5_PBKDF2_HMAC`\n\n///\n\n/// # Panics\n\n///\n\n/// `derive` panics if `iterations < 1`.\n\n///\n\n/// `derive` panics if `out.len()` is larger than (2**32 - 1) * the digest\n\n/// algorithm's output length, per the PBKDF2 specification.\n\npub fn derive(digest_alg: &'static digest::Algorithm, iterations: u32,\n\n salt: &[u8], secret: &[u8], out: &mut [u8]) {\n\n assert!(iterations >= 1);\n\n\n\n let output_len = digest_alg.output_len;\n\n\n\n // This implementation's performance is asymptotically optimal as described\n\n // in https://jbp.io/2015/08/11/pbkdf2-performance-matters/. However, it\n\n // hasn't been optimized to the same extent as fastpbkdf2. In particular,\n\n // this implementation is probably doing a lot of unnecessary copying.\n\n\n\n let secret = hmac::SigningKey::new(digest_alg, secret);\n\n\n\n // Clear |out|.\n\n polyfill::slice::fill(out, 0);\n\n\n\n let mut idx: u32 = 0;\n\n\n\n for chunk in out.chunks_mut(output_len) {\n\n idx = idx.checked_add(1).expect(\"derived key too long\");\n\n derive_block(&secret, iterations, salt, idx, chunk);\n\n }\n\n}\n\n\n", "file_path": "src/pbkdf2.rs", "rank": 66, "score": 199380.03299064358 }, { "content": "/// Reads test cases out of the file with the path given by\n\n/// `test_data_relative_file_path`, calling `f` on each vector until `f` fails\n\n/// or until all the test vectors have been read. 
`f` can indicate failure\n\n/// either by returning `Err()` or by panicking.\n\npub fn from_file<F>(test_data_relative_file_path: &str, mut f: F)\n\n where F: FnMut(&str, &mut TestCase)\n\n -> Result<(), error::Unspecified> {\n\n let path = ring_src_path().join(test_data_relative_file_path);\n\n let file = std::fs::File::open(path).unwrap();\n\n let mut lines = std::io::BufReader::new(&file).lines();\n\n\n\n let mut current_section = String::from(\"\");\n\n let mut failed = false;\n\n\n\n #[allow(box_pointers)]\n\n while let Some(mut test_case) = parse_test_case(&mut current_section,\n\n &mut lines) {\n\n let result =\n\n std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {\n\n f(&current_section, &mut test_case)\n\n }));\n\n let result = match result {\n\n Ok(Ok(())) => {\n\n if !test_case.attributes.iter().any(\n", "file_path": "src/test.rs", "rank": 68, "score": 194237.94574683084 }, { "content": "fn aead_poly1305(tag_out: &mut [u8; aead::TAG_LEN], chacha20_key: &chacha::Key,\n\n counter: &chacha::Counter, ad: &[u8], ciphertext: &[u8]) {\n\n debug_assert_eq!(counter[0], 0);\n\n let key = poly1305::Key::derive_using_chacha(chacha20_key, counter);\n\n let mut ctx = poly1305::SigningContext::from_key(key);\n\n poly1305_update_padded_16(&mut ctx, ad);\n\n poly1305_update_padded_16(&mut ctx, ciphertext);\n\n let lengths =\n\n [polyfill::u64_from_usize(ad.len()).to_le(),\n\n polyfill::u64_from_usize(ciphertext.len()).to_le()];\n\n ctx.update(polyfill::slice::u64_as_u8(&lengths));\n\n ctx.sign(tag_out);\n\n}\n\n\n", "file_path": "src/aead/chacha20_poly1305.rs", "rank": 69, "score": 192318.9030065115 }, { "content": "/// Calculates the HMAC of `data` using the key `key` in one step.\n\n///\n\n/// Use `SigningContext` to calculate HMACs where the input is in multiple\n\n/// parts.\n\n///\n\n/// It is generally not safe to implement HMAC verification by comparing the\n\n/// return value of `sign` to a signature. 
Use `verify` for verification\n\n/// instead.\n\n///\n\n/// C analog: `HMAC_CTX_init` + `HMAC_Update` + `HMAC_Final`.\n\npub fn sign(key: &SigningKey, data: &[u8]) -> Signature {\n\n let mut ctx = SigningContext::with_key(key);\n\n ctx.update(data);\n\n ctx.sign()\n\n}\n\n\n\n/// A key to use for HMAC authentication.\n\npub struct VerificationKey {\n\n wrapped: SigningKey,\n\n}\n\n\n\nimpl VerificationKey {\n\n /// Construct an HMAC verification key using the given digest algorithm and\n\n /// key value.\n\n ///\n\n /// As specified in RFC 2104, if `key_value` is shorter than the digest\n\n /// algorithm's block length (as returned by `digest::Algorithm::block_len`,\n\n /// not the digest length returned by `digest::Algorithm::output_len`) then\n\n /// it will be padded with zeros. Similarly, if it is longer than the block\n\n /// length then it will be compressed using the digest algorithm.\n", "file_path": "src/hmac.rs", "rank": 70, "score": 192096.73090630205 }, { "content": "fn digest_scalar(digest: digest::Digest) -> Scalar {\n\n let mut unreduced = [0u8; digest::SHA512_OUTPUT_LEN];\n\n unreduced.copy_from_slice(digest.as_ref());\n\n unsafe { GFp_x25519_sc_reduce(&mut unreduced) };\n\n let mut scalar = [0u8; SCALAR_LEN];\n\n scalar.copy_from_slice(&unreduced[..SCALAR_LEN]);\n\n scalar\n\n}\n\n\n\nextern {\n\n fn GFp_x25519_sc_mask(a: &mut Scalar);\n\n fn GFp_x25519_ge_double_scalarmult_vartime(r: &mut Point, a_coeff: &Scalar,\n\n a: &ExtPoint, b_coeff: &Scalar);\n\n fn GFp_x25519_ge_scalarmult_base(h: &mut ExtPoint, a: &Seed);\n\n fn GFp_x25519_sc_muladd(s: &mut Scalar, a: &Scalar, b: &Scalar, c: &Scalar);\n\n fn GFp_x25519_sc_reduce(s: &mut UnreducedScalar);\n\n}\n\n\n", "file_path": "src/ec/curve25519/ed25519.rs", "rank": 71, "score": 189280.97199464246 }, { "content": "// Test that we reject non-standard nonce sizes.\n\n//\n\n// XXX: This test isn't that great in terms of how it tests\n\n// `open_in_place`. 
It should be constructing a valid ciphertext using the\n\n// unsupported nonce size using a different implementation that supports\n\n// non-standard nonce sizes. So, when `open_in_place` returns\n\n// `Err(error::Unspecified)`, we don't know if it is because it rejected\n\n// the non-standard nonce size or because it tried to process the input\n\n// with the wrong nonce. But at least we're verifying that `open_in_place`\n\n// won't crash or access out-of-bounds memory (when run under valgrind or\n\n// similar). The AES-128-GCM tests have some WRONG_NONCE_LENGTH test cases\n\n// that tests this more correctly.\n\nfn test_aead_nonce_sizes(aead_alg: &'static aead::Algorithm)\n\n -> Result<(), error::Unspecified> {\n\n let key_len = aead_alg.key_len();\n\n let key_data = vec![0u8; key_len];\n\n let s_key = aead::SealingKey::new(aead_alg, &key_data[..key_len])?;\n\n let o_key = aead::OpeningKey::new(aead_alg, &key_data[..key_len])?;\n\n\n\n let nonce_len = aead_alg.nonce_len();\n\n\n\n let nonce = vec![0u8; nonce_len * 2];\n\n\n\n let prefix_len = 0;\n\n let tag_len = aead_alg.tag_len();\n\n let ad: [u8; 0] = [];\n\n\n\n // Construct a template input for `seal_in_place`.\n\n let mut to_seal = b\"hello, world\".to_vec();\n\n // Reserve space for tag.\n\n for _ in 0..tag_len {\n\n to_seal.push(0);\n", "file_path": "tests/aead_tests.rs", "rank": 72, "score": 185565.57484855113 }, { "content": "fn test_aead_key_sizes(aead_alg: &'static aead::Algorithm) {\n\n let key_len = aead_alg.key_len();\n\n let key_data = vec![0u8; key_len * 2];\n\n\n\n // Key is the right size.\n\n assert!(aead::OpeningKey::new(aead_alg, &key_data[..key_len]).is_ok());\n\n assert!(aead::SealingKey::new(aead_alg, &key_data[..key_len]).is_ok());\n\n\n\n // Key is one byte too small.\n\n assert!(aead::OpeningKey::new(aead_alg, &key_data[..(key_len - 1)])\n\n .is_err());\n\n assert!(aead::SealingKey::new(aead_alg, &key_data[..(key_len - 1)])\n\n .is_err());\n\n\n\n // Key is one byte too large.\n\n 
assert!(aead::OpeningKey::new(aead_alg, &key_data[..(key_len + 1)])\n\n .is_err());\n\n assert!(aead::SealingKey::new(aead_alg, &key_data[..(key_len + 1)])\n\n .is_err());\n\n\n", "file_path": "tests/aead_tests.rs", "rank": 73, "score": 185541.46871551307 }, { "content": "fn run(digest_name: &str, expected_digest_hex: &str,\n\n file_path: &std::path::Path) -> Result<(), &'static str> {\n\n let digest_alg = match digest_name {\n\n \"sha256\" => &digest::SHA256,\n\n \"sha384\" => &digest::SHA384,\n\n \"sha512\" => &digest::SHA512,\n\n _ => { return Err(\"unsupported digest algorithm\"); }\n\n };\n\n\n\n let mut ctx = digest::Context::new(digest_alg);\n\n\n\n {\n\n let mut file = match std::fs::File::open(file_path) {\n\n Ok(file) => file,\n\n // TODO: don't use panic here.\n\n Err(why) => panic!(\"couldn't open {}: {}\", file_path.display(),\n\n why.description())\n\n };\n\n\n\n let mut chunk = vec![0u8; 128 * 1024];\n", "file_path": "examples/checkdigest.rs", "rank": 74, "score": 184757.1393877717 }, { "content": "fn test_aead(aead_alg: &'static aead::Algorithm, file_path: &str) {\n\n test_aead_key_sizes(aead_alg);\n\n test_aead_nonce_sizes(aead_alg).unwrap();\n\n\n\n test::from_file(file_path, |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let key_bytes = test_case.consume_bytes(\"KEY\");\n\n let nonce = test_case.consume_bytes(\"NONCE\");\n\n let plaintext = test_case.consume_bytes(\"IN\");\n\n let ad = test_case.consume_bytes(\"AD\");\n\n let mut ct = test_case.consume_bytes(\"CT\");\n\n let tag = test_case.consume_bytes(\"TAG\");\n\n let error = test_case.consume_optional_string(\"FAILS\");\n\n\n\n let tag_len = aead_alg.tag_len();\n\n let mut s_in_out = plaintext.clone();\n\n for _ in 0..tag_len {\n\n s_in_out.push(0);\n\n }\n\n let s_key = aead::SealingKey::new(aead_alg, &key_bytes[..])?;\n", "file_path": "tests/aead_tests.rs", "rank": 75, "score": 179851.12256467805 }, { "content": "fn alg_from_curve_name(curve_name: &str) -> &'static 
agreement::Algorithm {\n\n if curve_name == \"P-256\" {\n\n &agreement::ECDH_P256\n\n } else if curve_name == \"P-384\" {\n\n &agreement::ECDH_P384\n\n } else if curve_name == \"X25519\" {\n\n &agreement::X25519\n\n } else {\n\n panic!(\"Unsupported curve: {}\", curve_name);\n\n }\n\n}\n", "file_path": "tests/agreement_tests.rs", "rank": 76, "score": 177731.69271362777 }, { "content": "#[cfg(not(target_os = \"ios\"))]\n\npub fn ring_src_path() -> std::path::PathBuf {\n\n std::path::PathBuf::from(\".\")\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 77, "score": 171711.16567147995 }, { "content": "/// Returns `Ok(())` if `a == b` and `Err(error::Unspecified)` otherwise.\n\n/// The comparison of `a` and `b` is done in constant time with respect to the\n\n/// contents of each, but NOT in constant time with respect to the lengths of\n\n/// `a` and `b`.\n\npub fn verify_slices_are_equal(a: &[u8], b: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n if a.len() != b.len() {\n\n return Err(error::Unspecified);\n\n }\n\n let result = unsafe { GFp_memcmp(a.as_ptr(), b.as_ptr(), a.len()) };\n\n match result {\n\n 0 => Ok(()),\n\n _ => Err(error::Unspecified),\n\n }\n\n}\n\n\n\nextern {\n\n fn GFp_memcmp(a: *const u8, b: *const u8, len: c::size_t) -> c::int;\n\n}\n", "file_path": "src/constant_time.rs", "rank": 78, "score": 171697.16913407354 }, { "content": "#[repr(C, align(8))]\n\nstruct Opaque([u8; OPAQUE_LEN]);\n\nconst OPAQUE_LEN: usize = 192;\n\n\n", "file_path": "src/poly1305.rs", "rank": 79, "score": 169890.2516700467 }, { "content": "fn compile(p: &Path, target: &Target, warnings_are_errors: bool, out_dir: &Path,\n\n includes_modified: SystemTime) -> String {\n\n let ext = p.extension().unwrap().to_str().unwrap();\n\n if ext == \"obj\" {\n\n p.to_str().expect(\"Invalid path\").into()\n\n } else {\n\n let mut out_path = out_dir.clone().join(p.file_name().unwrap());\n\n assert!(out_path.set_extension(target.obj_ext));\n\n if need_run(&p, &out_path, includes_modified) 
{\n\n let cmd = if target.os() != WINDOWS || ext != \"asm\" {\n\n cc(p, ext, target, warnings_are_errors, &out_path)\n\n } else {\n\n yasm(p, target.arch(), &out_path)\n\n };\n\n\n\n run_command(cmd);\n\n }\n\n out_path.to_str().expect(\"Invalid path\").into()\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 80, "score": 166319.0644415506 }, { "content": "fn cc(file: &Path, ext: &str, target: &Target, warnings_are_errors: bool,\n\n out_dir: &Path)\n\n -> Command {\n\n let mut c = cc::Build::new();\n\n let _ = c.include(\"include\");\n\n match ext {\n\n \"c\" => {\n\n for f in c_flags(target) {\n\n let _ = c.flag(f);\n\n }\n\n },\n\n \"S\" => {},\n\n e => panic!(\"Unsupported file extension: {:?}\", e),\n\n };\n\n for f in cpp_flags(target) {\n\n let _ = c.flag(&f);\n\n }\n\n if target.os() != \"none\" &&\n\n target.os() != \"redox\" &&\n\n target.os() != \"windows\" {\n", "file_path": "build.rs", "rank": 81, "score": 163584.69445201597 }, { "content": "fn run_command(mut cmd: Command) {\n\n println!(\"running {:?}\", cmd);\n\n let status = cmd.status().unwrap_or_else(|e| {\n\n panic!(\"failed to execute [{:?}]: {}\", cmd, e);\n\n });\n\n if !status.success() {\n\n panic!(\"execution failed\");\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 82, "score": 162758.28564539 }, { "content": "#[test]\n\nfn hkdf_tests() {\n\n test::from_file(\"tests/hkdf_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let digest_alg =\n\n test_case.consume_digest_alg(\"Hash\").ok_or(error::Unspecified)?;\n\n let secret = test_case.consume_bytes(\"IKM\");\n\n let salt = test_case.consume_bytes(\"salt\");\n\n let info = test_case.consume_bytes(\"info\");\n\n\n\n // The PRK is an intermediate value that we can't test, but we\n\n // have to consume it to make test::from_file happy.\n\n let _ = test_case.consume_bytes(\"PRK\");\n\n\n\n let expected_out = test_case.consume_bytes(\"OKM\");\n\n\n\n let salt = hmac::SigningKey::new(digest_alg, &salt);\n\n\n\n let mut 
out = vec![0u8; expected_out.len()];\n\n hkdf::extract_and_expand(&salt, &secret, &info, &mut out);\n\n assert_eq!(out, expected_out);\n\n\n\n Ok(())\n\n });\n\n}\n", "file_path": "tests/hkdf_tests.rs", "rank": 83, "score": 160918.6973650916 }, { "content": "#[test]\n\nfn hmac_tests() {\n\n test::from_file(\"tests/hmac_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let digest_alg = test_case.consume_digest_alg(\"HMAC\");\n\n let key_value = test_case.consume_bytes(\"Key\");\n\n let mut input = test_case.consume_bytes(\"Input\");\n\n let output = test_case.consume_bytes(\"Output\");\n\n\n\n let digest_alg = match digest_alg {\n\n Some(digest_alg) => digest_alg,\n\n None => { return Ok(()); }, // Unsupported digest algorithm\n\n };\n\n\n\n hmac_test_case_inner(digest_alg, &key_value[..], &input[..],\n\n &output[..], true)?;\n\n\n\n // Tamper with the input and check that verification fails.\n\n if input.is_empty() {\n\n input.push(0);\n\n } else {\n\n input[0] ^= 1;\n\n }\n\n\n\n hmac_test_case_inner(digest_alg, &key_value[..], &input[..],\n\n &output[..], false)\n\n });\n\n}\n\n\n", "file_path": "tests/hmac_tests.rs", "rank": 84, "score": 160918.6973650916 }, { "content": "#[test]\n\nfn ecdsa_from_pkcs8_test() {\n\n test::from_file(\"tests/ecdsa_from_pkcs8_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let ((this_fixed, this_asn1), (other_fixed, other_asn1)) =\n\n match curve_name.as_str() {\n\n \"P-256\" => ((&signature::ECDSA_P256_SHA256_FIXED_SIGNING,\n\n &signature::ECDSA_P256_SHA256_ASN1_SIGNING),\n\n (&signature::ECDSA_P384_SHA384_FIXED_SIGNING,\n\n &signature::ECDSA_P384_SHA384_ASN1_SIGNING)),\n\n \"P-384\" => ((&signature::ECDSA_P384_SHA384_FIXED_SIGNING,\n\n &signature::ECDSA_P384_SHA384_ASN1_SIGNING),\n\n (&signature::ECDSA_P256_SHA256_FIXED_SIGNING,\n\n &signature::ECDSA_P256_SHA256_ASN1_SIGNING)),\n\n _ => unreachable!(),\n\n };\n\n\n\n let input 
= test_case.consume_bytes(\"Input\");\n\n let input = untrusted::Input::from(&input);\n", "file_path": "tests/ecdsa_tests.rs", "rank": 85, "score": 159184.49596021883 }, { "content": "#[test]\n\nfn test_ed25519_from_pkcs8() {\n\n // Just test that we can parse the input.\n\n test::from_file(\"tests/ed25519_from_pkcs8_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let input = test_case.consume_bytes(\"Input\");\n\n let error = test_case.consume_optional_string(\"Error\");\n\n assert_eq!(\n\n Ed25519KeyPair::from_pkcs8(untrusted::Input::from(&input)).is_ok(),\n\n error.is_none());\n\n Ok(())\n\n });\n\n}\n", "file_path": "tests/ed25519_tests.rs", "rank": 86, "score": 159184.49596021883 }, { "content": "/// Encrypts and signs (“seals”) data in place.\n\n///\n\n/// `nonce` must be unique for every use of the key to seal data.\n\n///\n\n/// The input is `in_out[..(in_out.len() - out_suffix_capacity)]`; i.e. the\n\n/// input is the part of `in_out` that precedes the suffix. When\n\n/// `seal_in_place()` returns `Ok(out_len)`, the encrypted and signed output is\n\n/// `in_out[..out_len]`; i.e. the output has been written over input and at\n\n/// least part of the data reserved for the suffix. (The input/output buffer\n\n/// is expressed this way because Rust's type system does not allow us to have\n\n/// two slices, one mutable and one immutable, that reference overlapping\n\n/// memory at the same time.)\n\n///\n\n/// `out_suffix_capacity` must be at least `key.algorithm().tag_len()`. 
See\n\n/// also `MAX_TAG_LEN`.\n\n///\n\n/// `ad` is the additional authenticated data, if any.\n\n///\n\n/// C analog: `EVP_AEAD_CTX_seal`.\n\n///\n\n/// Go analog: [`AEAD.Seal`](https://golang.org/pkg/crypto/cipher/#AEAD)\n\npub fn seal_in_place(key: &SealingKey, nonce: &[u8], ad: &[u8],\n\n in_out: &mut [u8], out_suffix_capacity: usize)\n\n -> Result<usize, error::Unspecified> {\n\n if out_suffix_capacity < key.key.algorithm.tag_len() {\n\n return Err(error::Unspecified);\n\n }\n\n let nonce = slice_as_array_ref!(nonce, NONCE_LEN)?;\n\n let in_out_len =\n\n in_out.len().checked_sub(out_suffix_capacity).ok_or(error::Unspecified)?;\n\n check_per_nonce_max_bytes(key.key.algorithm, in_out_len)?;\n\n let (in_out, tag_out) = in_out.split_at_mut(in_out_len);\n\n let tag_out = slice_as_array_ref_mut!(tag_out, TAG_LEN)?;\n\n (key.key.algorithm.seal)(&key.key.ctx_buf, nonce, ad, in_out, tag_out)?;\n\n Ok(in_out_len + TAG_LEN)\n\n}\n\n\n", "file_path": "src/aead/mod.rs", "rank": 87, "score": 158877.86284372467 }, { "content": "fn x25519_check_private_key_bytes(bytes: &[u8])\n\n -> Result<(), error::Unspecified> {\n\n debug_assert_eq!(bytes.len(), PRIVATE_KEY_LEN);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ec/curve25519/x25519.rs", "rank": 88, "score": 157968.98393271564 }, { "content": "#[test]\n\nfn test_ed25519_from_pkcs8_unchecked() {\n\n // Just test that we can parse the input.\n\n test::from_file(\"tests/ed25519_from_pkcs8_unchecked_tests.txt\",\n\n |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let input = test_case.consume_bytes(\"Input\");\n\n let error = test_case.consume_optional_string(\"Error\");\n\n assert_eq!(\n\n Ed25519KeyPair::from_pkcs8_maybe_unchecked(\n\n untrusted::Input::from(&input)).is_ok(),\n\n error.is_none());\n\n Ok(())\n\n });\n\n}\n\n\n", "file_path": "tests/ed25519_tests.rs", "rank": 89, "score": 157506.02202720402 }, { "content": "#[test]\n\nfn ecdsa_generate_pkcs8_test() {\n\n let rng = rand::SystemRandom::new();\n\n\n\n for alg 
in &[&signature::ECDSA_P256_SHA256_ASN1_SIGNING,\n\n &signature::ECDSA_P256_SHA256_FIXED_SIGNING,\n\n &signature::ECDSA_P384_SHA384_ASN1_SIGNING,\n\n &signature::ECDSA_P384_SHA384_FIXED_SIGNING] {\n\n let pkcs8 = signature::ECDSAKeyPair::generate_pkcs8(alg, &rng).unwrap();\n\n println!();\n\n for b in pkcs8.as_ref() {\n\n print!(\"{:02x}\", *b);\n\n }\n\n println!();\n\n println!();\n\n let _ = signature::key_pair_from_pkcs8(\n\n *alg, untrusted::Input::from(pkcs8.as_ref())).unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/ecdsa_tests.rs", "rank": 90, "score": 157506.02202720402 }, { "content": "/// Authenticates and decrypts (“opens”) data in place. When\n\n///\n\n/// The input may have a prefix that is `in_prefix_len` bytes long; any such\n\n/// prefix is ignored on input and overwritten on output. The last\n\n/// `key.algorithm().tag_len()` bytes of `ciphertext_and_tag_modified_in_place`\n\n/// must be the tag. The part of `ciphertext_and_tag_modified_in_place` between\n\n/// the prefix and the tag is the input ciphertext.\n\n///\n\n/// When `open_in_place()` returns `Ok(plaintext)`, the decrypted output is\n\n/// `plaintext`, which is\n\n/// `&mut ciphertext_and_tag_modified_in_place[..plaintext.len()]`. That is,\n\n/// the output plaintext overwrites some or all of the prefix and ciphertext.\n\n/// To put it another way, the ciphertext is shifted forward `in_prefix_len`\n\n/// bytes and then decrypted in place. To have the output overwrite the input\n\n/// without shifting, pass 0 as `in_prefix_len`.\n\n///\n\n/// When `open_in_place()` returns `Err(..)`,\n\n/// `ciphertext_and_tag_modified_in_place` may have been overwritten in an\n\n/// unspecified way.\n\n///\n\n/// The shifting feature is useful in the case where multiple packets are\n\n/// being reassembled in place. 
Consider this example where the peer has sent\n\n/// the message “Split stream reassembled in place” split into three sealed\n\n/// packets:\n\n///\n\n/// ```ascii-art\n\n/// Packet 1 Packet 2 Packet 3\n\n/// Input: [Header][Ciphertext][Tag][Header][Ciphertext][Tag][Header][Ciphertext][Tag]\n\n/// | +--------------+ |\n\n/// +------+ +-----+ +----------------------------------+\n\n/// v v v\n\n/// Output: [Plaintext][Plaintext][Plaintext]\n\n/// “Split stream reassembled in place”\n\n/// ```\n\n///\n\n/// Let's say the header is always 5 bytes (like TLS 1.2) and the tag is always\n\n/// 16 bytes (as for AES-GCM and ChaCha20-Poly1305). Then for this example,\n\n/// `in_prefix_len` would be `5` for the first packet, `(5 + 16) + 5` for the\n\n/// second packet, and `(2 * (5 + 16)) + 5` for the third packet.\n\n///\n\n/// (The input/output buffer is expressed as combination of `in_prefix_len`\n\n/// and `ciphertext_and_tag_modified_in_place` because Rust's type system\n\n/// does not allow us to have two slices, one mutable and one immutable, that\n\n/// reference overlapping memory.)\n\n///\n\n/// C analog: `EVP_AEAD_CTX_open`\n\n///\n\n/// Go analog: [`AEAD.Open`](https://golang.org/pkg/crypto/cipher/#AEAD)\n\npub fn open_in_place<'a>(key: &OpeningKey, nonce: &[u8], ad: &[u8],\n\n in_prefix_len: usize,\n\n ciphertext_and_tag_modified_in_place: &'a mut [u8])\n\n -> Result<&'a mut [u8], error::Unspecified> {\n\n let nonce = slice_as_array_ref!(nonce, NONCE_LEN)?;\n\n let ciphertext_and_tag_len =\n\n ciphertext_and_tag_modified_in_place.len()\n\n .checked_sub(in_prefix_len).ok_or(error::Unspecified)?;\n\n let ciphertext_len =\n\n ciphertext_and_tag_len.checked_sub(TAG_LEN).ok_or(error::Unspecified)?;\n\n check_per_nonce_max_bytes(key.key.algorithm, ciphertext_len)?;\n\n let (in_out, received_tag) =\n\n ciphertext_and_tag_modified_in_place\n\n .split_at_mut(in_prefix_len + ciphertext_len);\n\n let mut calculated_tag = [0u8; TAG_LEN];\n\n 
(key.key.algorithm.open)(&key.key.ctx_buf, nonce, &ad, in_prefix_len,\n\n in_out, &mut calculated_tag)?;\n\n if constant_time::verify_slices_are_equal(&calculated_tag, received_tag)\n\n .is_err() {\n\n // Zero out the plaintext so that it isn't accidentally leaked or used\n", "file_path": "src/aead/mod.rs", "rank": 91, "score": 156180.69648729253 }, { "content": "#[test]\n\npub fn pbkdf2_tests() {\n\n test::from_file(\"tests/pbkdf2_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n let digest_alg = &test_case.consume_digest_alg(\"Hash\").unwrap();\n\n let iterations = test_case.consume_usize(\"c\");\n\n let secret = test_case.consume_bytes(\"P\");\n\n let salt = test_case.consume_bytes(\"S\");\n\n let dk = test_case.consume_bytes(\"DK\");\n\n let verify_expected_result = test_case.consume_string(\"Verify\");\n\n let verify_expected_result =\n\n match verify_expected_result.as_str() {\n\n \"OK\" => Ok(()),\n\n \"Err\" => Err(error::Unspecified),\n\n _ => panic!(\"Unsupported value of \\\"Verify\\\"\"),\n\n };\n\n\n\n {\n\n let mut out = vec![0u8; dk.len()];\n\n pbkdf2::derive(digest_alg, iterations as u32, &salt, &secret,\n\n &mut out);\n", "file_path": "tests/pbkdf2_tests.rs", "rank": 93, "score": 155432.1122195102 }, { "content": "#[test]\n\nfn test_agreement_ecdh_x25519_rfc_iterated() {\n\n let mut k =\n\n h(\"0900000000000000000000000000000000000000000000000000000000000000\");\n\n let mut u = k.clone();\n\n\n\n fn expect_iterated_x25519(expected_result: &str,\n\n range: std::ops::Range<usize>, k: &mut Vec<u8>,\n\n u: &mut Vec<u8>) {\n\n for _ in range {\n\n let new_k = x25519(k, u);\n\n *u = k.clone();\n\n *k = new_k;\n\n }\n\n assert_eq!(&h(expected_result), k);\n\n }\n\n\n\n expect_iterated_x25519(\n\n \"422c8e7a6227d7bca1350b3e2bb7279f7897b87bb6854b783c60e80311ae3079\",\n\n 0..1, &mut k, &mut u);\n\n expect_iterated_x25519(\n", "file_path": "tests/agreement_tests.rs", "rank": 94, "score": 154304.73716480754 }, { "content": "#[test]\n\nfn 
test_ed25519_from_seed_and_public_key_misuse() {\n\n const PRIVATE_KEY: &[u8] = include_bytes!(\"ed25519_test_private_key.bin\");\n\n const PUBLIC_KEY: &[u8] = include_bytes!(\"ed25519_test_public_key.bin\");\n\n\n\n assert!(Ed25519KeyPair::from_seed_and_public_key(\n\n untrusted::Input::from(PRIVATE_KEY),\n\n untrusted::Input::from(PUBLIC_KEY)).is_ok());\n\n\n\n // Truncated private key.\n\n assert!(Ed25519KeyPair::from_seed_and_public_key(\n\n untrusted::Input::from(&PRIVATE_KEY[..31]),\n\n untrusted::Input::from(PUBLIC_KEY)).is_err());\n\n\n\n // Truncated public key.\n\n assert!(Ed25519KeyPair::from_seed_and_public_key(\n\n untrusted::Input::from(PRIVATE_KEY),\n\n untrusted::Input::from(&PUBLIC_KEY[..31])).is_err());\n\n\n\n // Swapped public and private key.\n\n assert!(Ed25519KeyPair::from_seed_and_public_key(\n\n untrusted::Input::from(PUBLIC_KEY),\n\n untrusted::Input::from(PRIVATE_KEY)).is_err());\n\n}\n\n\n", "file_path": "tests/ed25519_tests.rs", "rank": 95, "score": 154304.73716480754 }, { "content": "#[test]\n\nfn agreement_agree_ephemeral() {\n\n let rng = rand::SystemRandom::new();\n\n\n\n test::from_file(\"tests/agreement_tests.txt\", |section, test_case| {\n\n assert_eq!(section, \"\");\n\n\n\n let curve_name = test_case.consume_string(\"Curve\");\n\n let alg = alg_from_curve_name(&curve_name);\n\n let peer_public = test_case.consume_bytes(\"PeerQ\");\n\n let peer_public = untrusted::Input::from(&peer_public);\n\n\n\n match test_case.consume_optional_string(\"Error\") {\n\n None => {\n\n let my_private = test_case.consume_bytes(\"D\");\n\n let rng = test::rand::FixedSliceRandom { bytes: &my_private };\n\n let my_private =\n\n agreement::EphemeralPrivateKey::generate(alg, &rng)?;\n\n\n\n let my_public = test_case.consume_bytes(\"MyQ\");\n\n let output = test_case.consume_bytes(\"Output\");\n", "file_path": "tests/agreement_tests.rs", "rank": 96, "score": 154160.52388072887 }, { "content": "#[test]\n\nfn aead_aes_gcm_256() {\n\n 
test_aead(&aead::AES_256_GCM, \"tests/aead_aes_256_gcm_tests.txt\");\n\n}\n\n\n", "file_path": "tests/aead_tests.rs", "rank": 97, "score": 154160.52388072887 }, { "content": "#[test]\n\nfn aead_aes_gcm_128() {\n\n test_aead(&aead::AES_128_GCM, \"tests/aead_aes_128_gcm_tests.txt\");\n\n}\n\n\n", "file_path": "tests/aead_tests.rs", "rank": 98, "score": 154160.52388072887 } ]
Rust
src/error.rs
braunse/isahc
df395edf481221d7d1fbfef020d813e604aa28e3
#![allow(deprecated)] use std::error::Error as StdError; use std::fmt; use std::io; #[derive(Debug)] pub enum Error { Aborted, BadClientCertificate(Option<String>), BadServerCertificate(Option<String>), ConnectFailed, CouldntResolveHost, CouldntResolveProxy, Curl(String), InvalidContentEncoding(Option<String>), InvalidCredentials, InvalidHttpFormat(http::Error), InvalidUtf8, Io(io::Error), NoResponse, RangeRequestUnsupported, RequestBodyError(Option<String>), ResponseBodyError(Option<String>), SSLConnectFailed(Option<String>), SSLEngineError(Option<String>), Timeout, TooManyRedirects, } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}: {}", self, Error::description(self)) } } impl StdError for Error { fn description(&self) -> &str { match self { Error::Aborted => "request aborted unexpectedly", Error::BadClientCertificate(Some(ref e)) => e, Error::BadServerCertificate(Some(ref e)) => e, Error::ConnectFailed => "failed to connect to the server", Error::CouldntResolveHost => "couldn't resolve host name", Error::CouldntResolveProxy => "couldn't resolve proxy host name", Error::Curl(ref e) => e, Error::InvalidContentEncoding(Some(ref e)) => e, Error::InvalidCredentials => "credentials were rejected by the server", Error::InvalidHttpFormat(ref e) => e.description(), Error::InvalidUtf8 => "bytes are not valid UTF-8", Error::Io(ref e) => e.description(), Error::NoResponse => "server did not send a response", Error::RangeRequestUnsupported => "server does not support or accept range requests", Error::RequestBodyError(Some(ref e)) => e, Error::ResponseBodyError(Some(ref e)) => e, Error::SSLConnectFailed(Some(ref e)) => e, Error::SSLEngineError(Some(ref e)) => e, Error::Timeout => "request took longer than the configured timeout", Error::TooManyRedirects => "max redirect limit exceeded", _ => "unknown error", } } fn cause(&self) -> Option<&dyn StdError> { match self { Error::InvalidHttpFormat(e) => Some(e), Error::Io(e) 
=> Some(e), _ => None, } } } #[doc(hidden)] impl From<curl::Error> for Error { fn from(error: curl::Error) -> Error { if error.is_ssl_certproblem() || error.is_ssl_cacert_badfile() { Error::BadClientCertificate(error.extra_description().map(str::to_owned)) } else if error.is_peer_failed_verification() || error.is_ssl_cacert() { Error::BadServerCertificate(error.extra_description().map(str::to_owned)) } else if error.is_couldnt_connect() { Error::ConnectFailed } else if error.is_couldnt_resolve_host() { Error::CouldntResolveHost } else if error.is_couldnt_resolve_proxy() { Error::CouldntResolveProxy } else if error.is_bad_content_encoding() || error.is_conv_failed() { Error::InvalidContentEncoding(error.extra_description().map(str::to_owned)) } else if error.is_login_denied() { Error::InvalidCredentials } else if error.is_got_nothing() { Error::NoResponse } else if error.is_range_error() { Error::RangeRequestUnsupported } else if error.is_read_error() || error.is_aborted_by_callback() { Error::RequestBodyError(error.extra_description().map(str::to_owned)) } else if error.is_write_error() || error.is_partial_file() { Error::ResponseBodyError(error.extra_description().map(str::to_owned)) } else if error.is_ssl_connect_error() { Error::SSLConnectFailed(error.extra_description().map(str::to_owned)) } else if error.is_ssl_engine_initfailed() || error.is_ssl_engine_notfound() || error.is_ssl_engine_setfailed() { Error::SSLEngineError(error.extra_description().map(str::to_owned)) } else if error.is_operation_timedout() { Error::Timeout } else if error.is_too_many_redirects() { Error::TooManyRedirects } else { Error::Curl(error.description().to_owned()) } } } #[doc(hidden)] impl From<curl::MultiError> for Error { fn from(error: curl::MultiError) -> Error { Error::Curl(error.description().to_owned()) } } #[doc(hidden)] impl From<http::Error> for Error { fn from(error: http::Error) -> Error { Error::InvalidHttpFormat(error) } } #[doc(hidden)] impl From<io::Error> for Error { 
fn from(error: io::Error) -> Error { match error.kind() { io::ErrorKind::ConnectionRefused => Error::ConnectFailed, io::ErrorKind::TimedOut => Error::Timeout, _ => Error::Io(error), } } } #[doc(hidden)] impl From<Error> for io::Error { fn from(error: Error) -> io::Error { match error { Error::ConnectFailed => io::ErrorKind::ConnectionRefused.into(), Error::Io(e) => e, Error::Timeout => io::ErrorKind::TimedOut.into(), _ => io::ErrorKind::Other.into(), } } } #[doc(hidden)] impl From<std::string::FromUtf8Error> for Error { fn from(_: std::string::FromUtf8Error) -> Error { Error::InvalidUtf8 } } #[doc(hidden)] impl From<std::str::Utf8Error> for Error { fn from(_: std::str::Utf8Error) -> Error { Error::InvalidUtf8 } }
#![allow(deprecated)] use std::error::Error as StdError; use std::fmt; use std::io; #[derive(Debug)] pub enum Error { Aborted, BadClientCertificate(Option<String>), BadServerCertificate(Option<String>), ConnectFailed, CouldntResolveHost, CouldntResolveProxy, Curl(String), InvalidContentEncoding(Option<String>), InvalidCredentials, InvalidHttpFormat(http::Error), InvalidUtf8, Io(io::Error), NoResponse, RangeRequestUnsupported, RequestBodyError(Option<String>), ResponseBodyError(Option<String>), SSLConnectFailed(Option<String>), SSLEngineError(Option<String>), Timeout, TooManyRedirects, } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}: {}", self, Error::description(self)) } } impl StdError for Error { fn description(&self) -> &str { match self { Error::Aborted => "request aborted unexpectedly", Error::BadClientCertificate(Some(ref e)) => e, Error::BadServerCertificate(Some(ref e)) => e, Error::ConnectFailed => "failed to connect to the server", Error::CouldntResolveHost => "couldn't resolve host name", Error::CouldntResolveProxy => "couldn't resolve proxy host name", Error::Curl(ref e) => e, Error::InvalidContentEncoding(Some(ref e)) => e, Error::InvalidCredentials => "credentials were rejected by the server", Error::InvalidHttpFormat(ref e) => e.description(), Error::InvalidUtf8 => "bytes are not valid UTF-8", Error::Io(ref e) => e.description(), Error::NoResponse => "server did not send a response", Error::RangeRequestUnsupported => "server does not support or accept range requests", Error::RequestBodyError(Some(ref e)) => e, Error::ResponseBodyError(Some(ref e)) => e, Error::SSLConnectFailed(Some(ref e)) => e, Error::SSLEngineError(Some(ref e)) => e, Error::Timeout => "request took longer than the configured timeout", Error::TooManyRedirects => "max redirect limit exceeded", _ => "unknown error", } } fn cause(&self) -> Option<&dyn StdError> { match self { Error::InvalidHttpFormat(e) => Some(e), Error::Io(e) 
=> Some(e), _ => None, } } } #[doc(hidden)] impl From<curl::Error> for Error { fn from(error: curl::Error) -> Error { if error.is_ssl_certproblem() || error.is_ssl_cacert_badfile() { Error::BadClientCertificate(error.extra_description().map(str::to_owned)) } else if error.is_peer_failed_verification() || error.is_ssl_cacert() { Error::BadServerCertificate(error.extra_description().map(str::to_owned)) } else if error.is_couldnt_connect() { Error::ConnectFailed } else if error.is_couldnt_resolve_host() { Error::CouldntResolveHost } else if error.is_couldnt_resolve_proxy() { Error::CouldntResolveProxy } else if error.is_bad_content_encoding() || error.is_conv_failed() { Error::InvalidContentEncoding(error.extra_description().map(str::to_owned)) } else if error.is_login_denied() { Error::InvalidCredentials } else if error.is_got_nothing() { Error::NoResponse } else if error.is_range_error() { Error::RangeRequestUnsupported } else if error.is_read_error() || error.is_aborted_by_callback() { Error::RequestBodyError(error.extra_description().map(str::to_owned)) } else if error.is_write_error() || error.is_partial_file() { Error::ResponseBodyError(error.extra_description().map(str::to_owned)) } else if error.is_ssl_connect_error() { Error::SSLConnectFailed(error.extra_description().map(str::to_owned)) } else if error.is_ssl_engine_initfailed() || error.is_ssl_engine_notfound() || error.is_ssl_engine_setfailed() { Error::SSLEngineError(error.extra_description().map(str::to_owned)) } else if error.is_operation_timedout() { Error::Timeout } else if error.is_too_many_redirects() { Error::TooManyRedirects } else { Error::Curl(error.description().to_owned()) } } } #[doc(hidden)] impl From<curl::MultiError> for Error { fn from(error: curl::MultiError) -> Error { Error::Curl(error.description().to_owned()) } } #[doc(hidden)] impl From<http::Error> for Error { fn from(error: http::Error) -> Error { Error::InvalidHttpFormat(error) } } #[doc(hidden)] impl From<io::Error> for Error { 
fn from(error: io::Error) -> Error { match error.kind() { io::ErrorKind::ConnectionRefused => Error::ConnectFailed, io::ErrorKind::TimedOut => Error::Timeout, _ => Error::Io(error), } } } #[doc(hidden)] impl From<Error> for io::Error {
} #[doc(hidden)] impl From<std::string::FromUtf8Error> for Error { fn from(_: std::string::FromUtf8Error) -> Error { Error::InvalidUtf8 } } #[doc(hidden)] impl From<std::str::Utf8Error> for Error { fn from(_: std::str::Utf8Error) -> Error { Error::InvalidUtf8 } }
fn from(error: Error) -> io::Error { match error { Error::ConnectFailed => io::ErrorKind::ConnectionRefused.into(), Error::Io(e) => e, Error::Timeout => io::ErrorKind::TimedOut.into(), _ => io::ErrorKind::Other.into(), } }
function_block-full_function
[ { "content": "/// Creates an interceptor from an arbitrary closure or function.\n\npub fn from_fn<F, E>(f: F) -> InterceptorFn<F>\n\nwhere\n\n F: for<'a> private::AsyncFn2<Request<Body>, Context<'a>, Output = Result<Response<Body>, E>> + Send + Sync + 'static,\n\n E: Into<Box<dyn Error>>,\n\n{\n\n InterceptorFn(f)\n\n}\n\n\n\n/// An interceptor created from an arbitrary closure or function. See\n\n/// [`from_fn`] for details.\n\npub struct InterceptorFn<F>(F);\n\n\n\nimpl<E, F> Interceptor for InterceptorFn<F>\n\nwhere\n\n E: Into<Box<dyn Error>>,\n\n F: for<'a> private::AsyncFn2<Request<Body>, Context<'a>, Output = Result<Response<Body>, E>> + Send + Sync + 'static,\n\n{\n\n type Err = E;\n\n\n\n fn intercept<'a>(&self, request: Request<Body>, ctx: Context<'a>) -> InterceptorFuture<'a, Self::Err> {\n", "file_path": "src/interceptor/mod.rs", "rank": 0, "score": 218804.52993193275 }, { "content": "/// Send an HTTP request and return the HTTP response.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. 
See\n\n/// [`HttpClient::send`] for details.\n\npub fn send<B: Into<Body>>(request: Request<B>) -> Result<Response<Body>, Error> {\n\n HttpClient::shared().send(request)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 208209.15668927744 }, { "content": "#[test_case(\"GET\")]\n\n#[test_case(\"HEAD\")]\n\n#[test_case(\"POST\")]\n\n#[test_case(\"PUT\")]\n\n#[test_case(\"DELETE\")]\n\n#[test_case(\"PATCH\")]\n\n#[test_case(\"FOOBAR\")]\n\nfn request_with_body_of_unknown_size_uses_chunked_encoding(method: &str) {\n\n let body = \"foo\";\n\n\n\n let m = mock!();\n\n\n\n Request::builder()\n\n .method(method)\n\n .uri(m.url())\n\n // This header should be ignored\n\n .header(\"transfer-encoding\", \"identity\")\n\n .body(Body::from_reader(body.as_bytes()))\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.request().method, method);\n\n m.request().expect_header(\"transfer-encoding\", \"chunked\");\n\n m.request().expect_body(body);\n\n}\n\n\n", "file_path": "tests/request_body.rs", "rank": 2, "score": 180409.26583624643 }, { "content": "#[allow(unsafe_code)]\n\nfn parse_cookie_value(mut bytes: &[u8]) -> Result<&str, ParseError> {\n\n // Strip quotes, but only if in a legal pair.\n\n if bytes.starts_with(b\"\\\"\") && bytes.ends_with(b\"\\\"\") {\n\n bytes = &bytes[1..bytes.len() - 2];\n\n }\n\n\n\n // Validate the bytes are all legal cookie octets.\n\n if !is_valid_cookie_value(bytes) {\n\n return Err(ParseError(()));\n\n }\n\n\n\n // Safety: We know that the given bytes are valid US-ASCII at this point, so\n\n // therefore it is also valid UTF-8.\n\n Ok(unsafe { str::from_utf8_unchecked(bytes) })\n\n}\n\n\n", "file_path": "src/cookies/cookie.rs", "rank": 3, "score": 174128.94538876816 }, { "content": "/// Create a waker from a closure.\n\nfn waker_fn(f: impl Fn() + Send + Sync + 'static) -> Waker {\n\n struct Impl<F>(F);\n\n\n\n impl<F: Fn() + Send + Sync + 'static> ArcWake for Impl<F> {\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n 
(&arc_self.0)()\n\n }\n\n }\n\n\n\n futures_util::task::waker(Arc::new(Impl(f)))\n\n}\n\n\n\n/// Helper methods for working with wakers.\n\npub(crate) trait WakerExt {\n\n /// Create a new waker from a closure that accepts this waker as an\n\n /// argument.\n\n fn chain(&self, f: impl Fn(&Waker) + Send + Sync + 'static) -> Waker;\n\n}\n\n\n\nimpl WakerExt for Waker {\n", "file_path": "src/task.rs", "rank": 4, "score": 168019.26815801952 }, { "content": "/// Send an HTTP request and return the HTTP response asynchronously.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::send_async`] for details.\n\npub fn send_async<B: Into<Body>>(request: Request<B>) -> ResponseFuture<'static> {\n\n HttpClient::shared().send_async(request)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 159427.00203785926 }, { "content": "/// Gets a human-readable string with the version number of Isahc and its\n\n/// dependencies.\n\n///\n\n/// This function can be helpful when troubleshooting issues in Isahc or one of\n\n/// its dependencies.\n\npub fn version() -> &'static str {\n\n static FEATURES_STRING: &str = include_str!(concat!(env!(\"OUT_DIR\"), \"/features.txt\"));\n\n static VERSION_STRING: Lazy<String> = Lazy::new(|| format!(\n\n \"isahc/{} (features:{}) {}\",\n\n env!(\"CARGO_PKG_VERSION\"),\n\n FEATURES_STRING,\n\n curl::Version::num(),\n\n ));\n\n\n\n &VERSION_STRING\n\n}\n", "file_path": "src/lib.rs", "rank": 6, "score": 155837.7196385549 }, { "content": "// http://tools.ietf.org/html/rfc6265#section-5.1.4\n\nfn path_matches(request_path: &str, cookie_path: &str) -> bool {\n\n if request_path == cookie_path {\n\n return true;\n\n }\n\n\n\n if request_path.starts_with(cookie_path)\n\n && (cookie_path.ends_with('/') || request_path[cookie_path.len()..].starts_with('/'))\n\n {\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/cookies/jar.rs", "rank": 7, "score": 154548.3966219597 }, { "content": 
"#[test]\n\nfn request_errors_if_read_timeout_is_reached() {\n\n // Spawn a slow server.\n\n let m = mock! {\n\n delay: 1s,\n\n };\n\n\n\n // Send a request with a timeout.\n\n let result = Request::post(m.url())\n\n .timeout(Duration::from_millis(500))\n\n .body(\"hello world\")\n\n .unwrap()\n\n .send();\n\n\n\n // Client should time-out.\n\n match result {\n\n Err(isahc::Error::Timeout) => {}\n\n e => {\n\n panic!(\"expected timeout error, got {:?}\", e);\n\n }\n\n }\n\n\n\n assert_eq!(m.requests().len(), 1);\n\n}\n", "file_path": "tests/timeouts.rs", "rank": 8, "score": 153145.92498077246 }, { "content": "/// Send a POST request to the given URI with a given request body.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::post`] for details.\n\npub fn post<U>(uri: U, body: impl Into<Body>) -> Result<Response<Body>, Error>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().post(uri, body)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 145255.09025155296 }, { "content": "/// Send a PUT request to the given URI with a given request body.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. 
See\n\n/// [`HttpClient::put`] for details.\n\npub fn put<U>(uri: U, body: impl Into<Body>) -> Result<Response<Body>, Error>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().put(uri, body)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 145255.09025155296 }, { "content": "#[allow(unsafe_code)]\n\nfn parse_token(bytes: &[u8]) -> Result<&str, ParseError> {\n\n if is_valid_token(bytes) {\n\n // Safety: We know that the given bytes are valid US-ASCII at this\n\n // point, so therefore it is also valid UTF-8.\n\n Ok(unsafe { str::from_utf8_unchecked(bytes) })\n\n } else {\n\n Err(ParseError(()))\n\n }\n\n}\n\n\n", "file_path": "src/cookies/cookie.rs", "rank": 11, "score": 139178.81672330588 }, { "content": "#[test_case(\"GET\")]\n\n#[test_case(\"HEAD\")]\n\n#[test_case(\"POST\")]\n\n#[test_case(\"PUT\")]\n\n#[test_case(\"DELETE\")]\n\n#[test_case(\"PATCH\")]\n\n#[test_case(\"FOOBAR\")]\n\nfn request_with_body_of_known_size(method: &str) {\n\n let body = \"MyVariableOne=ValueOne&MyVariableTwo=ValueTwo\";\n\n\n\n let m = mock!();\n\n\n\n Request::builder()\n\n .method(method)\n\n .uri(m.url())\n\n .header(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\n .body(body)\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.request().method, method);\n\n m.request().expect_header(\"content-length\", body.len().to_string());\n\n m.request().expect_header(\"content-type\", \"application/x-www-form-urlencoded\");\n\n m.request().expect_body(body);\n\n}\n\n\n", "file_path": "tests/request_body.rs", "rank": 12, "score": 136814.0784705895 }, { "content": "// http://tools.ietf.org/html/rfc6265#section-5.1.3\n\nfn domain_matches(string: &str, domain_string: &str) -> bool {\n\n if domain_string.eq_ignore_ascii_case(string) {\n\n return true;\n\n }\n\n\n\n let string = &string.to_lowercase();\n\n let domain_string = &domain_string.to_lowercase();\n\n\n\n 
string.ends_with(domain_string)\n\n && string.as_bytes()[string.len() - domain_string.len() - 1] == b'.'\n\n && string.parse::<Ipv4Addr>().is_err()\n\n && string.parse::<Ipv6Addr>().is_err()\n\n}\n\n\n", "file_path": "src/cookies/jar.rs", "rank": 13, "score": 131430.7514475379 }, { "content": "#[test]\n\nfn redirect_limit_is_respected() {\n\n let m = mock! {\n\n status: 301,\n\n headers {\n\n \"Location\": \"/next\",\n\n }\n\n };\n\n\n\n let result = Request::get(m.url())\n\n .redirect_policy(RedirectPolicy::Limit(5))\n\n .body(())\n\n .unwrap()\n\n .send();\n\n\n\n // Request should error with too many redirects.\n\n assert!(match result {\n\n Err(isahc::Error::TooManyRedirects) => true,\n\n _ => false,\n\n });\n\n\n\n // After request (limit + 1) that returns a redirect should error.\n\n assert_eq!(m.requests().len(), 6);\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 14, "score": 129997.13549177392 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n let count = env::args()\n\n .nth(1)\n\n .and_then(|s| s.parse::<u32>().ok())\n\n .unwrap_or(100);\n\n\n\n let urls: Vec<String> = (0..count)\n\n .map(|i| format!(\"https://httpbin.org/anything/{:03}\", i))\n\n .collect();\n\n let client = HttpClient::new()?;\n\n\n\n let start = Instant::now();\n\n\n\n // Iterate over each URL and send a request in parallel.\n\n urls.par_iter()\n\n .try_for_each(|url| {\n\n let start = Instant::now();\n\n let response = client.get(url)?;\n\n let end = Instant::now();\n\n println!(\n", "file_path": "examples/parallel_requests.rs", "rank": 15, "score": 129200.21894649562 }, { "content": "/// Send a HEAD request to the given URI.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. 
See\n\n/// [`HttpClient::head`] for details.\n\npub fn head<U>(uri: U) -> Result<Response<Body>, Error>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().head(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 122519.15518895318 }, { "content": "/// Send a DELETE request to the given URI.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::delete`] for details.\n\npub fn delete<U>(uri: U) -> Result<Response<Body>, Error>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().delete(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 122519.15518895318 }, { "content": "/// Send a GET request to the given URI.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::get`] for details.\n\npub fn get<U>(uri: U) -> Result<Response<Body>, Error>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().get(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 18, "score": 122519.15518895318 }, { "content": "#[test]\n\nfn redirect_non_rewindable_body_returns_error() {\n\n let m2 = mock!();\n\n let location = m2.url();\n\n\n\n let m1 = mock! 
{\n\n status: 307,\n\n headers {\n\n \"Location\": location,\n\n }\n\n };\n\n\n\n // Create a streaming body of unknown size.\n\n let upload_stream = Body::from_reader(Body::from_bytes(b\"hello world\"));\n\n\n\n let result = Request::post(m1.url())\n\n .redirect_policy(RedirectPolicy::Follow)\n\n .body(upload_stream)\n\n .unwrap()\n\n .send();\n\n\n\n assert!(matches!(result, Err(isahc::Error::RequestBodyError(_))));\n\n assert_eq!(m1.request().method, \"POST\");\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 19, "score": 118711.23081947991 }, { "content": "#[test_case(307)]\n\n#[test_case(308)]\n\nfn redirect_also_sends_post(status: u16) {\n\n let m2 = mock!();\n\n let location = m2.url();\n\n\n\n let m1 = mock! {\n\n status: status,\n\n headers {\n\n \"Location\": location,\n\n }\n\n };\n\n\n\n let response = Request::post(m1.url())\n\n .redirect_policy(RedirectPolicy::Follow)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(response.status(), 200);\n\n assert_eq!(response.effective_uri().unwrap().to_string(), m2.url());\n\n\n\n assert_eq!(m1.request().method, \"POST\");\n\n assert_eq!(m2.request().method, \"POST\");\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 20, "score": 115700.62269195657 }, { "content": "#[test]\n\nfn send_request_to_unix_socket() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let socket_path = temp_dir.path().join(\"test.sock\");\n\n let listener = UnixListener::bind(&socket_path).unwrap();\n\n\n\n thread::spawn(move || {\n\n let (mut stream, _) = listener.accept().unwrap();\n\n let mut reader = stream.try_clone().unwrap();\n\n\n\n thread::spawn(move || {\n\n io::copy(&mut reader, &mut io::sink()).unwrap();\n\n });\n\n\n\n stream.write_all(b\"\\\n\n HTTP/1.1 200 OK\\r\\n\\\n\n Content-Length: 8\\r\\n\\\n\n \\r\\n\\\n\n success\\n\\\n\n \").unwrap();\n\n });\n", "file_path": "tests/unix.rs", "rank": 21, "score": 115513.74692299558 }, { "content": 
"#[ignore]\n\n#[test_case(\"GET\")]\n\n#[test_case(\"HEAD\")]\n\n#[test_case(\"POST\")]\n\n#[test_case(\"PUT\")]\n\n#[test_case(\"DELETE\")]\n\n#[test_case(\"PATCH\")]\n\n#[test_case(\"FOOBAR\")]\n\nfn content_length_header_takes_precedence_over_body_objects_length(method: &str) {\n\n let m = mock!();\n\n\n\n Request::builder()\n\n .method(method)\n\n .uri(m.url())\n\n // Override given body's length\n\n .header(\"content-length\", \"3\")\n\n .body(\"abc123\")\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.request().method, method);\n\n m.request().expect_header(\"content-length\", \"3\");\n\n m.request().expect_body(\"abc\"); // truncated to 3 bytes\n\n}\n", "file_path": "tests/request_body.rs", "rank": 22, "score": 115296.39275593252 }, { "content": "#[test]\n\nfn unknown_content_encoding_returns_error() {\n\n let m = mock! {\n\n headers {\n\n \"Content-Encoding\": \"foo\",\n\n }\n\n body: \"hello world\",\n\n };\n\n\n\n let result = Request::get(m.url())\n\n .header(\"Accept-Encoding\", \"deflate\")\n\n .body(())\n\n .unwrap()\n\n .send();\n\n\n\n match result {\n\n Err(isahc::Error::InvalidContentEncoding(_)) => {}\n\n _ => panic!(\"expected unknown encoding error, instead got {:?}\", result),\n\n };\n\n\n\n m.request().expect_header(\"Accept-Encoding\", \"deflate\");\n\n}\n", "file_path": "tests/encoding.rs", "rank": 23, "score": 111785.18360104674 }, { "content": "/// Send a POST request to the given URI asynchronously with a given request\n\n/// body.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. 
See\n\n/// [`HttpClient::post_async`] for details.\n\npub fn post_async<U>(uri: U, body: impl Into<Body>) -> ResponseFuture<'static>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().post_async(uri, body)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 111455.63610203938 }, { "content": "/// Send a PUT request to the given URI asynchronously with a given request\n\n/// body.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::put_async`] for details.\n\npub fn put_async<U>(uri: U, body: impl Into<Body>) -> ResponseFuture<'static>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().put_async(uri, body)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 111455.63610203938 }, { "content": "/// Execute a given closure with a reference to the list cache. If the list is\n\n/// out of date, attempt to refresh it first before continuing.\n\nfn with_cache<T>(f: impl FnOnce(&ListCache) -> T) -> T {\n\n let cache = CACHE.upgradable_read();\n\n\n\n // First check if the list needs to be refreshed.\n\n if cache.needs_refreshed() {\n\n // Upgrade our lock to gain write access.\n\n let mut cache = RwLockUpgradableReadGuard::upgrade(cache);\n\n\n\n // If there was contention then the cache might not need refreshed any\n\n // more.\n\n if cache.needs_refreshed() {\n\n if let Err(e) = cache.refresh() {\n\n tracing::warn!(\"could not refresh public suffix list: {}\", e);\n\n }\n\n }\n\n\n\n f(&*cache)\n\n } else {\n\n f(&*cache)\n\n }\n\n}\n", "file_path": "src/cookies/psl/mod.rs", "rank": 26, "score": 108273.48872495649 }, { "content": "/// Base trait for interceptors.\n\n///\n\n/// Since clients may be used to send requests concurrently, all interceptors\n\n/// must be synchronized and must be able to account for multiple requests being\n\n/// made in parallel.\n\npub 
trait Interceptor: Send + Sync {\n\n /// The type of error returned by this interceptor.\n\n type Err: Into<Box<dyn Error>>;\n\n\n\n /// Intercept a request, returning a response.\n\n ///\n\n /// The returned future is allowed to borrow the interceptor for the\n\n /// duration of its execution.\n\n fn intercept<'a>(&'a self, request: Request<Body>, ctx: Context<'a>) -> InterceptorFuture<'a, Self::Err>;\n\n}\n\n\n\n/// The type of future returned by an interceptor.\n\npub type InterceptorFuture<'a, E> = BoxFuture<'a, Result<Response<Body>, E>>;\n\n\n", "file_path": "src/interceptor/mod.rs", "rank": 27, "score": 106820.33320261279 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let request = ShoutCloudRequest {\n\n input: \"hello world\".into(),\n\n };\n\n\n\n let response = Request::post(\"HTTP://API.SHOUTCLOUD.IO/V1/SHOUT\")\n\n .header(\"content-type\", \"application/json\")\n\n .body(serde_json::to_vec(&request)?)?\n\n .send()?\n\n .json::<ShoutCloudResponse>()?;\n\n\n\n println!(\"Response: {:?}\", response);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/json.rs", "rank": 28, "score": 105498.97738200493 }, { "content": "// https://tools.ietf.org/html/rfc2616#section-2.2\n\nfn is_valid_token(bytes: &[u8]) -> bool {\n\n const SEPARATORS: &[u8] = b\"()<>@,;:\\\\\\\"/[]?={} \\t\";\n\n\n\n bytes\n\n .iter()\n\n .all(|byte| byte.is_ascii() && !byte.is_ascii_control() && !SEPARATORS.contains(byte))\n\n}\n\n\n", "file_path": "src/cookies/cookie.rs", "rank": 29, "score": 104096.74955997331 }, { "content": "#[test]\n\nfn no_proxy() {\n\n let m = mock!();\n\n\n\n Request::get(m.url())\n\n .proxy(None)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.requests().len(), 1);\n\n}\n\n\n", "file_path": "tests/proxy.rs", "rank": 30, "score": 101996.36837825004 }, { "content": "fn benchmark(c: &mut Criterion) {\n\n c.bench_function(\"download 64K: curl\", move |b| {\n\n let server = TestServer::static_response(&DATA);\n\n let 
endpoint = server.endpoint();\n\n\n\n b.iter_batched(\n\n || {\n\n let mut easy = curl::easy::Easy::new();\n\n easy.url(&endpoint).unwrap();\n\n easy\n\n },\n\n |mut easy| {\n\n let mut sink = sink();\n\n let mut transfer = easy.transfer();\n\n\n\n transfer\n\n .write_function(|bytes| {\n\n sink.write_all(bytes).unwrap();\n\n Ok(bytes.len())\n\n })\n", "file_path": "benchmarks/benches/download.rs", "rank": 31, "score": 101874.18005738642 }, { "content": "/// A responder is a request-response handler responsible for producing the\n\n/// responses returned by a mock endpoint.\n\n///\n\n/// Responders are not responsible for doing any assertions.\n\npub trait Responder: Send + Sync + 'static {\n\n fn respond(&self, request: Request) -> Option<Response>;\n\n}\n\n\n\n/// Simple responder that returns a general response.\n\npub struct DefaultResponder;\n\n\n\nimpl Responder for DefaultResponder {\n\n fn respond(&self, _: Request) -> Option<Response> {\n\n Some(Response::default())\n\n }\n\n}\n", "file_path": "testserver/src/responder.rs", "rank": 32, "score": 101305.55067597878 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n futures::executor::block_on(async {\n\n let mut response = isahc::get_async(\"http://example.org\").await?;\n\n\n\n println!(\"Status: {}\", response.status());\n\n println!(\"Headers:\\n{:?}\", response.headers());\n\n println!(\"Body: {}\", response.text_async().await?);\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/async.rs", "rank": 33, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n let response = Request::get(\"https://nghttp2.org\")\n\n .version_negotiation(VersionNegotiation::http2())\n\n .body(())\n\n .map_err(Into::into)\n\n .and_then(isahc::send)?;\n\n\n\n println!(\"{:#?}\", response.headers());\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/http2.rs", "rank": 34, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n // Send a GET request and 
wait for the response headers.\n\n // Must be `mut` so we can read the response body.\n\n let mut response = isahc::get(\"http://example.org\")?;\n\n\n\n // Print some basic info about the response to standard output.\n\n println!(\"Status: {}\", response.status());\n\n println!(\"Headers: {:#?}\", response.headers());\n\n\n\n // Read the response body as text into a string and print it.\n\n print!(\"{}\", response.text()?);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/simple.rs", "rank": 35, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n let options = Options::from_args();\n\n\n\n let bar = ProgressBar::new(0).with_style(\n\n ProgressStyle::default_bar()\n\n .template(\"{bar:40.cyan/blue} {bytes:>7}/{total_bytes:7} {msg}\"),\n\n );\n\n\n\n let mut response = Request::get(options.url).metrics(true).body(())?.send()?;\n\n let metrics = response.metrics().unwrap().clone();\n\n let body = response.body_mut();\n\n let mut buf = [0; 16384 * 4];\n\n\n\n loop {\n\n match body.read(&mut buf) {\n\n Ok(0) => {\n\n bar.finish();\n\n break;\n\n }\n\n Ok(_) => {\n", "file_path": "examples/progress.rs", "rank": 36, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n tracing_subscriber::fmt::init();\n\n\n\n let mut response = isahc::get(\"https://example.org\")?;\n\n\n\n // Consume the response stream quietly.\n\n std::io::copy(response.body_mut(), &mut std::io::sink())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/tracing.rs", "rank": 37, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n env_logger::init();\n\n\n\n let mut response = isahc::get(\"https://example.org\")?;\n\n\n\n // Consume the response stream quietly.\n\n std::io::copy(response.body_mut(), &mut std::io::sink())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/logging.rs", "rank": 38, "score": 100873.67098240062 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n // Create a custom 
client instance and customize a couple things different\n\n // than the default settings. Check the documentation of `HttpClient` and\n\n // `Configurable` for everything that can be customized.\n\n let client = HttpClient::builder()\n\n .timeout(Duration::from_secs(5))\n\n .redirect_policy(RedirectPolicy::Follow)\n\n .build()?;\n\n\n\n let mut response = client.get(\"https://rust-lang.org\")?;\n\n\n\n // Copy the response body directly to stdout.\n\n copy(response.body_mut(), &mut stdout())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/client.rs", "rank": 39, "score": 100873.67098240062 }, { "content": "// https://tools.ietf.org/html/rfc6265#section-4.1.1\n\nfn is_valid_cookie_value(bytes: &[u8]) -> bool {\n\n bytes\n\n .iter()\n\n .all(|&byte| matches!(byte, 0x21 | 0x23..=0x2B | 0x2D..=0x3A | 0x3C..=0x5B | 0x5D..=0x7E))\n\n}\n\n\n\n// https://tools.ietf.org/html/rfc2616#section-2.2\n", "file_path": "src/cookies/cookie.rs", "rank": 40, "score": 100836.31415314239 }, { "content": "#[test]\n\nfn http_proxy() {\n\n // URI of our test server, which we will treat as a proxy.\n\n let m = mock!();\n\n let proxy = m.url().parse::<http::Uri>().unwrap();\n\n\n\n // Fake upstream URI to connect to.\n\n let upstream = \"http://127.0.0.2:1234/\".parse::<http::Uri>().unwrap();\n\n\n\n Request::get(upstream.clone())\n\n .proxy(proxy)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n // We should receive the request instead, following the HTTP proxy\n\n // protocol. 
The request-target should be the absolute URI of our\n\n // upstream request target (see [RFC\n\n // 7230](https://tools.ietf.org/html/rfc7230), sections 5.3 and 5.7).\n\n assert_eq!(m.request().url, upstream.to_string());\n\n // Host should be the upstream authority, not the proxy host.\n\n m.request().expect_header(\"host\", upstream.authority().unwrap().as_str());\n\n m.request().expect_header(\"proxy-connection\", \"Keep-Alive\");\n\n}\n\n\n", "file_path": "tests/proxy.rs", "rank": 41, "score": 99049.38932694017 }, { "content": "#[test]\n\n#[cfg_attr(tarpaulin, ignore)]\n\nfn socks4_proxy() {\n\n // Set up a simple SOCKS4 proxy.\n\n let proxy_server = Socks4Server::new(\"127.0.0.1:0\").unwrap();\n\n\n\n // Create the proxy URI for our listener.\n\n let proxy_uri = http::Uri::builder()\n\n .scheme(\"socks4\")\n\n .authority(proxy_server.addr().to_string().as_str())\n\n .path_and_query(\"/\")\n\n .build()\n\n .unwrap();\n\n\n\n // Run the proxy server in the background.\n\n proxy_server.spawn();\n\n\n\n // Set up our upstream HTTP test server.\n\n let m = mock!();\n\n\n\n // Send a request...\n\n Request::get(m.url())\n", "file_path": "tests/proxy.rs", "rank": 42, "score": 99049.38932694017 }, { "content": "/// Extension methods on an HTTP request.\n\npub trait RequestExt<T> {\n\n /// Create a new request builder with the method, URI, and headers cloned\n\n /// from this request.\n\n ///\n\n /// Note that third-party extensions are not cloned.\n\n fn to_builder(&self) -> http::request::Builder;\n\n\n\n /// Send the HTTP request synchronously using the default client.\n\n ///\n\n /// This is a convenience method that is equivalent to\n\n /// [`send`](crate::send).\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use isahc::prelude::*;\n\n ///\n\n /// let response = Request::post(\"https://httpbin.org/post\")\n\n /// .header(\"Content-Type\", \"application/json\")\n\n /// .body(r#\"{\n", "file_path": "src/request.rs", "rank": 43, "score": 
98457.38866633731 }, { "content": "fn main() -> Result<(), isahc::Error> {\n\n block_on(async {\n\n // Open a response stream.\n\n let response = isahc::get_async(\"https://www.rust-lang.org\").await?;\n\n\n\n let mut buf = [0; 8192];\n\n let mut offset = 0;\n\n let mut reader = response.into_body();\n\n\n\n // Set up a loop where we continuously read from the stream.\n\n loop {\n\n match reader.read(&mut buf).await? {\n\n // Zero bytes read, we hit EOF with no question marks.\n\n 0 => {\n\n println!(\"Download complete! No '?' byte of all {} bytes.\", offset);\n\n return Ok(());\n\n }\n\n // At least one byte was read.\n\n len => {\n\n // Check to dee if there's any question marks this time\n", "file_path": "examples/stream_cancellation.rs", "rank": 44, "score": 98359.50270959694 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let out_dir = PathBuf::from(env::var(\"OUT_DIR\")?);\n\n fs::write(out_dir.join(\"features.txt\"), get_feature_string())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 45, "score": 98250.55233924888 }, { "content": "// http://tools.ietf.org/html/rfc6265#section-5.1.4\n\nfn default_path(uri: &Uri) -> &str {\n\n // Step 2\n\n if !uri.path().starts_with('/') {\n\n return \"/\";\n\n }\n\n\n\n // Step 3\n\n let rightmost_slash_idx = uri.path().rfind('/').unwrap();\n\n if rightmost_slash_idx == 0 {\n\n // There's only one slash; it's the first character.\n\n return \"/\";\n\n }\n\n\n\n // Step 4\n\n &uri.path()[..rightmost_slash_idx]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/cookies/jar.rs", "rank": 46, "score": 96706.76987156933 }, { "content": "#[test]\n\nfn proxy_blacklist_works() {\n\n // This time, the proxy is the fake one.\n\n let proxy = \"http://127.0.0.2:1234/\".parse::<http::Uri>().unwrap();\n\n\n\n // Our test server is upstream (we don't expect the proxy to be used).\n\n let m = mock!();\n\n let upstream = m.url().parse::<http::Uri>().unwrap();\n\n\n\n 
Request::get(&upstream)\n\n .proxy(proxy)\n\n // Exclude our upstream from the proxy we set.\n\n .proxy_blacklist(Some(upstream.host().unwrap().to_string()))\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.requests().len(), 1);\n\n}\n", "file_path": "tests/proxy.rs", "rank": 47, "score": 96315.60824727878 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n // Create a new cookie jar.\n\n let cookie_jar = CookieJar::new();\n\n\n\n // Send a request to a server that sets a cookie.\n\n let uri = \"http://httpbin.org/cookies/set?foo=bar&baz=123\".parse()?;\n\n let _response = Request::get(&uri)\n\n // Set the cookie jar to use for this request.\n\n .cookie_jar(cookie_jar.clone())\n\n .body(())?\n\n .send()?;\n\n\n\n // Print all cookies relevant to the URL.\n\n for cookie in cookie_jar.get_for_uri(&uri) {\n\n println!(\"Cookie set: {} = {}\", cookie.name(), cookie.value());\n\n }\n\n\n\n // Send another request. The cookies previously set by the server will be\n\n // returned to it.\n\n let mut response = Request::get(\"http://httpbin.org/cookies\")\n\n .cookie_jar(cookie_jar.clone())\n\n .body(())?\n\n .send()?;\n\n\n\n println!(\"Cookies received by server: {}\", response.text()?);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/cookies.rs", "rank": 48, "score": 95736.38406644519 }, { "content": "#[test]\n\nfn headers_are_reset_every_redirect() {\n\n let m2 = mock! {\n\n status: 200,\n\n headers {\n\n \"X-Foo\": \"bbb\",\n\n \"X-Baz\": \"zzz\",\n\n }\n\n };\n\n let location = m2.url();\n\n\n\n let m1 = mock! 
{\n\n status: 301,\n\n headers {\n\n \"Location\": location,\n\n \"X-Foo\": \"aaa\",\n\n \"X-Bar\": \"zzz\",\n\n }\n\n };\n\n\n\n let response = Request::get(m1.url())\n", "file_path": "tests/redirects.rs", "rank": 49, "score": 93642.0553463464 }, { "content": "/// Provides additional methods when building a request for configuring various\n\n/// execution-related options on how the request should be sent.\n\n///\n\n/// This trait can be used to either configure requests individually by invoking\n\n/// them on an [`http::request::Builder`], or to configure the default settings\n\n/// for an [`HttpClient`](crate::HttpClient) by invoking them on an\n\n/// [`HttpClientBuilder`](crate::HttpClientBuilder).\n\n///\n\n/// This trait is sealed and cannot be implemented for types outside of Isahc.\n\npub trait Configurable: internal::ConfigurableBase {\n\n /// Set a maximum amount of time that a request is allowed to take before\n\n /// being aborted.\n\n ///\n\n /// If not set, no timeout will be enforced.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use isahc::prelude::*;\n\n /// use std::time::Duration;\n\n ///\n\n /// // This page is too slow and won't respond in time.\n\n /// let response = Request::get(\"https://httpbin.org/delay/10\")\n\n /// .timeout(Duration::from_secs(5))\n\n /// .body(())?\n\n /// .send()\n\n /// .expect_err(\"page should time out\");\n\n /// # Ok::<(), isahc::Error>(())\n\n /// ```\n", "file_path": "src/config/mod.rs", "rank": 50, "score": 92397.04209673064 }, { "content": "fn trim_left_ascii(mut ascii: &[u8]) -> &[u8] {\n\n while ascii.first() == Some(&b' ') {\n\n ascii = &ascii[1..];\n\n }\n\n\n\n ascii\n\n}\n\n\n", "file_path": "src/cookies/cookie.rs", "rank": 51, "score": 90170.4905934559 }, { "content": "#[test]\n\nfn response_301_no_follow() {\n\n let m = mock! 
{\n\n status: 301,\n\n headers {\n\n \"Location\": \"/2\",\n\n }\n\n };\n\n\n\n let response = isahc::get(m.url()).unwrap();\n\n\n\n assert_eq!(response.status(), 301);\n\n assert_eq!(response.headers()[\"Location\"], \"/2\");\n\n assert_eq!(response.effective_uri().unwrap().path(), \"/\");\n\n\n\n assert!(!m.requests().is_empty());\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 52, "score": 89052.34714215485 }, { "content": "#[test]\n\nfn delete_request() {\n\n let m = mock!();\n\n\n\n isahc::delete(m.url()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"DELETE\");\n\n}\n\n\n", "file_path": "tests/methods.rs", "rank": 53, "score": 88899.1740109839 }, { "content": "#[test]\n\nfn head_request() {\n\n let m = mock!();\n\n\n\n isahc::head(m.url()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"HEAD\");\n\n}\n\n\n", "file_path": "tests/methods.rs", "rank": 54, "score": 88899.1740109839 }, { "content": "#[test]\n\nfn get_request() {\n\n let m = mock!();\n\n\n\n isahc::get(m.url()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"GET\");\n\n}\n\n\n", "file_path": "tests/methods.rs", "rank": 55, "score": 88899.1740109839 }, { "content": "#[test]\n\nfn post_request() {\n\n let m = mock!();\n\n\n\n isahc::post(m.url(), ()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"POST\");\n\n}\n\n\n", "file_path": "tests/methods.rs", "rank": 56, "score": 88899.1740109839 }, { "content": "#[test]\n\nfn put_request() {\n\n let m = mock!();\n\n\n\n isahc::put(m.url(), ()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"PUT\");\n\n}\n\n\n", "file_path": "tests/methods.rs", "rank": 57, "score": 88899.1740109839 }, { "content": "#[test]\n\nfn response_301_auto_follow() {\n\n let m2 = mock! {\n\n status: 200,\n\n body: \"ok\",\n\n };\n\n let location = m2.url();\n\n\n\n let m1 = mock! 
{\n\n status: 301,\n\n headers {\n\n \"Location\": location,\n\n }\n\n };\n\n\n\n let mut response = Request::get(m1.url())\n\n .redirect_policy(RedirectPolicy::Follow)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(response.status(), 200);\n\n assert_eq!(response.text().unwrap(), \"ok\");\n\n assert_eq!(response.effective_uri().unwrap().to_string(), m2.url());\n\n\n\n assert!(!m1.requests().is_empty());\n\n assert!(!m2.requests().is_empty());\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 58, "score": 86069.60970332034 }, { "content": "#[doc(hidden)]\n\npub trait ConfigurableBase: Sized {\n\n /// Configure this object with the given property, returning the configured\n\n /// self.\n\n #[doc(hidden)]\n\n fn configure(self, property: impl Send + Sync + 'static) -> Self;\n\n}\n\n\n\n/// A helper trait for applying a configuration value to a given curl handle.\n\npub(crate) trait SetOpt {\n\n /// Apply this configuration property to the given curl handle.\n\n #[doc(hidden)]\n\n fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error>;\n\n}\n", "file_path": "src/config/internal.rs", "rank": 59, "score": 85928.19754936741 }, { "content": "#[test]\n\nfn arbitrary_foobar_request() {\n\n let m = mock!();\n\n\n\n Request::builder()\n\n .method(\"FOOBAR\")\n\n .uri(m.url())\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.request().method, \"FOOBAR\");\n\n}\n", "file_path": "tests/methods.rs", "rank": 60, "score": 85923.022002182 }, { "content": "#[test_case(301)]\n\n#[test_case(302)]\n\n#[test_case(303)]\n\nfn redirect_changes_post_to_get(status: u16) {\n\n let m2 = mock!();\n\n let location = m2.url();\n\n\n\n let m1 = mock! 
{\n\n status: status,\n\n headers {\n\n \"Location\": location,\n\n }\n\n };\n\n\n\n let response = Request::post(m1.url())\n\n .redirect_policy(RedirectPolicy::Follow)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(response.status(), 200);\n\n assert_eq!(response.effective_uri().unwrap().to_string(), m2.url());\n\n\n\n assert_eq!(m1.request().method, \"POST\");\n\n assert_eq!(m2.request().method, \"GET\");\n\n}\n\n\n", "file_path": "tests/redirects.rs", "rank": 61, "score": 85840.8169883111 }, { "content": "#[test]\n\nfn accept_headers_populated_by_default() {\n\n let m = mock!();\n\n\n\n isahc::get(m.url()).unwrap();\n\n\n\n m.request().expect_header(\"accept\", \"*/*\");\n\n m.request().expect_header(\"accept-encoding\", \"deflate, gzip\");\n\n}\n\n\n", "file_path": "tests/headers.rs", "rank": 62, "score": 83490.37582814303 }, { "content": "/// Send a DELETE request to the given URI asynchronously.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::delete_async`] for details.\n\npub fn delete_async<U>(uri: U) -> ResponseFuture<'static>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().delete_async(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 83155.49908394346 }, { "content": "/// Send a GET request to the given URI asynchronously.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. See\n\n/// [`HttpClient::get_async`] for details.\n\npub fn get_async<U>(uri: U) -> ResponseFuture<'static>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().get_async(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 64, "score": 83155.49908394346 }, { "content": "/// Send a HEAD request to the given URI asynchronously.\n\n///\n\n/// The request is executed using a shared [`HttpClient`] instance. 
See\n\n/// [`HttpClient::head_async`] for details.\n\npub fn head_async<U>(uri: U) -> ResponseFuture<'static>\n\nwhere\n\n http::Uri: TryFrom<U>,\n\n <http::Uri as TryFrom<U>>::Error: Into<http::Error>,\n\n{\n\n HttpClient::shared().head_async(uri)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 65, "score": 83155.49908394346 }, { "content": "#[test]\n\nfn credentials_without_auth_config_does_nothing() {\n\n let m = mock!();\n\n\n\n Request::get(m.url())\n\n .credentials(Credentials::new(\"clark\", \"querty\"))\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(m.request().get_header(\"authorization\").count(), 0);\n\n}\n\n\n", "file_path": "tests/auth.rs", "rank": 66, "score": 80974.43630408752 }, { "content": "#[test]\n\nfn content_is_decoded_even_if_not_listed_as_accepted() {\n\n let body = \"hello world\";\n\n let mut body_encoded = Vec::new();\n\n\n\n GzEncoder::new(body.as_bytes(), Compression::default())\n\n .read_to_end(&mut body_encoded)\n\n .unwrap();\n\n\n\n let m = mock! 
{\n\n headers {\n\n \"Content-Encoding\": \"gzip\",\n\n }\n\n body: body_encoded.clone(),\n\n };\n\n\n\n let mut response = Request::get(m.url())\n\n .header(\"Accept-Encoding\", \"deflate\")\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(response.text().unwrap(), body);\n\n m.request().expect_header(\"Accept-Encoding\", \"deflate\");\n\n}\n\n\n", "file_path": "tests/encoding.rs", "rank": 67, "score": 80964.00107356049 }, { "content": "#[test]\n\nfn basic_auth_sends_authorization_header() {\n\n let m = mock!();\n\n\n\n Request::get(m.url())\n\n .authentication(Authentication::basic())\n\n .credentials(Credentials::new(\"clark\", \"querty\"))\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n // base64\n\n m.request().expect_header(\"authorization\", \"Basic Y2xhcms6cXVlcnR5\");\n\n}\n\n\n", "file_path": "tests/auth.rs", "rank": 68, "score": 80878.87542327047 }, { "content": "#[test]\n\nfn auto_referer_sets_expected_header() {\n\n let m3 = mock!();\n\n\n\n let m2 = {\n\n let location = m3.url();\n\n mock! {\n\n status: 301,\n\n headers {\n\n \"Location\": location,\n\n }\n\n }\n\n };\n\n\n\n let m1 = {\n\n let location = m2.url();\n\n mock! {\n\n status: 301,\n\n headers {\n\n \"Location\": location,\n\n }\n", "file_path": "tests/redirects.rs", "rank": 69, "score": 80812.64044330723 }, { "content": "#[test]\n\nfn request_gzip_without_automatic_decompression() {\n\n let body = \"hello world\";\n\n let mut body_encoded = Vec::new();\n\n\n\n GzEncoder::new(body.as_bytes(), Compression::default())\n\n .read_to_end(&mut body_encoded)\n\n .unwrap();\n\n\n\n let m = {\n\n let body_encoded = body_encoded.clone();\n\n mock! 
{\n\n headers {\n\n \"Content-Encoding\": \"gzip\",\n\n }\n\n body: body_encoded.clone(),\n\n }\n\n };\n\n\n\n let mut response = Request::get(m.url())\n\n .header(\"Accept-Encoding\", \"gzip\")\n", "file_path": "tests/encoding.rs", "rank": 70, "score": 80677.65932972654 }, { "content": "#[test]\n\nfn dropping_client_does_not_abort_response_transfer() {\n\n let body = \"hello world\\n\".repeat(8192);\n\n let m = {\n\n let body = body.clone();\n\n mock! {\n\n body: body.clone(),\n\n }\n\n };\n\n\n\n let client = isahc::HttpClient::new().unwrap();\n\n let mut response = client.get(m.url()).unwrap();\n\n drop(client);\n\n\n\n assert_eq!(response.text().unwrap().len(), body.len());\n\n}\n\n\n\n// See issue #72.\n", "file_path": "tests/response_body.rs", "rank": 71, "score": 78640.08097361834 }, { "content": "#[test]\n\nfn setting_an_empty_header_sends_a_header_with_no_value() {\n\n let m = mock!();\n\n\n\n Request::get(m.url())\n\n .header(\"an-empty-header\", \"\")\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n m.request().expect_header(\"an-empty-header\", \"\");\n\n}\n\n\n\n// Issue [#209](https://github.com/sagebind/isahc/issues/209)\n", "file_path": "tests/headers.rs", "rank": 72, "score": 78548.15920458373 }, { "content": "#[test]\n\nfn setting_a_blank_header_sends_a_header_with_no_value() {\n\n let m = mock!();\n\n\n\n Request::get(m.url())\n\n .header(\"an-empty-header\", \" \")\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n m.request().expect_header(\"an-empty-header\", \"\");\n\n}\n\n\n\n// Issue [#190](https://github.com/sagebind/isahc/issues/190)\n", "file_path": "tests/headers.rs", "rank": 73, "score": 78548.15920458373 }, { "content": "#[test]\n\nfn response_body_with_chunked_encoding_has_unknown_size() {\n\n let m = mock! 
{\n\n body: \"hello world\",\n\n transfer_encoding: true,\n\n };\n\n\n\n let response = isahc::get(m.url()).unwrap();\n\n\n\n assert_eq!(response.body().len(), None);\n\n}\n\n\n\n// See issue #64.\n", "file_path": "tests/response_body.rs", "rank": 74, "score": 76458.28785249993 }, { "content": "#[test]\n\nfn headers_in_request_builder_must_override_headers_in_httpclient_builder() {\n\n let m = mock!();\n\n\n\n let client = HttpClient::builder()\n\n .default_header(\"X-header\", \"some-value1\")\n\n .build()\n\n .unwrap();\n\n\n\n let request = Request::builder()\n\n .method(\"GET\")\n\n .header(\"X-header\", \"some-value2\")\n\n .uri(m.url())\n\n .body(())\n\n .unwrap();\n\n\n\n let _ = client.send(request).unwrap();\n\n\n\n m.request().expect_header(\"accept\", \"*/*\");\n\n m.request().expect_header(\"accept-encoding\", \"deflate, gzip\");\n\n m.request().expect_header(\"X-header\", \"some-value2\");\n\n}\n\n\n", "file_path": "tests/headers.rs", "rank": 75, "score": 74201.72641676813 }, { "content": "#[test]\n\nfn headers_in_request_builder_must_override_multiple_headers_in_httpclient_builder() {\n\n let m = mock!();\n\n\n\n let client = HttpClient::builder()\n\n .default_header(\"X-header\", \"some-value1\")\n\n .default_header(\"X-header\", \"some-value2\")\n\n .build()\n\n .unwrap();\n\n\n\n let request = Request::builder()\n\n .method(\"GET\")\n\n .header(\"X-header\", \"some-value3\")\n\n .uri(m.url())\n\n .body(())\n\n .unwrap();\n\n\n\n let _ = client.send(request).unwrap();\n\n\n\n m.request().expect_header(\"accept\", \"*/*\");\n\n m.request().expect_header(\"accept-encoding\", \"deflate, gzip\");\n\n m.request().expect_header(\"X-header\", \"some-value3\");\n\n}\n", "file_path": "tests/headers.rs", "rank": 76, "score": 72338.18652741586 }, { "content": "#[derive(Debug)]\n\nenum Message {\n\n /// Requests the agent to close.\n\n Close,\n\n\n\n /// Begin executing a new request.\n\n Execute(EasyHandle),\n\n\n\n /// Request to resume reading the request 
body for the request with the\n\n /// given ID.\n\n UnpauseRead(usize),\n\n\n\n /// Request to resume writing the response body for the request with the\n\n /// given ID.\n\n UnpauseWrite(usize),\n\n}\n\n\n", "file_path": "src/agent.rs", "rank": 77, "score": 66447.78334208841 }, { "content": "/// All possible body implementations.\n\nenum Inner {\n\n /// An empty body.\n\n Empty,\n\n\n\n /// A body stored in memory.\n\n Bytes(Cursor<Bytes>),\n\n\n\n /// An asynchronous reader.\n\n AsyncRead(Pin<Box<dyn AsyncRead + Send + Sync>>, Option<u64>),\n\n}\n\n\n\nimpl Body {\n\n /// Create a new empty body.\n\n ///\n\n /// An empty body represents the *absence* of a body, which is semantically\n\n /// different than the presence of a body of zero length.\n\n pub const fn empty() -> Self {\n\n Body(Inner::Empty)\n\n }\n\n\n", "file_path": "src/body.rs", "rank": 78, "score": 66447.78334208841 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nenum Inner {\n\n Default,\n\n\n\n IpSocket(String),\n\n\n\n #[cfg(unix)]\n\n UnixSocket(std::path::PathBuf),\n\n}\n\n\n\nimpl Dialer {\n\n /// Connect to the given IP socket.\n\n ///\n\n /// Any value that can be converted into a [`SocketAddr`] can be given as an\n\n /// argument; check the [`SocketAddr`] documentation for a complete list.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use isahc::config::Dialer;\n\n /// use std::net::Ipv4Addr;\n", "file_path": "src/config/dial.rs", "rank": 79, "score": 64787.326949219045 }, { "content": "#[derive(Debug)]\n\nenum JoinResult {\n\n AlreadyJoined,\n\n Ok,\n\n Err(Error),\n\n Panic,\n\n}\n\n\n\nimpl Handle {\n\n /// Begin executing a request with this agent.\n\n pub(crate) fn submit_request(&self, request: EasyHandle) -> Result<(), Error> {\n\n self.send_message(Message::Execute(request))\n\n }\n\n\n\n /// Send a message to the agent thread.\n\n ///\n\n /// If the agent is not connected, an error is returned.\n\n fn send_message(&self, message: Message) -> Result<(), Error> 
{\n\n match self.message_tx.send(message) {\n\n Ok(()) => {\n\n // Wake the agent thread up so it will check its messages soon.\n", "file_path": "src/agent.rs", "rank": 80, "score": 64787.326949219045 }, { "content": "/// Provides extension methods for working with HTTP responses.\n\npub trait ResponseExt<T> {\n\n /// Get the effective URI of this response. This value differs from the\n\n /// original URI provided when making the request if at least one redirect\n\n /// was followed.\n\n ///\n\n /// This information is only available if populated by the HTTP client that\n\n /// produced the response.\n\n fn effective_uri(&self) -> Option<&Uri>;\n\n\n\n /// Get the local socket address of the last-used connection involved in\n\n /// this request, if known.\n\n ///\n\n /// Multiple connections may be involved in a request, such as with\n\n /// redirects.\n\n ///\n\n /// This method only makes sense with a normal Internet request. If some\n\n /// other kind of transport is used to perform the request, such as a Unix\n\n /// socket, then this method will return `None`.\n\n fn local_addr(&self) -> Option<SocketAddr>;\n\n\n", "file_path": "src/response.rs", "rank": 81, "score": 56076.81302649733 }, { "content": "/// Object-safe version of the interceptor used for type erasure. 
Implementation\n\n/// detail of [`InterceptorObj`].\n\ntrait DynInterceptor: Send + Sync {\n\n fn dyn_intercept<'a>(&'a self, request: Request<Body>, cx: Context<'a>) -> InterceptorFuture<'a, Box<dyn Error>>;\n\n}\n\n\n\nimpl<I: Interceptor> DynInterceptor for I {\n\n fn dyn_intercept<'a>(&'a self, request: Request<Body>, cx: Context<'a>) -> InterceptorFuture<'a, Box<dyn Error>> {\n\n Box::pin(async move {\n\n self.intercept(request, cx).await.map_err(Into::into)\n\n })\n\n }\n\n}\n", "file_path": "src/interceptor/obj.rs", "rank": 82, "score": 55671.49567752711 }, { "content": "fn main() {\n\n println!(\"version: {}\", isahc::version());\n\n}\n", "file_path": "examples/version.rs", "rank": 83, "score": 55511.84913062057 }, { "content": "fn main() {\n\n // accept expired cert\n\n Request::get(\"https://expired.badssl.com\")\n\n .ssl_options(SslOption::DANGER_ACCEPT_INVALID_CERTS)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .expect(\"cert should have been accepted\");\n\n\n\n // accepting invalid certs alone does not allow invalid hosts\n\n Request::get(\"https://wrong.host.badssl.com\")\n\n .ssl_options(SslOption::DANGER_ACCEPT_INVALID_CERTS)\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .expect_err(\"cert should have been rejected\");\n\n\n\n // accept cert with wrong host\n\n Request::get(\"https://wrong.host.badssl.com\")\n\n .ssl_options(SslOption::DANGER_ACCEPT_INVALID_HOSTS)\n", "file_path": "examples/badssl.rs", "rank": 84, "score": 55511.84913062057 }, { "content": "#[test]\n\nfn cookie_lifecycle() {\n\n let jar = CookieJar::default();\n\n let client = HttpClient::builder().cookie_jar(jar.clone()).build().unwrap();\n\n\n\n let m1 = mock! 
{\n\n headers {\n\n \"set-cookie\": \"foo=bar\",\n\n \"set-cookie\": \"baz=123\",\n\n }\n\n };\n\n let m2 = mock!();\n\n\n\n let response1 = client.get(m1.url()).unwrap();\n\n\n\n assert!(response1.cookie_jar().is_some());\n\n\n\n let response2 = client.get(m2.url()).unwrap();\n\n\n\n assert!(response2.cookie_jar().is_some());\n\n\n\n dbg!(m2.request()).expect_header(\"cookie\", \"baz=123; foo=bar\");\n\n}\n", "file_path": "tests/cookies.rs", "rank": 85, "score": 53901.914407438446 }, { "content": "/// Helper trait for defining key-value pair types that can be dereferenced into\n\n/// a tuple from a reference.\n\n///\n\n/// This trait is sealed and cannot be implemented for types outside of Isahc.\n\npub trait HeaderPair<K, V> {\n\n fn pair(self) -> (K, V);\n\n}\n\n\n\nimpl<K, V> HeaderPair<K, V> for (K, V) {\n\n fn pair(self) -> (K, V) {\n\n self\n\n }\n\n}\n\n\n\nimpl<'a, K: Copy, V: Copy> HeaderPair<K, V> for &'a (K, V) {\n\n fn pair(self) -> (K, V) {\n\n (self.0, self.1)\n\n }\n\n}\n\n\n\n/// An HTTP client for making requests.\n\n///\n\n/// An [`HttpClient`] instance acts as a session for executing one or more HTTP\n\n/// requests, and also allows you to set common protocol settings that should be\n", "file_path": "src/client.rs", "rank": 86, "score": 53000.203081608845 }, { "content": "#[test]\n\nfn metrics_are_disabled_by_default() {\n\n let m = mock!();\n\n\n\n let response = isahc::get(m.url()).unwrap();\n\n\n\n assert!(!m.requests().is_empty());\n\n assert!(response.metrics().is_none());\n\n}\n\n\n", "file_path": "tests/metrics.rs", "rank": 87, "score": 52430.41274226716 }, { "content": "#[cfg(feature = \"spnego\")]\n\n#[test]\n\nfn negotiate_auth_exists() {\n\n let m = mock! 
{\n\n status: 401,\n\n headers {\n\n \"WWW-Authenticate\": \"Negotiate\",\n\n }\n\n };\n\n\n\n Request::get(m.url())\n\n .authentication(Authentication::negotiate())\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert!(!m.requests().is_empty());\n\n}\n\n\n", "file_path": "tests/auth.rs", "rank": 88, "score": 52430.41274226716 }, { "content": "#[test]\n\nfn large_response_body() {\n\n let body = \"wow so large \".repeat(1000);\n\n\n\n let m = {\n\n let body = body.clone();\n\n mock! {\n\n body: body.clone(),\n\n }\n\n };\n\n\n\n let mut response = isahc::get(m.url()).unwrap();\n\n let response_text = response.text().unwrap();\n\n\n\n assert_eq!(response_text, body);\n\n}\n\n\n", "file_path": "tests/response_body.rs", "rank": 89, "score": 51080.22502967299 }, { "content": "#[test]\n\nfn change_http_method_with_interceptor() {\n\n let m = mock!();\n\n\n\n let client = HttpClient::builder()\n\n .interceptor(isahc::interceptor!(request, cx, {\n\n *request.method_mut() = http::Method::HEAD;\n\n cx.send(request).await\n\n }))\n\n .build()\n\n .unwrap();\n\n\n\n client.get(m.url()).unwrap();\n\n\n\n assert_eq!(m.request().method, \"HEAD\");\n\n}\n", "file_path": "tests/interceptors.rs", "rank": 90, "score": 51080.22502967299 }, { "content": "#[cfg(all(feature = \"spnego\", windows))]\n\n#[test]\n\nfn negotiate_on_windows_provides_a_token() {\n\n let m = mock! {\n\n status: 200,\n\n headers {\n\n \"WWW-Authenticate\": \"Negotiate\",\n\n }\n\n };\n\n\n\n let response = Request::get(m.url())\n\n .authentication(Authentication::negotiate())\n\n .body(())\n\n .unwrap()\n\n .send()\n\n .unwrap();\n\n\n\n assert_eq!(response.status(), 200);\n\n m.request().expect_header_regex(\"authorization\", r\"Negotiate \\w+=*\");\n\n}\n", "file_path": "tests/auth.rs", "rank": 91, "score": 51080.22502967299 }, { "content": "#[test]\n\nfn simple_response_body() {\n\n let m = mock! 
{\n\n body: \"hello world\",\n\n };\n\n\n\n let mut response = isahc::get(m.url()).unwrap();\n\n let response_text = response.text().unwrap();\n\n\n\n assert_eq!(response_text, \"hello world\");\n\n}\n\n\n", "file_path": "tests/response_body.rs", "rank": 92, "score": 51080.22502967299 }, { "content": "/// Generate a \"feature string\" for the crate features currently enabled.\n\nfn get_feature_string() -> String {\n\n env::vars()\n\n .filter(|(name, _)| name.starts_with(\"CARGO_FEATURE_\"))\n\n .filter(|(_, value)| value == \"1\")\n\n .map(|(name, _)| name.trim_start_matches(\"CARGO_FEATURE_\").to_lowercase())\n\n .collect::<Vec<String>>()\n\n .join(\",\")\n\n}\n", "file_path": "build.rs", "rank": 93, "score": 50680.74399625858 }, { "content": "type MultiMessage = (usize, Result<(), curl::Error>);\n\n\n\n/// Builder for configuring and spawning an agent.\n\n#[derive(Debug, Default)]\n\npub(crate) struct AgentBuilder {\n\n max_connections: usize,\n\n max_connections_per_host: usize,\n\n connection_cache_size: usize,\n\n}\n\n\n\nimpl AgentBuilder {\n\n pub(crate) fn max_connections(mut self, max: usize) -> Self {\n\n self.max_connections = max;\n\n self\n\n }\n\n\n\n pub(crate) fn max_connections_per_host(mut self, max: usize) -> Self {\n\n self.max_connections_per_host = max;\n\n self\n\n }\n", "file_path": "src/agent.rs", "rank": 94, "score": 50250.14030179819 }, { "content": "#[test]\n\nfn local_addr_returns_expected_address() {\n\n let m = mock!();\n\n\n\n let response = isahc::get(m.url()).unwrap();\n\n\n\n assert!(!m.requests().is_empty());\n\n assert_eq!(response.local_addr().unwrap().ip(), Ipv4Addr::LOCALHOST);\n\n assert!(response.local_addr().unwrap().port() > 0);\n\n}\n\n\n", "file_path": "tests/net.rs", "rank": 95, "score": 49836.94309282786 }, { "content": "#[test]\n\nfn user_agent_contains_expected_format() {\n\n let m = mock!();\n\n\n\n isahc::get(m.url()).unwrap();\n\n\n\n m.request().expect_header_regex(\"user-agent\", r\"^curl/\\S+ 
isahc/\\S+$\");\n\n}\n\n\n\n// Issue [#209](https://github.com/sagebind/isahc/issues/209)\n", "file_path": "tests/headers.rs", "rank": 96, "score": 49836.94309282786 }, { "content": "#[test]\n\nfn gzip_encoded_response_is_decoded_automatically() {\n\n let body = \"hello world\";\n\n let mut body_encoded = Vec::new();\n\n\n\n GzEncoder::new(body.as_bytes(), Compression::default())\n\n .read_to_end(&mut body_encoded)\n\n .unwrap();\n\n\n\n let m = mock! {\n\n headers {\n\n \"Content-Encoding\": \"gzip\",\n\n }\n\n body: body_encoded.clone(),\n\n };\n\n\n\n let mut response = isahc::get(m.url()).unwrap();\n\n\n\n assert_eq!(response.text().unwrap(), body);\n\n m.request().expect_header(\"Accept-Encoding\", \"deflate, gzip\");\n\n}\n\n\n", "file_path": "tests/encoding.rs", "rank": 97, "score": 49836.94309282786 }, { "content": "#[test]\n\nfn deflate_encoded_response_is_decoded_automatically() {\n\n let body = \"hello world\";\n\n let mut body_encoded = Vec::new();\n\n\n\n DeflateEncoder::new(body.as_bytes(), Compression::default())\n\n .read_to_end(&mut body_encoded)\n\n .unwrap();\n\n\n\n let m = mock! {\n\n headers {\n\n \"Content-Encoding\": \"deflate\",\n\n }\n\n body: body_encoded.clone(),\n\n };\n\n\n\n let mut response = isahc::get(m.url()).unwrap();\n\n\n\n assert_eq!(response.text().unwrap(), body);\n\n m.request().expect_header(\"Accept-Encoding\", \"deflate, gzip\");\n\n}\n\n\n", "file_path": "tests/encoding.rs", "rank": 98, "score": 49836.94309282786 }, { "content": "#[test]\n\nfn override_client_default_user_agent() {\n\n let m = mock!();\n\n\n\n let client = HttpClient::builder()\n\n .default_header(\"user-agent\", \"foo\")\n\n .build()\n\n .unwrap();\n\n\n\n client.get(m.url()).unwrap();\n\n\n\n m.request().expect_header(\"user-agent\", \"foo\");\n\n}\n\n\n\n// Issue [#205](https://github.com/sagebind/isahc/issues/205)\n", "file_path": "tests/headers.rs", "rank": 99, "score": 49836.94309282786 } ]
Rust
src/objects/module.rs
mloebel/rust-cpython
2e243f7622a8d33bca3fb321db25d4be4c26397d
use libc::c_char; use std::ffi::{CStr, CString}; use crate::conversion::ToPyObject; use crate::err::{self, PyErr, PyResult}; use crate::ffi; use crate::objectprotocol::ObjectProtocol; use crate::objects::{exc, PyDict, PyObject, PyTuple}; use crate::py_class::PythonObjectFromPyClassMacro; use crate::python::{PyDrop, Python, PythonObject}; pub struct PyModule(PyObject); pyobject_newtype!(PyModule, PyModule_Check, PyModule_Type); impl PyModule { pub fn new(py: Python, name: &str) -> PyResult<PyModule> { let name = CString::new(name).unwrap(); unsafe { err::result_cast_from_owned_ptr(py, ffi::PyModule_New(name.as_ptr())) } } pub fn import(py: Python, name: &str) -> PyResult<PyModule> { let name = CString::new(name).unwrap(); unsafe { err::result_cast_from_owned_ptr(py, ffi::PyImport_ImportModule(name.as_ptr())) } } pub fn dict(&self, py: Python) -> PyDict { unsafe { let r = PyObject::from_borrowed_ptr(py, ffi::PyModule_GetDict(self.0.as_ptr())); r.unchecked_cast_into::<PyDict>() } } unsafe fn str_from_ptr<'a>(&'a self, py: Python, ptr: *const c_char) -> PyResult<&'a str> { if ptr.is_null() { Err(PyErr::fetch(py)) } else { let slice = CStr::from_ptr(ptr).to_bytes(); match std::str::from_utf8(slice) { Ok(s) => Ok(s), Err(e) => Err(PyErr::from_instance( py, exc::UnicodeDecodeError::new_utf8(py, slice, e)?, )), } } } pub fn name<'a>(&'a self, py: Python) -> PyResult<&'a str> { unsafe { self.str_from_ptr(py, ffi::PyModule_GetName(self.0.as_ptr())) } } #[allow(deprecated)] pub fn filename<'a>(&'a self, py: Python) -> PyResult<&'a str> { unsafe { self.str_from_ptr(py, ffi::PyModule_GetFilename(self.0.as_ptr())) } } #[cfg(feature = "python3-sys")] pub fn filename_object<'a>(&'a self, py: Python) -> PyResult<PyObject> { let ptr = unsafe { ffi::PyModule_GetFilenameObject(self.0.as_ptr()) }; if ptr.is_null() { Err(PyErr::fetch(py)) } else { Ok(unsafe { PyObject::from_borrowed_ptr(py, ptr) }) } } pub fn get(&self, py: Python, name: &str) -> PyResult<PyObject> { 
self.as_object().getattr(py, name) } pub fn call<A>( &self, py: Python, name: &str, args: A, kwargs: Option<&PyDict>, ) -> PyResult<PyObject> where A: ToPyObject<ObjectType = PyTuple>, { self.as_object().getattr(py, name)?.call(py, args, kwargs) } pub fn add<V>(&self, py: Python, name: &str, value: V) -> PyResult<()> where V: ToPyObject, { self.as_object().setattr(py, name, value) } pub fn add_class<'p, T>(&self, py: Python<'p>) -> PyResult<()> where T: PythonObjectFromPyClassMacro, { T::add_to_module(py, self) } }
use libc::c_char; use std::ffi::{CStr, CString}; use crate::conversion::ToPyObject; use crate::err::{self, PyErr, PyResult}; use crate::ffi; use crate::objectprotocol::ObjectProtocol; use crate::objects::{exc, PyDict, PyObject, PyTuple}; use crate::py_class::PythonObjectFromPyClassMacro; use crate::python::{PyDrop, Python, PythonObject}; pub struct PyModule(PyObject); pyobject_newtype!(PyModule, PyModule_Check, PyModule_Type); impl PyModule { pub fn new(py: Python, name: &str) -> PyResult<PyModule> { let name = CString::new(name).unwrap(); unsafe { err::result_cast_from_owned_ptr(py, ffi::PyModule_New(name.as_ptr())) } } pub fn import(py: Python, name: &str) -> PyResult<PyModule> { let name = CString::new(name).unwrap(); unsafe { err::result_cast_from_owned_ptr(py, ffi::PyImport_ImportModule(name.as_ptr())) } } pub fn dict(&self, py: Python) -> PyDict { unsafe { let r = PyObject::from_borrowed_ptr(py, ffi::PyModule_GetDict(self.0.as_ptr())); r.unchecked_cast_into::<PyDict>() } } unsafe fn str_from_ptr<'a>(&'a self, py: Python, ptr: *const c_char) -> PyResult<&'a str> { if ptr.is_null() { Err(PyErr::fetch(py)) } else { let slice = CStr::from_ptr(ptr).to_bytes(); match std::str::from_utf8(slice) { Ok(s) => Ok(s), Err(e) =>
, } } } pub fn name<'a>(&'a self, py: Python) -> PyResult<&'a str> { unsafe { self.str_from_ptr(py, ffi::PyModule_GetName(self.0.as_ptr())) } } #[allow(deprecated)] pub fn filename<'a>(&'a self, py: Python) -> PyResult<&'a str> { unsafe { self.str_from_ptr(py, ffi::PyModule_GetFilename(self.0.as_ptr())) } } #[cfg(feature = "python3-sys")] pub fn filename_object<'a>(&'a self, py: Python) -> PyResult<PyObject> { let ptr = unsafe { ffi::PyModule_GetFilenameObject(self.0.as_ptr()) }; if ptr.is_null() { Err(PyErr::fetch(py)) } else { Ok(unsafe { PyObject::from_borrowed_ptr(py, ptr) }) } } pub fn get(&self, py: Python, name: &str) -> PyResult<PyObject> { self.as_object().getattr(py, name) } pub fn call<A>( &self, py: Python, name: &str, args: A, kwargs: Option<&PyDict>, ) -> PyResult<PyObject> where A: ToPyObject<ObjectType = PyTuple>, { self.as_object().getattr(py, name)?.call(py, args, kwargs) } pub fn add<V>(&self, py: Python, name: &str, value: V) -> PyResult<()> where V: ToPyObject, { self.as_object().setattr(py, name, value) } pub fn add_class<'p, T>(&self, py: Python<'p>) -> PyResult<()> where T: PythonObjectFromPyClassMacro, { T::add_to_module(py, self) } }
Err(PyErr::from_instance( py, exc::UnicodeDecodeError::new_utf8(py, slice, e)?, ))
call_expression
[ { "content": "pub fn type_error_to_false(py: Python, e: PyErr) -> PyResult<bool> {\n\n if e.matches(py, py.get_type::<exc::TypeError>()) {\n\n Ok(false)\n\n } else {\n\n Err(e)\n\n }\n\n}\n\n\n\n#[macro_export]\n\n#[doc(hidden)]\n\nmacro_rules! py_class_binary_numeric_slot {\n\n ($class:ident :: $f:ident) => {{\n\n unsafe extern \"C\" fn binary_numeric(\n\n lhs: *mut $crate::_detail::ffi::PyObject,\n\n rhs: *mut $crate::_detail::ffi::PyObject,\n\n ) -> *mut $crate::_detail::ffi::PyObject {\n\n const LOCATION: &'static str = concat!(stringify!($class), \".\", stringify!($f), \"()\");\n\n $crate::_detail::handle_callback(\n\n LOCATION,\n\n $crate::_detail::PyObjectCallbackConverter,\n", "file_path": "src/py_class/slots.rs", "rank": 0, "score": 305711.41988210985 }, { "content": "#[inline]\n\npub fn error_on_minusone(py: Python, result: libc::c_int) -> PyResult<()> {\n\n if result != -1 {\n\n Ok(())\n\n } else {\n\n Err(PyErr::fetch(py))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::objects::exc;\n\n use crate::{PyErr, Python};\n\n\n\n #[test]\n\n fn set_typeerror() {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n PyErr::new_lazy_init(py.get_type::<exc::TypeError>(), None).restore(py);\n\n assert!(PyErr::occurred(py));\n\n drop(PyErr::fetch(py));\n\n }\n\n}\n", "file_path": "src/err.rs", "rank": 1, "score": 287647.62895668065 }, { "content": "pub fn build_tp_name(module_name: Option<&str>, type_name: &str) -> *mut c_char {\n\n let name = match module_name {\n\n Some(module_name) => CString::new(format!(\"{}.{}\", module_name, type_name)),\n\n None => CString::new(type_name),\n\n };\n\n name.expect(\"Module name/type name must not contain NUL byte\")\n\n .into_raw()\n\n}\n\n\n\npub unsafe extern \"C\" fn tp_dealloc_callback<T>(obj: *mut ffi::PyObject)\n\nwhere\n\n T: super::BaseObject,\n\n{\n\n let guard = crate::function::AbortOnDrop(\"Cannot unwind out of tp_dealloc\");\n\n let py = Python::assume_gil_acquired();\n\n let r = 
T::dealloc(py, obj);\n\n mem::forget(guard);\n\n r\n\n}\n\n\n", "file_path": "src/py_class/slots.rs", "rank": 2, "score": 273025.224239749 }, { "content": "fn overflow_error(py: Python) -> PyErr {\n\n PyErr::new_lazy_init(py.get_type::<exc::OverflowError>(), None)\n\n}\n\n\n\n/// Conversion of Rust `f32` to Python `float`.\n\nimpl ToPyObject for f32 {\n\n type ObjectType = PyFloat;\n\n\n\n fn to_py_object(&self, py: Python) -> PyFloat {\n\n PyFloat::new(py, *self as f64)\n\n }\n\n}\n\n\n\nextract!(\n\n obj to f32;\n\n /// Converts Python `float` to Rust `f32`.\n\n ///\n\n /// This conversion loses precision as the 64-bit float from Python gets\n\n /// converted to a 32-bit float. Out-of-range numbers may also overflow to infinity.\n\n py => {\n", "file_path": "src/objects/num.rs", "rank": 3, "score": 269627.63851102436 }, { "content": "fn slice_length_error(py: Python) -> PyResult<()> {\n\n Err(err::PyErr::new::<exc::BufferError, _>(\n\n py,\n\n \"Slice length does not match buffer length.\",\n\n ))\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 4, "score": 268110.413355492 }, { "content": "pub fn extract_op(py: Python, op: c_int) -> PyResult<CompareOp> {\n\n match op {\n\n ffi::Py_LT => Ok(CompareOp::Lt),\n\n ffi::Py_LE => Ok(CompareOp::Le),\n\n ffi::Py_EQ => Ok(CompareOp::Eq),\n\n ffi::Py_NE => Ok(CompareOp::Ne),\n\n ffi::Py_GT => Ok(CompareOp::Gt),\n\n ffi::Py_GE => Ok(CompareOp::Ge),\n\n _ => Err(PyErr::new_lazy_init(\n\n py.get_type::<exc::ValueError>(),\n\n Some(\n\n \"tp_richcompare called with invalid comparison operator\"\n\n .to_py_object(py)\n\n .into_object(),\n\n ),\n\n )),\n\n }\n\n}\n\n\n\n// sq_richcompare is special-cased slot\n", "file_path": "src/py_class/slots.rs", "rank": 5, "score": 247816.9069353184 }, { "content": " -> *mut PyObject;\n\npub fn PyString_AsEncodedString(str: *mut PyObject,\n\n encoding: *const c_char,\n\n errors: *const c_char)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 6, "score": 245384.31383355713 }, { 
"content": " -> *mut PyObject;\n\npub fn PyString_AsDecodedString(str: *mut PyObject,\n\n encoding: *const c_char,\n\n errors: *const c_char)\n\n -> *mut PyObject;\n\n\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 7, "score": 245384.31383355713 }, { "content": "pub fn PyString_DecodeEscape(arg1: *const c_char,\n\n arg2: Py_ssize_t,\n\n arg3: *const c_char,\n\n arg4: Py_ssize_t,\n\n arg5: *const c_char)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 8, "score": 238042.61513889913 }, { "content": "pub fn _PyImport_FindModule(arg1: *const c_char,\n\n arg2: *mut PyObject,\n\n arg3: *mut c_char, arg4: size_t,\n\n arg5: *mut *mut FILE,\n\n arg6: *mut *mut PyObject)\n", "file_path": "python27-sys/src/import.rs", "rank": 9, "score": 238042.61513889913 }, { "content": "fn hello(py: Python) -> PyResult<()> {\n\n let sys = py.import(\"sys\")?;\n\n let version: String = sys.get(py, \"version\")?.extract(py)?;\n\n\n\n let locals = PyDict::new(py);\n\n locals.set_item(py, \"os\", py.import(\"os\")?)?;\n\n let user: String = py\n\n .eval(\n\n \"os.getenv('USER') or os.getenv('USERNAME')\",\n\n None,\n\n Some(&locals),\n\n )?\n\n .extract(py)?;\n\n\n\n println!(\"Hello {}, I'm Python {}\", user, version);\n\n Ok(())\n\n}\n", "file_path": "examples/hello.rs", "rank": 10, "score": 235588.54925249898 }, { "content": "fn wrong_tuple_length(py: Python, t: &PyTuple, expected_length: usize) -> PyErr {\n\n let msg = format!(\n\n \"Expected tuple of length {}, but got tuple of length {}.\",\n\n expected_length,\n\n t.len(py)\n\n );\n\n PyErr::new_lazy_init(\n\n py.get_type::<exc::ValueError>(),\n\n Some(msg.to_py_object(py).into_object()),\n\n )\n\n}\n\n\n\nmacro_rules! 
tuple_conversion ({$length:expr,$(($refN:ident, $n:tt, $T:ident)),+} => {\n\n /// Converts a Rust tuple to a Python `tuple`.\n\n impl <$($T: ToPyObject),+> ToPyObject for ($($T,)+) {\n\n type ObjectType = PyTuple;\n\n\n\n fn to_py_object(&self, py: Python) -> PyTuple {\n\n PyTuple::new(py, &[\n\n $(py_coerce_expr!(self.$n.to_py_object(py)).into_object(),)+\n", "file_path": "src/objects/tuple.rs", "rank": 11, "score": 229110.10757291637 }, { "content": "fn buffer_readonly_error(py: Python) -> PyResult<()> {\n\n Err(err::PyErr::new::<exc::BufferError, _>(\n\n py,\n\n \"Cannot write to read-only buffer.\",\n\n ))\n\n}\n\n\n\nimpl PyDrop for PyBuffer {\n\n #[inline]\n\n fn release_ref(mut self, _py: Python) {\n\n unsafe { ffi::PyBuffer_Release(&mut *self.0) }\n\n }\n\n}\n\n\n\nimpl Drop for PyBuffer {\n\n fn drop(&mut self) {\n\n let _gil_guard = Python::acquire_gil();\n\n unsafe { ffi::PyBuffer_Release(&mut *self.0) }\n\n }\n\n}\n", "file_path": "src/buffer.rs", "rank": 12, "score": 228979.9709313942 }, { "content": "fn incompatible_format_error(py: Python) -> PyResult<()> {\n\n Err(err::PyErr::new::<exc::BufferError, _>(\n\n py,\n\n \"Slice type is incompatible with buffer format.\",\n\n ))\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 13, "score": 228979.9709313942 }, { "content": "fn run(py: Python, args: &PyTuple, kwargs: Option<&PyDict>) -> PyResult<PyObject> {\n\n println!(\"Rust says: Hello Python!\");\n\n for arg in args.iter(py) {\n\n println!(\"Rust got {}\", arg);\n\n }\n\n if let Some(kwargs) = kwargs {\n\n for (key, val) in kwargs.items(py) {\n\n println!(\"{} = {}\", key, val);\n\n }\n\n }\n\n Ok(py.None())\n\n}\n\n\n", "file_path": "extensions/tests/hello.rs", "rank": 14, "score": 225502.1900743978 }, { "content": " def r#match(&self, r#match: i32) -> PyResult<i32> {\n\n Ok(r#match)\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 15, "score": 224353.29631434171 }, { "content": "fn run(py: Python, args: &PyTuple, kwargs: 
Option<&PyDict>) -> PyResult<PyObject> {\n\n println!(\"Rust says: Hello Python!\");\n\n for arg in args.iter(py) {\n\n println!(\"Rust got {}\", arg);\n\n }\n\n if let Some(kwargs) = kwargs {\n\n for (key, val) in kwargs.items(py) {\n\n println!(\"{} = {}\", key, val);\n\n }\n\n }\n\n Ok(py.None())\n\n}\n", "file_path": "extensions/hello/src/hello.rs", "rank": 16, "score": 223592.46417702892 }, { "content": "fn panic_after_error(_py: Python) -> ! {\n\n unsafe {\n\n ffi::PyErr_Print();\n\n }\n\n panic!(\"Python API called failed\");\n\n}\n\n\n\n#[inline]\n\npub unsafe fn from_owned_ptr_or_panic(py: Python, p: *mut ffi::PyObject) -> PyObject {\n\n if p.is_null() {\n\n panic_after_error(py);\n\n } else {\n\n PyObject::from_owned_ptr(py, p)\n\n }\n\n}\n\n\n\npub unsafe fn result_cast_from_owned_ptr<T>(py: Python, p: *mut ffi::PyObject) -> PyResult<T>\n\nwhere\n\n T: crate::python::PythonObjectWithCheckedDowncast,\n\n{\n", "file_path": "src/err.rs", "rank": 17, "score": 222777.95218017808 }, { "content": "// The py_fn!()-macro can translate between Python and Rust values,\n\n// so you can use `&str`, `i32` or `String` in the signature of a function\n\n// callable from Python.\n\n// The first argument of type `Python<'p>` is used to indicate that your\n\n// function may assume that the current thread holds the global interpreter lock.\n\n// Most functions in the `cpython` crate require that you pass this argument.\n\nfn func(_: Python, a: &str, b: i32) -> PyResult<String> {\n\n Ok(format!(\"func({}, {})\", a, b))\n\n}\n\n\n", "file_path": "extensions/hello/src/hello.rs", "rank": 18, "score": 219108.89599845567 }, { "content": "#[doc(hidden)]\n\n#[inline]\n\npub fn assert_buffer_type_direct<C>(_: for<'a> fn(&'a C, Python<'a>) -> PyResult<&'a [u8]>) {}\n\n\n\n#[macro_export]\n\n#[doc(hidden)]\n\nmacro_rules! 
py_class_buffer_slot {\n\n (handle, bf_getbuffer, $class:ident :: $f:ident) => {{\n\n unsafe extern \"C\" fn getbufferproc(\n\n exporter: *mut $crate::_detail::ffi::PyObject,\n\n view: *mut $crate::_detail::ffi::Py_buffer,\n\n flags: $crate::_detail::libc::c_int,\n\n ) -> $crate::_detail::libc::c_int {\n\n /*\n\n According to https://docs.python.org/3/c-api/typeobj.html#c.PyBufferProcs,\n\n the implementation of this function needs to behave like this:\n\n\n\n 1. Check if the request can be met. If not, raise PyExc_BufferError,\n\n set view->obj to NULL and return -1.\n\n 2. Fill in the requested fields.\n\n 3. Increment an internal counter for the number of exports.\n\n 4. Set view->obj to exporter and increment view->obj.\n", "file_path": "src/py_class/slots.rs", "rank": 19, "score": 217648.09120696335 }, { "content": "#[inline]\n\n#[doc(hidden)]\n\npub fn is_ready(_py: Python, ty: &ffi::PyTypeObject) -> bool {\n\n (ty.tp_flags & ffi::Py_TPFLAGS_READY) != 0\n\n}\n\n\n", "file_path": "src/py_class/mod.rs", "rank": 20, "score": 210903.3188334972 }, { "content": "fn extract_sequence<T>(py: Python, obj: &PyObject) -> PyResult<Vec<T>>\n\nwhere\n\n for<'a> T: FromPyObject<'a>,\n\n{\n\n let seq = obj.cast_as::<PySequence>(py)?;\n\n let mut v = Vec::new();\n\n for item in seq.iter(py)? 
{\n\n let item = item?;\n\n v.push(T::extract(py, &item)?);\n\n item.release_ref(py);\n\n }\n\n Ok(v)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::conversion::ToPyObject;\n\n use crate::objects::{PyIterator, PyList, PySequence, PyTuple};\n\n use crate::python::{Python, PythonObject};\n\n\n", "file_path": "src/objects/sequence.rs", "rank": 21, "score": 209459.79363578587 }, { "content": " def __iadd__(&self, other: &str) -> PyResult<Self> {\n\n let mut value = self.value(py).borrow_mut();\n\n value.push_str(other);\n\n Ok(self.clone_ref(py))\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 22, "score": 204103.423606754 }, { "content": "pub fn _PyLong_FromByteArray(bytes: *const c_uchar, n: size_t,\n\n little_endian: c_int,\n\n is_signed: c_int) -> *mut PyObject;\n", "file_path": "python27-sys/src/longobject.rs", "rank": 23, "score": 200273.60385546752 }, { "content": " def __str__(&self) -> PyResult<&'static str> {\n\n Ok(\"str\")\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 24, "score": 199499.78890743857 }, { "content": "fn val(_: Python) -> PyResult<i32> {\n\n Ok(42)\n\n}\n\n\n", "file_path": "extensions/tests/hello.rs", "rank": 25, "score": 188437.8675125533 }, { "content": " def __invert__(&self) -> PyResult<&'static str> {\n\n Ok(\"invert\")\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 26, "score": 187397.32325774262 }, { "content": " def __abs__(&self) -> PyResult<&'static str> {\n\n Ok(\"abs\")\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 27, "score": 187397.32325774262 }, { "content": " def __repr__(&self) -> PyResult<&'static str> {\n\n Ok(\"RCR\")\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 28, "score": 187397.32325774262 }, { "content": " def __pos__(&self) -> PyResult<&'static str> {\n\n Ok(\"pos\")\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 29, "score": 187397.32325774262 }, { "content": " def __delitem__(&self, key: &str) -> PyResult<()> {\n\n if 
key.is_empty() {\n\n return Err(PyErr::new::<exc::IndexError, NoArgs>(py, NoArgs));\n\n }\n\n Ok(())\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 30, "score": 187397.32325774262 }, { "content": " def __neg__(&self) -> PyResult<&'static str> {\n\n Ok(\"neg\")\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 31, "score": 187397.32325774262 }, { "content": " def __reversed__(&self) -> PyResult<&'static str> {\n\n Ok(\"I am reversed\")\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 32, "score": 187397.32325774262 }, { "content": "/// Prepares the use of Python in a free-threaded context.\n\n///\n\n/// If the Python interpreter is not already initialized, this function\n\n/// will initialize it with disabled signal handling\n\n/// (Python will not raise the `KeyboardInterrupt` exception).\n\n/// Python signal handling depends on the notion of a 'main thread', which must be\n\n/// the thread that initializes the Python interpreter.\n\n///\n\n/// If both the Python interpreter and Python threading are already initialized,\n\n/// this function has no effect.\n\n///\n\n/// # Panic\n\n/// If the Python interpreter is initialized but Python threading is not,\n\n/// a panic occurs.\n\n/// It is not possible to safely access the Python runtime unless the main\n\n/// thread (the thread which originally initialized Python) also initializes\n\n/// threading.\n\n///\n\n/// When writing an extension module, the `py_module_initializer!` macro\n\n/// will ensure that Python threading is initialized.\n\n///\n\npub fn prepare_freethreaded_python() {\n\n // Protect against race conditions when Python is not yet initialized\n\n // and multiple threads concurrently call 'prepare_freethreaded_python()'.\n\n // Note that we do not protect against concurrent initialization of the Python runtime\n\n // by other users of the Python C API.\n\n START.call_once(|| unsafe {\n\n if ffi::Py_IsInitialized() != 0 {\n\n // If Python is already initialized, 
we expect Python threading to also be initialized,\n\n // as we can't make the existing Python main thread acquire the GIL.\n\n assert!(ffi::PyEval_ThreadsInitialized() != 0);\n\n } else {\n\n #[cfg(feature = \"python27-sys\")]\n\n {\n\n // If Python isn't initialized yet, we expect that Python threading isn't initialized either.\n\n assert!(ffi::PyEval_ThreadsInitialized() == 0);\n\n // Note: starting with Python 3.2 it's no longer possible to initialize threading\n\n // without initializing Python; and in Python 3.7 PyEval_ThreadsInitialized() started\n\n // misbehaving when Python was not initialized yet.\n\n }\n\n // Initialize Python.\n", "file_path": "src/pythonrun.rs", "rank": 33, "score": 186552.14996785013 }, { "content": " -> Py_ssize_t;\n\npub fn _PyBytes_FormatAdvanced(obj: *mut PyObject,\n\n format_spec: *mut c_char,\n\n format_spec_len: Py_ssize_t)\n\n -> *mut PyObject;*/\n\n}\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 34, "score": 185731.5978398465 }, { "content": " -> *mut PyObject;\n\npub fn _PyLong_FormatAdvanced(obj: *mut PyObject,\n\n format_spec: *mut c_char,\n\n format_spec_len: Py_ssize_t)\n\n -> *mut PyObject;*/\n\n}\n", "file_path": "python27-sys/src/longobject.rs", "rank": 35, "score": 185731.56000497827 }, { "content": "pub fn _PyString_Join(sep: *mut PyObject, x: *mut PyObject)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 36, "score": 184093.8317811649 }, { "content": "pub fn _PyLong_Sign(v: *mut PyObject) -> c_int;\n", "file_path": "python27-sys/src/longobject.rs", "rank": 37, "score": 183582.96693510958 }, { "content": "#[doc(hidden)] // used in py_argparse_extract!() macro\n\npub fn with_extracted_or_default<P: ?Sized, R, F>(\n\n py: Python,\n\n obj: Option<&PyObject>,\n\n f: F,\n\n default: &'static P,\n\n) -> PyResult<R>\n\nwhere\n\n F: FnOnce(&P) -> PyResult<R>,\n\n P: RefFromPyObject,\n\n{\n\n match obj {\n\n Some(obj) => match P::with_extracted(py, obj, f) {\n\n Ok(result) => result,\n\n Err(e) 
=> Err(e),\n\n },\n\n None => f(default),\n\n }\n\n}\n\n\n", "file_path": "src/argparse.rs", "rank": 38, "score": 183366.51061546616 }, { "content": "/// Trait implemented by the types produced by the `py_class!()` macro.\n\n///\n\n/// This is an unstable implementation detail; do not implement manually!\n\npub trait PythonObjectFromPyClassMacro: python::PythonObjectWithTypeObject {\n\n /// Initializes the class.\n\n ///\n\n /// module_name: the name of the parent module into which the class will be placed.\n\n fn initialize(py: Python, module_name: Option<&str>) -> PyResult<PyType>;\n\n\n\n /// Initializes the class and adds it to the module.\n\n fn add_to_module(py: Python, module: &PyModule) -> PyResult<()>;\n\n}\n\n\n", "file_path": "src/py_class/mod.rs", "rank": 39, "score": 182304.54639187318 }, { "content": " -> *mut PyObject;\n\npub fn _PyString_Eq(arg1: *mut PyObject, arg2: *mut PyObject)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 40, "score": 181883.55233325344 }, { "content": " -> *mut Struct_filedescr;\n\npub fn _PyImport_IsScript(arg1: *mut Struct_filedescr) -> c_int;\n", "file_path": "python27-sys/src/import.rs", "rank": 41, "score": 180977.29135927063 }, { "content": "pub fn _PyLong_NumBits(v: *mut PyObject) -> size_t;\n", "file_path": "python27-sys/src/longobject.rs", "rank": 42, "score": 180944.9151031628 }, { "content": "pub fn _PyLong_AsInt(arg1: *mut PyObject) -> c_int;\n", "file_path": "python27-sys/src/longobject.rs", "rank": 43, "score": 180944.9151031628 }, { "content": "#[doc(hidden)] // used in py_argparse_extract!() macro\n\npub fn with_extracted_optional_or_default<P: ?Sized, R, F>(\n\n py: Python,\n\n obj: Option<&PyObject>,\n\n f: F,\n\n default: Option<&'static P>,\n\n) -> PyResult<R>\n\nwhere\n\n F: FnOnce(Option<&P>) -> PyResult<R>,\n\n P: RefFromPyObject,\n\n{\n\n match obj {\n\n Some(obj) => {\n\n if obj.as_ptr() == unsafe { crate::ffi::Py_None() } {\n\n f(None)\n\n } else {\n\n match P::with_extracted(py, obj, 
|p| f(Some(p))) {\n\n Ok(result) => result,\n\n Err(e) => Err(e),\n\n }\n\n }\n", "file_path": "src/argparse.rs", "rank": 44, "score": 180112.8001122421 }, { "content": " def __contains__(&self, item: &str) -> PyResult<bool> {\n\n Ok(item.is_empty())\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 45, "score": 179319.67810244055 }, { "content": " def __getitem__(&self, key: &str) -> PyResult<String> {\n\n if key.is_empty() {\n\n return Err(PyErr::new::<exc::IndexError, NoArgs>(py, NoArgs));\n\n }\n\n Ok(format!(\"Item for {}\", key))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 46, "score": 179319.67810244055 }, { "content": " -> *mut PyObject;\n\npub fn PyString_InternImmortal(arg1: *mut *mut PyObject);\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 47, "score": 178430.55787595885 }, { "content": "pub fn PyImport_Cleanup();\n", "file_path": "python27-sys/src/import.rs", "rank": 48, "score": 176954.52055182122 }, { "content": " -> *mut PyObject;\n\npub fn _PyImport_FixupExtension(arg1: *mut c_char,\n\n arg2: *mut c_char)\n\n -> *mut PyObject;*/\n\n}\n", "file_path": "python27-sys/src/import.rs", "rank": 49, "score": 176856.38373028184 }, { "content": "pub fn _PyImport_FindExtension(arg1: *mut c_char,\n\n arg2: *mut c_char)\n", "file_path": "python27-sys/src/import.rs", "rank": 50, "score": 176852.40481082222 }, { "content": " def __format__(&self, format_spec: &str) -> PyResult<String> {\n\n Ok(format!(\"format({})\", format_spec))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 51, "score": 176289.868898547 }, { "content": "#[cfg(not(py_sys_config = \"Py_USING_UNICODE\"))]\n\n#[inline(always)]\n\npub fn PyUnicode_Check(op: *mut PyObject) -> libc::c_int {\n\n 0\n\n}\n\n\n", "file_path": "python27-sys/src/lib.rs", "rank": 52, "score": 174255.59195194033 }, { "content": "pub fn _PyLong_Format(aa: *mut PyObject, base: c_int,\n\n addL: c_int, newstyle: c_int)\n", "file_path": "python27-sys/src/longobject.rs", 
"rank": 53, "score": 174246.21644299576 }, { "content": "pub fn PyString_Repr(arg1: *mut PyObject, arg2: c_int)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 54, "score": 174246.21644299576 }, { "content": " -> Py_ssize_t;\n\npub fn _PyString_InsertThousandsGrouping(buffer: *mut c_char,\n\n n_buffer: Py_ssize_t,\n\n digits: *mut c_char,\n\n n_digits: Py_ssize_t,\n\n min_width: Py_ssize_t,\n\n grouping: *const c_char,\n\n thousands_sep:\n\n *const c_char)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 55, "score": 174143.92918789058 }, { "content": "pub fn _Py_ReleaseInternedStrings();\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 56, "score": 173573.47070033522 }, { "content": "pub fn _PyImport_AcquireLock();\n", "file_path": "python27-sys/src/import.rs", "rank": 57, "score": 173573.47070033522 }, { "content": "/// A PythonObject that is usable as a base type with the `py_class!()` macro.\n\npub trait BaseObject: PythonObject {\n\n /// Gets the size of the object, in bytes.\n\n fn size() -> usize;\n\n\n\n type InitType;\n\n\n\n /// Allocates a new object (usually by calling ty->tp_alloc),\n\n /// and initializes it using init_val.\n\n /// `ty` must be derived from the Self type, and the resulting object\n\n /// must be of type `ty`.\n\n unsafe fn alloc(py: Python, ty: &PyType, init_val: Self::InitType) -> PyResult<PyObject>;\n\n\n\n /// Calls the rust destructor for the object and frees the memory\n\n /// (usually by calling ptr->ob_type->tp_free).\n\n /// This function is used as tp_dealloc implementation.\n\n unsafe fn dealloc(py: Python, obj: *mut ffi::PyObject);\n\n}\n\n\n\nimpl BaseObject for PyObject {\n\n #[inline]\n", "file_path": "src/py_class/mod.rs", "rank": 58, "score": 172636.91883337926 }, { "content": " def __setitem__(&self, key: &str, val: i32) -> PyResult<()> {\n\n *self.key(py).borrow_mut() = key.to_string();\n\n self.val(py).set(val);\n\n Ok(())\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", 
"rank": 59, "score": 172029.68999284852 }, { "content": " def __contains__(&self, item: Option<&str>) -> PyResult<bool> {\n\n Ok(item.map_or(false, str::is_empty))\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 60, "score": 172029.68999284852 }, { "content": "#[cfg(not(py_sys_config = \"Py_USING_UNICODE\"))]\n\n#[inline(always)]\n\npub fn PyUnicode_CheckExact(op: *mut PyObject) -> libc::c_int {\n\n 0\n\n}\n", "file_path": "python27-sys/src/lib.rs", "rank": 61, "score": 171848.8068409614 }, { "content": " -> c_int;\n\npub fn _PyString_FormatLong(arg1: *mut PyObject, arg2: c_int,\n\n arg3: c_int, arg4: c_int,\n\n arg5: *mut *mut c_char,\n\n arg6: *mut c_int) -> *mut PyObject;\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 62, "score": 171839.43133201683 }, { "content": "pub fn _PyImport_ReInitLock();\n", "file_path": "python27-sys/src/import.rs", "rank": 63, "score": 170381.01814688207 }, { "content": " def a(&self) -> PyResult<PyObject> {\n\n println!(\"a() was called with self={:?}\", self.data(py));\n\n Ok(py.None())\n\n }\n\n});\n\n\n", "file_path": "extensions/tests/custom_class.rs", "rank": 64, "score": 165667.17160172336 }, { "content": "pub fn _PyImport_ReleaseLock() -> c_int;\n", "file_path": "python27-sys/src/import.rs", "rank": 65, "score": 165406.03324180504 }, { "content": " def __richcmp__(&self, other: &str, op: CompareOp) -> PyResult<bool> {\n\n match op {\n\n CompareOp::Lt => Ok(\"RCR\" < other),\n\n CompareOp::Le => Ok(\"RCR\" <= other),\n\n CompareOp::Eq => Ok(\"RCR\" == other),\n\n CompareOp::Ne => Ok(\"RCR\" != other),\n\n CompareOp::Gt => Ok(\"RCR\" > other),\n\n CompareOp::Ge => Ok(\"RCR\" >= other),\n\n }\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 66, "score": 165404.50075898477 }, { "content": " def __setitem__(&self, key: Option<&str>, val: i32) -> PyResult<()> {\n\n if let Some(key) = key {\n\n *self.key(py).borrow_mut() = key.to_string();\n\n self.val(py).set(val);\n\n Ok(())\n\n } else 
{\n\n Err(PyErr::new::<exc::IndexError, NoArgs>(py, NoArgs))\n\n }\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 67, "score": 165404.50075898477 }, { "content": " def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {\n\n if let Ok(index) = key.extract::<i32>(py) {\n\n if index == 5 {\n\n return Err(PyErr::new::<exc::IndexError, NoArgs>(py, NoArgs));\n\n }\n\n }\n\n Ok(key)\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 68, "score": 165215.99072620133 }, { "content": "/// Run a python script using the specified interpreter binary.\n\nfn run_python_script(interpreter: &str, script: &str) -> Result<String, String> {\n\n let mut cmd = Command::new(interpreter);\n\n cmd.arg(\"-c\").arg(script);\n\n\n\n let out = cmd\n\n .output()\n\n .map_err(|e| format!(\"failed to run python interpreter `{:?}`: {}\", cmd, e))?;\n\n\n\n if !out.status.success() {\n\n let stderr = String::from_utf8(out.stderr).unwrap();\n\n let mut msg = format!(\"python script failed with stderr:\\n\\n\");\n\n msg.push_str(&stderr);\n\n return Err(msg);\n\n }\n\n\n\n let out = String::from_utf8(out.stdout).unwrap();\n\n return Ok(out);\n\n}\n\n\n", "file_path": "python3-sys/build.rs", "rank": 69, "score": 165009.27289889343 }, { "content": "/// Run a python script using the specified interpreter binary.\n\nfn run_python_script(interpreter: &str, script: &str) -> Result<String, String> {\n\n let mut cmd = Command::new(interpreter);\n\n cmd.arg(\"-c\").arg(script);\n\n\n\n let out = cmd\n\n .output()\n\n .map_err(|e| format!(\"failed to run python interpreter `{:?}`: {}\", cmd, e))?;\n\n\n\n if !out.status.success() {\n\n let stderr = String::from_utf8(out.stderr).unwrap();\n\n let mut msg = format!(\"python script failed with stderr:\\n\\n\");\n\n msg.push_str(&stderr);\n\n return Err(msg);\n\n }\n\n\n\n let out = String::from_utf8(out.stdout).unwrap();\n\n return Ok(out);\n\n}\n\n\n", "file_path": "python27-sys/build.rs", "rank": 70, "score": 
165009.27289889343 }, { "content": " def __iter__(&self) -> PyResult<Self> {\n\n Ok(self.clone_ref(py))\n\n }\n\n\n", "file_path": "src/py_class/py_class.rs", "rank": 71, "score": 164259.20216224546 }, { "content": " def __next__(&self) -> PyResult<Option<PyObject>> {\n\n Ok(self.iter(py).borrow_mut().next())\n\n }\n\n});\n\n# fn main() {}\n\n```\n\n\n\n## String Conversions\n\n\n\n * `def __repr__(&self) -> PyResult<impl ToPyObject<ObjectType=PyString>>`\n\n * `def __str__(&self) -> PyResult<impl ToPyObject<ObjectType=PyString>>`\n\n\n\n Possible return types for `__str__` and `__repr__` are `PyResult<String>` or `PyResult<PyString>`.\n\n\n\n In Python 2.7, Unicode strings returned by `__str__` and `__repr__` will be converted to byte strings\n\n by the Python runtime, which results in an exception if the string contains non-ASCII characters.\n\n\n\n * `def __bytes__(&self) -> PyResult<PyBytes>`\n\n\n\n On Python 3.x, provides the conversion to `bytes`.\n", "file_path": "src/py_class/py_class.rs", "rank": 72, "score": 163971.43317437562 }, { "content": "pub fn _PyLong_AsByteArray(v: *mut PyLongObject,\n\n bytes: *mut c_uchar, n: size_t,\n\n little_endian: c_int,\n\n is_signed: c_int) -> c_int;\n", "file_path": "python27-sys/src/longobject.rs", "rank": 73, "score": 160889.99380969736 }, { "content": "pub fn _PyString_InsertThousandsGroupingLocale(buffer:\n\n *mut c_char,\n\n n_buffer: Py_ssize_t,\n\n digits:\n\n *mut c_char,\n\n n_digits: Py_ssize_t,\n\n min_width: Py_ssize_t)\n", "file_path": "python27-sys/src/stringobject.rs", "rank": 74, "score": 159527.13438744427 }, { "content": " def __richcmp__(&self, other: Option<&str>, op: CompareOp) -> PyResult<bool> {\n\n match other {\n\n Some(other) => match op {\n\n CompareOp::Lt => Ok(\"RCR\" < other),\n\n CompareOp::Le => Ok(\"RCR\" <= other),\n\n CompareOp::Eq => Ok(\"RCR\" == other),\n\n CompareOp::Ne => Ok(\"RCR\" != other),\n\n CompareOp::Gt => Ok(\"RCR\" > other),\n\n CompareOp::Ge => Ok(\"RCR\" >= other),\n\n 
},\n\n None => match op {\n\n CompareOp::Ne | CompareOp::Gt | CompareOp::Ge => Ok(true),\n\n CompareOp::Eq | CompareOp::Lt | CompareOp::Le => Ok(false),\n\n },\n\n }\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 75, "score": 159347.07166341218 }, { "content": "fn handle_panic(_py: Python, _panic: &dyn any::Any) {\n\n let msg = cstr!(\"Rust panic\");\n\n unsafe {\n\n ffi::PyErr_SetString(ffi::PyExc_SystemError, msg.as_ptr());\n\n }\n\n}\n\n\n\npub struct AbortOnDrop<'a>(pub &'a str);\n\n\n\nimpl<'a> Drop for AbortOnDrop<'a> {\n\n fn drop(&mut self) {\n\n use std::io::Write;\n\n let _ = writeln!(&mut io::stderr(), \"Cannot unwind out of {}\", self.0);\n\n unsafe { libc::abort() }\n\n }\n\n}\n\n\n\n// Tests for this file are in tests/test_function.rs\n", "file_path": "src/function.rs", "rank": 76, "score": 158852.91547076503 }, { "content": "/// Trait implemented by all Python object types.\n\npub trait PythonObject: crate::conversion::ToPyObject + Send + Sized + 'static {\n\n /// Casts the Python object to PyObject.\n\n fn as_object(&self) -> &PyObject;\n\n\n\n /// Casts the Python object to PyObject.\n\n fn into_object(self) -> PyObject;\n\n\n\n /// Unchecked downcast from PyObject to Self.\n\n /// Undefined behavior if the input object does not have the expected type.\n\n unsafe fn unchecked_downcast_from(obj: PyObject) -> Self;\n\n\n\n /// Unchecked downcast from PyObject to Self.\n\n /// Undefined behavior if the input object does not have the expected type.\n\n unsafe fn unchecked_downcast_borrow_from(obj: &PyObject) -> &Self;\n\n}\n\n\n\n// Marker type that indicates an error while downcasting\n\npub struct PythonObjectDowncastError<'p> {\n\n pub(crate) py: Python<'p>,\n\n pub(crate) expected_type_name: String,\n", "file_path": "src/python.rs", "rank": 77, "score": 158848.4564364451 }, { "content": "pub trait PyDrop: Sized {\n\n fn release_ref(self, py: Python);\n\n}\n\n\n\nimpl<T> PyDrop for T\n\nwhere\n\n T: PythonObject,\n\n{\n\n 
#[inline]\n\n fn release_ref(self, _py: Python) {\n\n let ptr = self.into_object().steal_ptr();\n\n unsafe {\n\n ffi::Py_DECREF(ptr);\n\n }\n\n }\n\n}\n\n\n\nimpl<T> PyDrop for Option<T>\n\nwhere\n\n T: PyDrop,\n\n{\n\n #[inline]\n\n fn release_ref(self, py: Python) {\n\n match self {\n\n Some(v) => v.release_ref(py),\n\n None => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 78, "score": 154053.68000385768 }, { "content": "pub trait PyClone: Sized {\n\n fn clone_ref(&self, py: Python) -> Self;\n\n}\n\n\n\nimpl<T> PyClone for T\n\nwhere\n\n T: PythonObject,\n\n{\n\n #[inline]\n\n fn clone_ref(&self, py: Python) -> T {\n\n let ptr = self.as_object().as_ptr();\n\n unsafe { T::unchecked_downcast_from(PyObject::from_borrowed_ptr(py, ptr)) }\n\n }\n\n}\n\n\n\nimpl<T> PyClone for Option<T>\n\nwhere\n\n T: PyClone,\n\n{\n\n #[inline]\n\n fn clone_ref(&self, py: Python) -> Option<T> {\n\n match *self {\n\n Some(ref v) => Some(v.clone_ref(py)),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 79, "score": 154053.68000385768 }, { "content": "pub fn _PyLong_Frexp(a: *mut PyLongObject, e: *mut Py_ssize_t)\n\n -> c_double;\n\n\n", "file_path": "python27-sys/src/longobject.rs", "rank": 80, "score": 154039.74224719647 }, { "content": " def __richcmp__(&self, other: &PyObject, op: CompareOp) -> PyResult<PyObject> {\n\n match op {\n\n CompareOp::Eq => Ok(true.to_py_object(py).into_object()),\n\n CompareOp::Ne => Ok(false.to_py_object(py).into_object()),\n\n _ => Ok(py.NotImplemented())\n\n }\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 81, "score": 153903.5754574385 }, { "content": "/// Trait implemented by Python object types that allow a checked downcast.\n\npub trait PythonObjectWithCheckedDowncast: PythonObject {\n\n /// Cast from PyObject to a concrete Python object type.\n\n fn downcast_from<'p>(\n\n py: Python<'p>,\n\n obj: PyObject,\n\n ) -> Result<Self, PythonObjectDowncastError<'p>>;\n\n\n\n /// Cast 
from PyObject to a concrete Python object type.\n\n fn downcast_borrow_from<'a, 'p>(\n\n py: Python<'p>,\n\n obj: &'a PyObject,\n\n ) -> Result<&'a Self, PythonObjectDowncastError<'p>>;\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 82, "score": 153661.76377645048 }, { "content": "/// Newtype around PyObject that implements Ord using python value comparisons.\n\n/// Python exceptions are converted into Rust panics.\n\nstruct OrdPyObject(PyObject);\n\n\n\nimpl PartialEq for OrdPyObject {\n\n fn eq(&self, _other: &Self) -> bool {\n\n false\n\n }\n\n}\n\nimpl Eq for OrdPyObject {}\n\nimpl PartialOrd for OrdPyObject {\n\n fn partial_cmp(&self, _other: &Self) -> Option<cmp::Ordering> {\n\n None\n\n }\n\n}\n\nimpl Ord for OrdPyObject {\n\n fn cmp(&self, _other: &Self) -> cmp::Ordering {\n\n unimplemented!()\n\n }\n\n}\n\n\n\npy_class!(class BTreeSet |py| {\n\n data set: cell::RefCell<collections::BTreeSet<OrdPyObject>>;\n\n\n", "file_path": "extensions/tests/btree.rs", "rank": 83, "score": 153225.0907623329 }, { "content": "/// Parse string as interpreter version.\n\nfn get_interpreter_version(line: &str) -> Result<PythonVersion, String> {\n\n let version_re = Regex::new(r\"\\((\\d+), (\\d+)\\)\").unwrap();\n\n match version_re.captures(&line) {\n\n Some(cap) => Ok(PythonVersion {\n\n major: cap.get(1).unwrap().as_str().parse().unwrap(),\n\n minor: Some(cap.get(2).unwrap().as_str().parse().unwrap()),\n\n }),\n\n None => Err(format!(\"Unexpected response to version query {}\", line)),\n\n }\n\n}\n\n\n", "file_path": "python27-sys/build.rs", "rank": 84, "score": 150812.0737626481 }, { "content": "/// Parse string as interpreter version.\n\nfn get_interpreter_version(line: &str) -> Result<PythonVersion, String> {\n\n let version_re = Regex::new(r\"\\((\\d+), (\\d+)\\)\").unwrap();\n\n match version_re.captures(&line) {\n\n Some(cap) => Ok(PythonVersion {\n\n major: cap.get(1).unwrap().as_str().parse().unwrap(),\n\n minor: 
Some(cap.get(2).unwrap().as_str().parse().unwrap()),\n\n }),\n\n None => Err(format!(\"Unexpected response to version query {}\", line)),\n\n }\n\n}\n\n\n", "file_path": "python3-sys/build.rs", "rank": 85, "score": 150812.0737626481 }, { "content": "#[inline]\n\n#[doc(hidden)]\n\npub fn data_offset<T>(base_size: usize) -> usize {\n\n let align = mem::align_of::<T>();\n\n // round base_size up to next multiple of align\n\n (base_size + align - 1) / align * align\n\n}\n\n\n", "file_path": "src/py_class/mod.rs", "rank": 86, "score": 148613.9272647154 }, { "content": "/// Parse argument list\n\n///\n\n/// * fname: Name of the current function\n\n/// * params: Declared parameters of the function\n\n/// * args: Positional arguments\n\n/// * kwargs: Keyword arguments\n\n/// * output: Output array that receives the arguments.\n\n/// Must have same length as `params` and must be initialized to `None`.\n\npub fn parse_args(\n\n py: Python,\n\n fname: Option<&str>,\n\n params: &[ParamDescription],\n\n args: &PyTuple,\n\n kwargs: Option<&PyDict>,\n\n output: &mut [Option<PyObject>],\n\n) -> PyResult<()> {\n\n assert!(params.len() == output.len());\n\n let nargs = args.len(py);\n\n let nkeywords = kwargs.map_or(0, |d| d.len(py));\n\n if nargs + nkeywords > params.len() {\n\n return Err(err::PyErr::new::<exc::TypeError, _>(\n\n py,\n\n format!(\n\n \"{}{} takes at most {} argument{} ({} given)\",\n\n fname.unwrap_or(\"function\"),\n\n if fname.is_some() { \"()\" } else { \"\" },\n\n params.len(),\n\n if params.len() != 1 { \"s\" } else { \"\" },\n", "file_path": "src/argparse.rs", "rank": 87, "score": 146698.54118082474 }, { "content": "#[inline]\n\n#[doc(hidden)]\n\npub fn data_new_size<T>(base_size: usize) -> usize {\n\n data_offset::<T>(base_size) + mem::size_of::<T>()\n\n}\n\n\n\n#[inline]\n\n#[doc(hidden)]\n\npub unsafe fn data_get<'a, T>(_py: Python<'a>, obj: &'a PyObject, offset: usize) -> &'a T {\n\n let ptr = (obj.as_ptr() as *const u8).offset(offset as isize) as *const 
T;\n\n &*ptr\n\n}\n\n\n\n#[inline]\n\n#[doc(hidden)]\n\npub unsafe fn data_init<'a, T>(_py: Python<'a>, obj: &'a PyObject, offset: usize, value: T)\n\nwhere\n\n T: Send + 'static,\n\n{\n\n let ptr = (obj.as_ptr() as *mut u8).offset(offset as isize) as *mut T;\n\n ptr::write(ptr, value)\n\n}\n\n\n\n#[inline]\n\n#[doc(hidden)]\n\npub unsafe fn data_drop<'a, T>(_py: Python<'a>, obj: *mut ffi::PyObject, offset: usize) {\n\n let ptr = (obj as *mut u8).offset(offset as isize) as *mut T;\n\n ptr::drop_in_place(ptr)\n\n}\n\n\n", "file_path": "src/py_class/mod.rs", "rank": 88, "score": 145901.4348874559 }, { "content": " def __exit__(&self, ty: Option<PyType>, value: PyObject, traceback: PyObject) -> PyResult<bool> {\n\n self.exit_called(py).set(true);\n\n if ty == Some(py.get_type::<exc::ValueError>()) {\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n }\n\n});\n\n\n", "file_path": "tests/test_class.rs", "rank": 89, "score": 142999.08351707074 }, { "content": " def half(&self) -> PyResult<i32> {\n\n println!(\"half() was called with self={:?}\", self.number(py));\n\n Ok(self.number(py) / 2)\n\n }\n\n});\n\n\n", "file_path": "src/py_class/py_class.rs", "rank": 90, "score": 142652.5401207637 }, { "content": " def __ilshift__(&self, other: u32) -> PyResult<Self> {\n\n self.value(py).set(self.value(py).get() << other);\n\n Ok(self.clone_ref(py))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 91, "score": 142091.24244692747 }, { "content": " def __imul__(&self, other: u32) -> PyResult<Self> {\n\n self.value(py).set(self.value(py).get() * other);\n\n Ok(self.clone_ref(py))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 92, "score": 142091.24244692747 }, { "content": " def __iadd__(&self, other: u32) -> PyResult<Self> {\n\n self.value(py).set(self.value(py).get() + other);\n\n Ok(self.clone_ref(py))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 93, "score": 142091.24244692747 }, { "content": " def __isub__(&self, other: u32) -> PyResult<Self> 
{\n\n self.value(py).set(self.value(py).get() - other);\n\n Ok(self.clone_ref(py))\n\n }\n\n\n", "file_path": "tests/test_class.rs", "rank": 94, "score": 142091.24244692747 }, { "content": " \"\\0\").as_bytes());\n\n Ok(::std::mem::transmute($crate::PyCapsule::import(py, caps_name)?))\n\n }\n\n }\n\n\n\n pub fn retrieve(py: $crate::Python) -> $crate::PyResult<CapsuleFn> {\n\n unsafe {\n\n INIT.call_once(|| { CAPS_FN = Some(import(py)) });\n\n match CAPS_FN.as_ref().unwrap() {\n\n &Ok(f) => Ok(f),\n\n &Err(ref e) => Err(e.clone_ref(py)),\n\n }\n\n }\n\n }\n\n }\n\n )\n\n}\n\n\n\nimpl PyCapsule {\n\n /// Retrieve the contents of a capsule pointing to some data as a reference.\n", "file_path": "src/objects/capsule.rs", "rank": 97, "score": 41.959334425770635 }, { "content": " /// of the Rust standard library).\n\n pub fn import(py: Python, name: &CStr) -> PyResult<*const c_void> {\n\n let caps_ptr = unsafe { PyCapsule_Import(name.as_ptr(), 0) };\n\n if caps_ptr.is_null() {\n\n return Err(PyErr::fetch(py));\n\n }\n\n Ok(caps_ptr)\n\n }\n\n\n\n /// Convenience method to create a capsule for some data\n\n ///\n\n /// The encapsuled data may be an array of functions, but it can't be itself a\n\n /// function directly.\n\n ///\n\n /// May panic when running out of memory.\n\n ///\n\n pub fn new_data<T, N>(py: Python, data: &'static T, name: N) -> Result<Self, NulError>\n\n where\n\n N: Into<Vec<u8>>,\n\n {\n", "file_path": "src/objects/capsule.rs", "rank": 98, "score": 41.306977557317296 }, { "content": "\n\nuse std::ptr;\n\n\n\nuse crate::conversion::{RefFromPyObject, ToPyObject};\n\nuse crate::err::{self, PyResult};\n\nuse crate::ffi;\n\nuse crate::objects::{exc, PyDict, PyObject, PyString, PyTuple};\n\nuse crate::python::{Python, PythonObject};\n\n\n\n/// Description of a python parameter; used for `parse_args()`.\n\npub struct ParamDescription<'a> {\n\n /// The name of the parameter.\n\n pub name: &'a str,\n\n /// Whether the parameter is optional.\n\n pub is_optional: 
bool,\n\n}\n\n\n\nimpl<'a> ParamDescription<'a> {\n\n /// Name, with leading `r#` stripped.\n\n pub fn name(&self) -> &str {\n", "file_path": "src/argparse.rs", "rank": 99, "score": 41.15164558017299 } ]
Rust
clients/client-core/src/client/real_messages_control/acknowledgement_control/retransmission_request_listener.rs
confio/nym
495ca35c1f46e7244d89fdf73402021d70df625d
use super::action_controller::{Action, ActionSender}; use super::PendingAcknowledgement; use super::RetransmissionRequestReceiver; use crate::client::{ real_messages_control::real_traffic_stream::{BatchRealMessageSender, RealMessage}, topology_control::TopologyAccessor, }; use futures::StreamExt; use log::*; use nymsphinx::preparer::MessagePreparer; use nymsphinx::{acknowledgements::AckKey, addressing::clients::Recipient}; use rand::{CryptoRng, Rng}; use std::sync::{Arc, Weak}; pub(super) struct RetransmissionRequestListener<R> where R: CryptoRng + Rng, { ack_key: Arc<AckKey>, ack_recipient: Recipient, message_preparer: MessagePreparer<R>, action_sender: ActionSender, real_message_sender: BatchRealMessageSender, request_receiver: RetransmissionRequestReceiver, topology_access: TopologyAccessor, } impl<R> RetransmissionRequestListener<R> where R: CryptoRng + Rng, { pub(super) fn new( ack_key: Arc<AckKey>, ack_recipient: Recipient, message_preparer: MessagePreparer<R>, action_sender: ActionSender, real_message_sender: BatchRealMessageSender, request_receiver: RetransmissionRequestReceiver, topology_access: TopologyAccessor, ) -> Self { RetransmissionRequestListener { ack_key, ack_recipient, message_preparer, action_sender, real_message_sender, request_receiver, topology_access, } } async fn on_retransmission_request(&mut self, timed_out_ack: Weak<PendingAcknowledgement>) { let timed_out_ack = match timed_out_ack.upgrade() { Some(timed_out_ack) => timed_out_ack, None => { debug!("We received an ack JUST as we were about to retransmit [1]"); return; } }; let packet_recipient = &timed_out_ack.recipient; let chunk_clone = timed_out_ack.message_chunk.clone(); let frag_id = chunk_clone.fragment_identifier(); let topology_permit = self.topology_access.get_read_permit().await; let topology_ref = match topology_permit .try_get_valid_topology_ref(&self.ack_recipient, Some(packet_recipient)) { Some(topology_ref) => topology_ref, None => { warn!("Could not retransmit the packet 
- the network topology is invalid"); self.action_sender .unbounded_send(Action::new_start_timer(frag_id)) .unwrap(); return; } }; let prepared_fragment = self .message_preparer .prepare_chunk_for_sending(chunk_clone, topology_ref, &self.ack_key, packet_recipient) .await .unwrap(); if Arc::strong_count(&timed_out_ack) == 1 { debug!("We received an ack JUST as we were about to retransmit [2]"); return; } drop(timed_out_ack); let new_delay = prepared_fragment.total_delay; self.action_sender .unbounded_send(Action::new_update_delay(frag_id, new_delay)) .unwrap(); self.real_message_sender .unbounded_send(vec![RealMessage::new( prepared_fragment.mix_packet, frag_id, )]) .unwrap(); } pub(super) async fn run(&mut self) { debug!("Started RetransmissionRequestListener"); while let Some(timed_out_ack) = self.request_receiver.next().await { self.on_retransmission_request(timed_out_ack).await; } error!("TODO: error msg. Or maybe panic?") } }
use super::action_controller::{Action, ActionSender}; use super::PendingAcknowledgement; use super::RetransmissionRequestReceiver; use crate::client::{ real_messages_control::real_traffic_stream::{BatchRealMessageSender, RealMessage}, topology_control::TopologyAccessor, }; use futures::StreamExt; use log::*; use nymsphinx::preparer::MessagePreparer; use nymsphinx::{acknowledgements::AckKey, addressing::clients::Recipient}; use rand::{CryptoRng, Rng}; use std::sync::{Arc, Weak}; pub(super) struct RetransmissionRequestListener<R> where R: CryptoRng + Rng, { ack_key: Arc<AckKey>, ack_recipient: Recipient, message_preparer: MessagePreparer<R>, action_sender: ActionSender, real_message_sender: BatchRealMessageSender, request_receiver: RetransmissionRequestReceiver, topology_access: TopologyAccessor, } impl<R> RetransmissionRequestListener<R> where R: CryptoRng + Rng, { pub(super) fn new( ack_key: Arc<AckKey>, ack_recipient: Recipient, message_preparer: MessagePreparer<R>, action_sender: ActionSender, real_message_sender: BatchRealMessageSender, request_receiver: RetransmissionRequestReceiver, topology_access: TopologyAccessor, ) -> Self { RetransmissionRequestListener { ack_key, ack_recipient, message_preparer, action_sender, real_message_sender, request_receiver, topology_access, } }
pub(super) async fn run(&mut self) { debug!("Started RetransmissionRequestListener"); while let Some(timed_out_ack) = self.request_receiver.next().await { self.on_retransmission_request(timed_out_ack).await; } error!("TODO: error msg. Or maybe panic?") } }
async fn on_retransmission_request(&mut self, timed_out_ack: Weak<PendingAcknowledgement>) { let timed_out_ack = match timed_out_ack.upgrade() { Some(timed_out_ack) => timed_out_ack, None => { debug!("We received an ack JUST as we were about to retransmit [1]"); return; } }; let packet_recipient = &timed_out_ack.recipient; let chunk_clone = timed_out_ack.message_chunk.clone(); let frag_id = chunk_clone.fragment_identifier(); let topology_permit = self.topology_access.get_read_permit().await; let topology_ref = match topology_permit .try_get_valid_topology_ref(&self.ack_recipient, Some(packet_recipient)) { Some(topology_ref) => topology_ref, None => { warn!("Could not retransmit the packet - the network topology is invalid"); self.action_sender .unbounded_send(Action::new_start_timer(frag_id)) .unwrap(); return; } }; let prepared_fragment = self .message_preparer .prepare_chunk_for_sending(chunk_clone, topology_ref, &self.ack_key, packet_recipient) .await .unwrap(); if Arc::strong_count(&timed_out_ack) == 1 { debug!("We received an ack JUST as we were about to retransmit [2]"); return; } drop(timed_out_ack); let new_delay = prepared_fragment.total_delay; self.action_sender .unbounded_send(Action::new_update_delay(frag_id, new_delay)) .unwrap(); self.real_message_sender .unbounded_send(vec![RealMessage::new( prepared_fragment.mix_packet, frag_id, )]) .unwrap(); }
function_block-full_function
[ { "content": "/// Entry point for splitting whole message into possibly multiple [`Set`]s.\n\n// TODO: make it take message: Vec<u8> instead\n\npub fn split_into_sets<R: Rng>(\n\n rng: &mut R,\n\n message: &[u8],\n\n max_plaintext_size: usize,\n\n) -> Vec<FragmentSet> {\n\n let num_of_sets = total_number_of_sets(message.len(), max_plaintext_size);\n\n if num_of_sets == 1 {\n\n let set_id = generate_set_id(rng);\n\n vec![prepare_fragment_set(\n\n message,\n\n set_id,\n\n None,\n\n None,\n\n max_plaintext_size,\n\n )]\n\n } else {\n\n let mut sets = Vec::with_capacity(num_of_sets);\n\n // pre-generate all ids for the sets\n\n let set_ids: Vec<_> = std::iter::repeat(())\n\n .map(|_| generate_set_id(rng))\n", "file_path": "common/nymsphinx/chunking/src/set.rs", "rank": 0, "score": 188436.8433953431 }, { "content": "pub fn prepare_identifier<R: RngCore + CryptoRng>(\n\n rng: &mut R,\n\n key: &AckKey,\n\n serialized_id: SerializedFragmentIdentifier,\n\n) -> Vec<u8> {\n\n let iv = random_iv::<AckEncryptionAlgorithm, _>(rng);\n\n let id_ciphertext = encrypt::<AckEncryptionAlgorithm>(key.inner(), &iv, &serialized_id);\n\n\n\n // IV || ID_CIPHERTEXT\n\n iv.into_iter().chain(id_ciphertext.into_iter()).collect()\n\n}\n\n\n", "file_path": "common/nymsphinx/acknowledgements/src/identifier.rs", "rank": 1, "score": 187038.50200717445 }, { "content": "// TODO: should we put an extra trait bound on this to require `CryptoRng`? 
Could there be any attacks\n\n// because of weak rng used?\n\npub fn sample_poisson_duration<R: Rng + ?Sized>(\n\n rng: &mut R,\n\n average_duration: time::Duration,\n\n) -> time::Duration {\n\n // this is our internal code used by our traffic streams\n\n // the error is only thrown if average delay is less than 0, which will never happen\n\n // so call to unwrap is perfectly safe here\n\n let exp = Exp::new(1.0 / average_duration.as_nanos() as f64).unwrap();\n\n time::Duration::from_nanos(exp.sample(rng).round() as u64)\n\n}\n", "file_path": "common/nymsphinx/src/utils/mod.rs", "rank": 2, "score": 177096.584643696 }, { "content": "pub fn generate_key<C, R>(rng: &mut R) -> Key<C>\n\nwhere\n\n C: NewStreamCipher,\n\n R: RngCore + CryptoRng,\n\n{\n\n let mut key = GenericArray::default();\n\n rng.fill_bytes(&mut key);\n\n key\n\n}\n\n\n", "file_path": "common/crypto/src/symmetric/stream_cipher.rs", "rank": 3, "score": 173336.28314066777 }, { "content": "pub fn random_iv<C, R>(rng: &mut R) -> IV<C>\n\nwhere\n\n C: NewStreamCipher,\n\n R: RngCore + CryptoRng,\n\n{\n\n let mut iv = GenericArray::default();\n\n rng.fill_bytes(&mut iv);\n\n iv\n\n}\n\n\n", "file_path": "common/crypto/src/symmetric/stream_cipher.rs", "rank": 4, "score": 173336.28314066777 }, { "content": "/// Generate an ephemeral encryption keypair and perform diffie-hellman to establish\n\n/// shared key with the remote.\n\npub fn new_ephemeral_shared_key<C, D, R>(\n\n rng: &mut R,\n\n remote_key: &encryption::PublicKey,\n\n) -> (encryption::KeyPair, Key<C>)\n\nwhere\n\n C: SyncStreamCipher + NewStreamCipher,\n\n D: Update + BlockInput + FixedOutput + Reset + Default + Clone,\n\n D::BlockSize: ArrayLength<u8>,\n\n D::OutputSize: ArrayLength<u8>,\n\n R: RngCore + CryptoRng,\n\n{\n\n let ephemeral_keypair = encryption::KeyPair::new_with_rng(rng);\n\n\n\n // after performing diffie-hellman we don't care about the private component anymore\n\n let dh_result = 
ephemeral_keypair.private_key().diffie_hellman(remote_key);\n\n\n\n // there is no reason for this to fail as our okm is expected to be only C::KeySize bytes\n\n let okm = hkdf::extract_then_expand::<D>(None, &dh_result, None, C::KeySize::to_usize())\n\n .expect(\"somehow too long okm was provided\");\n\n\n\n let derived_shared_key =\n\n Key::<C>::from_exact_iter(okm).expect(\"okm was expanded to incorrect length!\");\n\n\n\n (ephemeral_keypair, derived_shared_key)\n\n}\n\n\n", "file_path": "common/crypto/src/shared_key.rs", "rank": 5, "score": 151792.66369694175 }, { "content": "pub fn generate_loop_cover_packet<R>(\n\n rng: &mut R,\n\n topology: &NymTopology,\n\n ack_key: &AckKey,\n\n full_address: &Recipient,\n\n average_ack_delay: time::Duration,\n\n average_packet_delay: time::Duration,\n\n) -> Result<MixPacket, CoverMessageError>\n\nwhere\n\n R: RngCore + CryptoRng,\n\n{\n\n // we don't care about total ack delay - we will not be retransmitting it anyway\n\n let (_, ack_bytes) =\n\n generate_loop_cover_surb_ack(rng, topology, ack_key, full_address, average_ack_delay)?\n\n .prepare_for_sending();\n\n\n\n // cover message can't be distinguishable from a normal traffic so we have to go through\n\n // all the effort of key generation, encryption, etc. 
Note here we are generating shared key\n\n // with ourselves!\n\n let (ephemeral_keypair, shared_key) = new_ephemeral_shared_key::<\n", "file_path": "common/nymsphinx/cover/src/lib.rs", "rank": 6, "score": 126579.06141722709 }, { "content": "pub fn generate_loop_cover_surb_ack<R>(\n\n rng: &mut R,\n\n topology: &NymTopology,\n\n ack_key: &AckKey,\n\n full_address: &Recipient,\n\n average_ack_delay: time::Duration,\n\n) -> Result<SURBAck, CoverMessageError>\n\nwhere\n\n R: RngCore + CryptoRng,\n\n{\n\n Ok(SURBAck::construct(\n\n rng,\n\n full_address,\n\n ack_key,\n\n COVER_FRAG_ID.to_bytes(),\n\n average_ack_delay,\n\n topology,\n\n None,\n\n )?)\n\n}\n\n\n", "file_path": "common/nymsphinx/cover/src/lib.rs", "rank": 7, "score": 124654.17201710159 }, { "content": "fn setup_logging() {\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .filter_module(\"sled\", log::LevelFilter::Warn)\n\n .filter_module(\"tungstenite\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_tungstenite\", log::LevelFilter::Warn)\n\n .init();\n\n}\n", "file_path": "gateway/src/main.rs", "rank": 8, "score": 119222.57872037357 }, { "content": "fn setup_logging() {\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n 
.filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .init();\n\n}\n", "file_path": "mixnode/src/main.rs", "rank": 9, "score": 119222.57872037357 }, { "content": "fn setup_logging() {\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .filter_module(\"tungstenite\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_tungstenite\", log::LevelFilter::Warn)\n\n .init();\n\n}\n", "file_path": "clients/socks5/src/main.rs", "rank": 10, "score": 116543.45059760493 }, { "content": "fn setup_logging() {\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .filter_module(\"tungstenite\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_tungstenite\", log::LevelFilter::Warn)\n\n .init();\n\n}\n", "file_path": "clients/native/src/main.rs", "rank": 11, "score": 116543.45059760493 }, { "content": "fn setup_logging() 
{\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .filter_module(\"sled\", log::LevelFilter::Warn)\n\n .filter_module(\"tungstenite\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_tungstenite\", log::LevelFilter::Warn)\n\n .init();\n\n}\n\n\n", "file_path": "network-monitor/src/main.rs", "rank": 12, "score": 116543.45059760493 }, { "content": "fn new_notifier(\n\n encryption_keypair: encryption::KeyPair,\n\n directory_client: Arc<directory_client::Client>,\n\n mixnet_receiver: MixnetReceiver,\n\n test_run_receiver: TestRunUpdateReceiver,\n\n) -> Notifier {\n\n Notifier::new(\n\n mixnet_receiver,\n\n encryption_keypair,\n\n directory_client,\n\n test_run_receiver,\n\n )\n\n}\n\n\n", "file_path": "network-monitor/src/main.rs", "rank": 13, "score": 116540.26483932514 }, { "content": "fn new_packet_sender(\n\n directory_client: Arc<directory_client::Client>,\n\n tested_network: TestedNetwork,\n\n self_address: Recipient,\n\n test_run_sender: TestRunUpdateSender,\n\n) -> PacketSender {\n\n PacketSender::new(\n\n directory_client,\n\n tested_network,\n\n self_address,\n\n test_run_sender,\n\n )\n\n}\n\n\n", "file_path": "network-monitor/src/main.rs", "rank": 14, "score": 114014.79238802934 }, { "content": "fn setup_logging() {\n\n let mut log_builder = pretty_env_logger::formatted_timed_builder();\n\n if let Ok(s) = ::std::env::var(\"RUST_LOG\") {\n\n log_builder.parse_filters(&s);\n\n } else {\n\n // default to 'Info'\n\n log_builder.filter(None, 
log::LevelFilter::Info);\n\n }\n\n\n\n log_builder\n\n .filter_module(\"hyper\", log::LevelFilter::Warn)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Warn)\n\n .filter_module(\"reqwest\", log::LevelFilter::Warn)\n\n .filter_module(\"mio\", log::LevelFilter::Warn)\n\n .filter_module(\"want\", log::LevelFilter::Warn)\n\n .init();\n\n}\n", "file_path": "service-providers/sphinx-socks/src/main.rs", "rank": 15, "score": 111633.05212892286 }, { "content": "/// Construct a new gateway client.\n\npub fn new_gateway_client(\n\n gateway: gateway::Node,\n\n identity_keypair: identity::KeyPair,\n\n ack_sender: AckSender,\n\n mixnet_messages_sender: MixnetSender,\n\n) -> GatewayClient {\n\n let timeout = time::Duration::from_millis(500);\n\n let identity_arc = Arc::new(identity_keypair);\n\n\n\n gateway_client::GatewayClient::new(\n\n gateway.client_listener,\n\n identity_arc,\n\n gateway.identity_key,\n\n None,\n\n mixnet_messages_sender,\n\n ack_sender,\n\n timeout,\n\n )\n\n}\n\n\n", "file_path": "network-monitor/src/main.rs", "rank": 16, "score": 108770.68707143975 }, { "content": "fn new_directory_client(directory_uri: &str) -> Arc<directory_client::Client> {\n\n let config = directory_client::Config::new(directory_uri.to_string());\n\n Arc::new(DirectoryClient::new(config))\n\n}\n\n\n", "file_path": "network-monitor/src/main.rs", "rank": 17, "score": 89505.02357662226 }, { "content": "// Note: you should NEVER create more than a single instance of this using 'new()'.\n\n// You should always use .clone() to create additional instances\n\nstruct MixMetrics {\n\n inner: Arc<Mutex<MixMetricsInner>>,\n\n}\n\n\n", "file_path": "mixnode/src/node/metrics.rs", "rank": 18, "score": 76585.55639711444 }, { "content": "struct MetricsInformer {\n\n total_received: u64,\n\n sent_map: SentMetricsMap,\n\n\n\n running_stats_logging_delay: Duration,\n\n last_reported_stats: SystemTime,\n\n}\n\n\n\nimpl MetricsInformer {\n\n fn new(running_stats_logging_delay: Duration) -> Self 
{\n\n MetricsInformer {\n\n total_received: 0,\n\n sent_map: HashMap::new(),\n\n running_stats_logging_delay,\n\n last_reported_stats: SystemTime::now(),\n\n }\n\n }\n\n\n\n fn should_log_running_stats(&self) -> bool {\n\n self.last_reported_stats + self.running_stats_logging_delay < SystemTime::now()\n", "file_path": "mixnode/src/node/metrics.rs", "rank": 19, "score": 76575.05904689635 }, { "content": "struct MetricsSender {\n\n metrics: MixMetrics,\n\n directory_client: directory_client::Client,\n\n pub_key_str: String,\n\n sending_delay: Duration,\n\n metrics_informer: MetricsInformer,\n\n}\n\n\n\nimpl MetricsSender {\n\n fn new(\n\n metrics: MixMetrics,\n\n directory_server: String,\n\n pub_key_str: String,\n\n sending_delay: Duration,\n\n running_logging_delay: Duration,\n\n ) -> Self {\n\n MetricsSender {\n\n metrics,\n\n directory_client: directory_client::Client::new(directory_client::Config::new(\n\n directory_server,\n", "file_path": "mixnode/src/node/metrics.rs", "rank": 20, "score": 76575.05904689635 }, { "content": "struct MetricsReceiver {\n\n metrics: MixMetrics,\n\n metrics_rx: mpsc::UnboundedReceiver<MetricEvent>,\n\n}\n\n\n\nimpl MetricsReceiver {\n\n fn new(metrics: MixMetrics, metrics_rx: mpsc::UnboundedReceiver<MetricEvent>) -> Self {\n\n MetricsReceiver {\n\n metrics,\n\n metrics_rx,\n\n }\n\n }\n\n\n\n fn start(mut self) -> JoinHandle<()> {\n\n tokio::spawn(async move {\n\n while let Some(metrics_data) = self.metrics_rx.next().await {\n\n match metrics_data {\n\n MetricEvent::Received => self.metrics.increment_received_metrics().await,\n\n MetricEvent::Sent(destination) => {\n\n self.metrics.increment_sent_metrics(destination).await\n\n }\n\n }\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "mixnode/src/node/metrics.rs", "rank": 21, "score": 76575.05904689635 }, { "content": "struct MixMetricsInner {\n\n received: u64,\n\n sent: SentMetricsMap,\n\n}\n\n\n\nimpl MixMetrics {\n\n pub(crate) fn new() -> Self {\n\n MixMetrics {\n\n inner: 
Arc::new(Mutex::new(MixMetricsInner {\n\n received: 0,\n\n sent: HashMap::new(),\n\n })),\n\n }\n\n }\n\n\n\n async fn increment_received_metrics(&mut self) {\n\n let mut unlocked = self.inner.lock().await;\n\n unlocked.received += 1;\n\n }\n\n\n", "file_path": "mixnode/src/node/metrics.rs", "rank": 22, "score": 75307.58595640468 }, { "content": "#[derive(PartialEq, Debug, Clone)]\n\nstruct ReconstructionBuffer {\n\n /// Easier way to determine if buffer has received all fragments it expected to get.\n\n /// This way it is not required to iterate through the entire `fragments` vector looking for\n\n /// possible `None` elements.\n\n is_complete: bool,\n\n\n\n /// Once all fragments are received, the value of `previous_fragments_set_id` is copied\n\n /// from the first `Fragment` in the set.\n\n previous_fragments_set_id: Option<i32>,\n\n /// Once all fragments are received, the value of `next_fragments_set_id` is copied\n\n /// from the last `Fragment` in the set (assuming the set is full, i.e. it contains\n\n /// `u8::max_value()` elements).\n\n next_fragments_set_id: Option<i32>,\n\n\n\n /// The actual `Fragment` data held by the `ReconstructionBuffer`. 
When created it is already\n\n /// appropriately resized and all missing fragments are set to a `None`, thus keeping\n\n /// everything in order the whole time, allowing for O(1) insertions and O(n) reconstruction.\n\n fragments: Vec<Option<Fragment>>,\n\n}\n\n\n", "file_path": "common/nymsphinx/chunking/src/reconstruction.rs", "rank": 23, "score": 75307.58595640468 }, { "content": "struct Inner {\n\n /// Maximum number of times particular sphinx-secret can be re-used before being rotated.\n\n secret_reuse_limit: usize,\n\n\n\n /// Currently used initial sphinx-secret for the packets sent.\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n current_initial_secret: RwLock<EphemeralSecret>,\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n // this is a temporary work-around for wasm (which currently does not have retransmission\n\n // and hence will not require multi-thread access) and also we can't import tokio's RWLock\n\n // in wasm.\n\n current_initial_secret: EphemeralSecret,\n\n\n\n /// If the client is running as VPN it's expected to keep re-using the same initial secret\n\n /// for a while so that the mixnodes could cache some secret derivation results. 
However,\n\n /// we should reset it every once in a while.\n\n packets_with_current_secret: AtomicUsize,\n\n}\n\n\n", "file_path": "common/nymsphinx/src/preparer/vpn_manager.rs", "rank": 24, "score": 75307.58595640468 }, { "content": "fn main() {\n\n dotenv::dotenv().ok();\n\n setup_logging();\n\n println!(\"{}\", banner());\n\n\n\n let arg_matches = App::new(\"Nym Mixnode\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"Nymtech\")\n\n .about(\"Implementation of the Loopix-based Mixnode\")\n\n .subcommand(commands::init::command_args())\n\n .subcommand(commands::run::command_args())\n\n .subcommand(commands::upgrade::command_args())\n\n .get_matches();\n\n\n\n execute(arg_matches);\n\n}\n\n\n", "file_path": "mixnode/src/main.rs", "rank": 25, "score": 75040.5431954882 }, { "content": "fn main() {\n\n dotenv::dotenv().ok();\n\n setup_logging();\n\n println!(\"{}\", banner());\n\n\n\n let arg_matches = App::new(\"Nym Mixnet Gateway\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"Nymtech\")\n\n .about(\"Implementation of the Nym Mixnet Gateway\")\n\n .subcommand(commands::init::command_args())\n\n .subcommand(commands::run::command_args())\n\n .subcommand(commands::upgrade::command_args())\n\n .get_matches();\n\n\n\n execute(arg_matches);\n\n}\n\n\n", "file_path": "gateway/src/main.rs", "rank": 26, "score": 75040.5431954882 }, { "content": "#[derive(Default)]\n\nstruct NodeResult {\n\n ip_v4_compatible: bool,\n\n ip_v6_compatible: bool,\n\n}\n\n\n", "file_path": "network-monitor/src/notifications/test_run.rs", "rank": 27, "score": 74110.73878779501 }, { "content": "#[derive(Default)]\n\nstruct TestReport {\n\n total_sent: usize,\n\n total_received: usize,\n\n malformed: Vec<String>,\n\n outdated: Vec<(String, String)>,\n\n\n\n // below are only populated if we're going to be printing the report\n\n only_ipv4_compatible: Vec<String>, // can't speak v6, but can speak v4\n\n only_ipv6_compatible: Vec<String>, // can't speak v4, but can speak v6\n\n 
completely_unroutable: Vec<String>, // can't speak either v4 or v6\n\n fully_working: Vec<String>,\n\n}\n\n\n\nimpl TestReport {\n\n fn print(&self, detailed: bool) {\n\n info!(target: \"Test Report\", \"Sent total of {} packets\", self.total_sent);\n\n info!(target: \"Test Report\", \"Received total of {} packets\", self.total_received);\n\n info!(target: \"Test Report\", \"{} nodes are malformed\", self.malformed.len());\n\n info!(target: \"Test Report\", \"{} nodes are outdated\", self.outdated.len());\n\n info!(target: \"Test Report\", \"{} nodes speak ONLY IPv4 (NO IPv6 connectivity)\", self.only_ipv4_compatible.len());\n", "file_path": "network-monitor/src/notifications/test_run.rs", "rank": 28, "score": 74110.73878779501 }, { "content": "fn main() {\n\n dotenv::dotenv().ok();\n\n setup_logging();\n\n println!(\"{}\", banner());\n\n\n\n let arg_matches = App::new(\"Nym Socks5 Proxy\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"Nymtech\")\n\n .about(\"A Socks5 localhost proxy that converts incoming messages to Sphinx and sends them to a Nym address\")\n\n .subcommand(commands::init::command_args())\n\n .subcommand(commands::run::command_args())\n\n .subcommand(commands::upgrade::command_args())\n\n .get_matches();\n\n\n\n execute(arg_matches);\n\n}\n\n\n", "file_path": "clients/socks5/src/main.rs", "rank": 29, "score": 73623.18639064737 }, { "content": "#[wasm_bindgen_test]\n\nfn pass() {\n\n assert_eq!(1 + 1, 2);\n\n}\n", "file_path": "clients/webassembly/tests/web.rs", "rank": 30, "score": 73623.18639064737 }, { "content": "fn main() {\n\n dotenv::dotenv().ok();\n\n setup_logging();\n\n println!(\"{}\", banner());\n\n\n\n let arg_matches = App::new(\"Nym Client\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"Nymtech\")\n\n .about(\"Implementation of the Nym Client\")\n\n .subcommand(commands::init::command_args())\n\n .subcommand(commands::run::command_args())\n\n .subcommand(commands::upgrade::command_args())\n\n 
.get_matches();\n\n\n\n execute(arg_matches);\n\n}\n\n\n", "file_path": "clients/native/src/main.rs", "rank": 31, "score": 73623.18639064737 }, { "content": "struct ActiveConnection {\n\n is_closed: bool,\n\n connection_sender: Option<ConnectionSender>,\n\n ordered_buffer: OrderedMessageBuffer,\n\n}\n\n\n\nimpl ActiveConnection {\n\n fn write_to_buf(&mut self, payload: Vec<u8>) {\n\n let ordered_message = match OrderedMessage::try_from_bytes(payload) {\n\n Ok(msg) => msg,\n\n Err(err) => {\n\n error!(\"Malformed ordered message - {:?}\", err);\n\n return;\n\n }\n\n };\n\n self.ordered_buffer.write(ordered_message);\n\n }\n\n\n\n fn read_from_buf(&mut self) -> Option<Vec<u8>> {\n\n self.ordered_buffer.read()\n", "file_path": "common/socks5/proxy-helpers/src/connection_controller.rs", "rank": 32, "score": 72978.77444053727 }, { "content": "struct RequestReceiver {\n\n received_buffer: ReceivedMessagesBuffer,\n\n query_receiver: ReceivedBufferRequestReceiver,\n\n}\n\n\n\nimpl RequestReceiver {\n\n fn new(\n\n received_buffer: ReceivedMessagesBuffer,\n\n query_receiver: ReceivedBufferRequestReceiver,\n\n ) -> Self {\n\n RequestReceiver {\n\n received_buffer,\n\n query_receiver,\n\n }\n\n }\n\n\n\n fn start(mut self, handle: &Handle) -> JoinHandle<()> {\n\n handle.spawn(async move {\n\n while let Some(request) = self.query_receiver.next().await {\n\n match request {\n", "file_path": "clients/client-core/src/client/received_buffer.rs", "rank": 33, "score": 72978.77444053727 }, { "content": "fn check_if_up_to_date() {\n\n let monitor_version = env!(\"CARGO_PKG_VERSION\");\n\n let good_v4_topology = good_topology::new_v4();\n\n for (_, layer_mixes) in good_v4_topology.mixes().into_iter() {\n\n for mix in layer_mixes.into_iter() {\n\n if !version_checker::is_minor_version_compatible(monitor_version, &*mix.version) {\n\n panic!(\n\n \"Our good topology is not compatible with monitor! 
Mix runs {}, we have {}\",\n\n mix.version, monitor_version\n\n )\n\n }\n\n }\n\n }\n\n\n\n for gateway in good_v4_topology.gateways().into_iter() {\n\n if !version_checker::is_minor_version_compatible(monitor_version, &*gateway.version) {\n\n panic!(\n\n \"Our good topology is not compatible with monitor! Gateway runs {}, we have {}\",\n\n gateway.version, monitor_version\n\n )\n", "file_path": "network-monitor/src/main.rs", "rank": 34, "score": 72289.46777588043 }, { "content": "// Note: you should NEVER create more than a single instance of this using 'new()'.\n\n// You should always use .clone() to create additional instances\n\nstruct ReceivedMessagesBuffer {\n\n inner: Arc<Mutex<ReceivedMessagesBufferInner>>,\n\n\n\n /// Storage containing keys to all [`ReplySURB`]s ever sent out that we did not receive back.\n\n // There's no need to put it behind a Mutex since it's already properly concurrent\n\n reply_key_storage: ReplyKeyStorage,\n\n}\n\n\n\nimpl ReceivedMessagesBuffer {\n\n fn new(\n\n local_encryption_keypair: Arc<encryption::KeyPair>,\n\n reply_key_storage: ReplyKeyStorage,\n\n ) -> Self {\n\n ReceivedMessagesBuffer {\n\n inner: Arc::new(Mutex::new(ReceivedMessagesBufferInner {\n\n messages: Vec::new(),\n\n local_encryption_keypair,\n\n message_receiver: MessageReceiver::new(),\n\n message_sender: None,\n\n recently_reconstructed: HashSet::new(),\n", "file_path": "clients/client-core/src/client/received_buffer.rs", "rank": 35, "score": 71917.05341507618 }, { "content": "struct CacheInvalidator {\n\n entry_ttl: Duration,\n\n vpn_key_cache: Arc<DashMap<SharedSecret, CachedKeys>>,\n\n expirations: NonExhaustiveDelayQueue<SharedSecret>,\n\n action_receiver: InvalidatorActionReceiver,\n\n}\n\n\n\n// we do not have a strong requirement of invalidating things EXACTLY after their TTL expires.\n\n// we want them to be eventually gone in a relatively timely manner.\n\nimpl CacheInvalidator {\n\n // two obvious ways I've seen of running this were as follows:\n\n 
//\n\n // 1) every X second, purge all expired entries\n\n // pros: simpler to implement\n\n // cons: will require to obtain write lock multiple times in quick succession\n\n //\n\n // 2) purge entry as soon as it expires\n\n // pros: the lock situation will be spread more in time\n\n // cons: possibly less efficient?\n\n\n", "file_path": "common/mixnode-common/src/cached_packet_processor/cache.rs", "rank": 36, "score": 71906.55606485809 }, { "content": "struct FragmentedMessageReceiver {\n\n received_buffer: ReceivedMessagesBuffer,\n\n mixnet_packet_receiver: MixnetMessageReceiver,\n\n}\n\n\n\nimpl FragmentedMessageReceiver {\n\n fn new(\n\n received_buffer: ReceivedMessagesBuffer,\n\n mixnet_packet_receiver: MixnetMessageReceiver,\n\n ) -> Self {\n\n FragmentedMessageReceiver {\n\n received_buffer,\n\n mixnet_packet_receiver,\n\n }\n\n }\n\n fn start(mut self, handle: &Handle) -> JoinHandle<()> {\n\n handle.spawn(async move {\n\n while let Some(new_messages) = self.mixnet_packet_receiver.next().await {\n\n self.received_buffer.handle_new_received(new_messages).await;\n\n }\n", "file_path": "clients/client-core/src/client/received_buffer.rs", "rank": 37, "score": 71906.55606485809 }, { "content": "fn show_incentives_url() {\n\n println!(\"\\n##### NOTE #####\");\n\n println!(\n\n \"\\nIf you would like to join our testnet incentives program, please visit https://nymtech.net/incentives\"\n\n );\n\n println!(\"\\n\\n\");\n\n}\n\n\n\nasync fn choose_layer(matches: &ArgMatches<'_>, directory_server: String) -> u64 {\n\n let max_layer = DEFAULT_NUM_MIX_HOPS;\n\n if let Some(layer) = matches.value_of(\"layer\").map(|layer| layer.parse::<u64>()) {\n\n if let Err(err) = layer {\n\n // if layer was overridden, it must be parsable\n\n panic!(\"Invalid layer value provided - {:?}\", err);\n\n }\n\n let layer = layer.unwrap();\n\n if layer <= max_layer as u64 && layer > 0 {\n\n return layer;\n\n }\n\n }\n", "file_path": "mixnode/src/commands/init.rs", "rank": 38, "score": 
71032.19629200175 }, { "content": "fn show_incentives_url() {\n\n println!(\"\\n##### NOTE #####\");\n\n println!(\n\n \"\\nIf you would like to join our testnet incentives program, please visit https://nymtech.net/incentives\"\n\n );\n\n println!(\"\\n\\n\");\n\n}\n\n\n", "file_path": "gateway/src/commands/init.rs", "rank": 39, "score": 71032.19629200175 }, { "content": "struct ReceivedMessagesBufferInner {\n\n messages: Vec<ReconstructedMessage>,\n\n local_encryption_keypair: Arc<encryption::KeyPair>,\n\n\n\n // TODO: looking how it 'looks' here, perhaps `MessageReceiver` should be renamed to something\n\n // else instead.\n\n message_receiver: MessageReceiver,\n\n message_sender: Option<ReconstructedMessagesSender>,\n\n\n\n // TODO: this will get cleared upon re-running the client\n\n // but perhaps it should be changed to include timestamps of when the message was reconstructed\n\n // and every now and then remove ids older than X\n\n recently_reconstructed: HashSet<i32>,\n\n}\n\n\n\nimpl ReceivedMessagesBufferInner {\n\n fn process_received_fragment(&mut self, raw_fragment: Vec<u8>) -> Option<ReconstructedMessage> {\n\n let fragment_data = match self\n\n .message_receiver\n\n .recover_plaintext(self.local_encryption_keypair.private_key(), raw_fragment)\n", "file_path": "clients/client-core/src/client/received_buffer.rs", "rank": 40, "score": 70889.47512260202 }, { "content": "fn banner() -> String {\n\n format!(\n\n r#\"\n\n\n\n _ __ _ _ _ __ ___\n\n | '_ \\| | | | '_ \\ _ \\\n\n | | | | |_| | | | | | |\n\n |_| |_|\\__, |_| |_| |_|\n\n |___/\n\n\n\n (mixnode - version {:})\n\n\n\n \"#,\n\n env!(\"CARGO_PKG_VERSION\")\n\n )\n\n}\n\n\n", "file_path": "mixnode/src/main.rs", "rank": 41, "score": 70758.63848429418 }, { "content": "fn banner() -> String {\n\n format!(\n\n r#\"\n\n\n\n _ __ _ _ _ __ ___\n\n | '_ \\| | | | '_ \\ _ \\\n\n | | | | |_| | | | | | |\n\n |_| |_|\\__, |_| |_| |_|\n\n |___/\n\n\n\n (gateway - version {:})\n\n\n\n \"#,\n\n 
env!(\"CARGO_PKG_VERSION\")\n\n )\n\n}\n\n\n", "file_path": "gateway/src/main.rs", "rank": 42, "score": 70758.63848429418 }, { "content": "/// Given part of the underlying message as well id of the set as well as its potential linked sets,\n\n/// correctly delegates to appropriate set constructor.\n\nfn prepare_fragment_set(\n\n message: &[u8],\n\n id: i32,\n\n previous_link_id: Option<i32>,\n\n next_link_id: Option<i32>,\n\n max_plaintext_size: usize,\n\n) -> FragmentSet {\n\n if previous_link_id.is_some() || next_link_id.is_some() {\n\n prepare_linked_fragment_set(\n\n message,\n\n id,\n\n previous_link_id,\n\n next_link_id,\n\n max_plaintext_size,\n\n )\n\n } else {\n\n // the bounds on whether the message fits in an unlinked set should have been done by the callee\n\n // when determining ids of other sets\n\n prepare_unlinked_fragmented_set(message, id, max_plaintext_size)\n\n }\n\n}\n\n\n", "file_path": "common/nymsphinx/chunking/src/set.rs", "rank": 43, "score": 69844.98227780113 }, { "content": "fn banner() -> String {\n\n format!(\n\n r#\"\n\n\n\n _ __ _ _ _ __ ___\n\n | '_ \\| | | | '_ \\ _ \\\n\n | | | | |_| | | | | | |\n\n |_| |_|\\__, |_| |_| |_|\n\n |___/\n\n\n\n (socks5 proxy - version {:})\n\n\n\n \"#,\n\n env!(\"CARGO_PKG_VERSION\")\n\n )\n\n}\n\n\n", "file_path": "clients/socks5/src/main.rs", "rank": 44, "score": 69424.91986952725 }, { "content": "fn banner() -> String {\n\n format!(\n\n r#\"\n\n\n\n _ __ _ _ _ __ ___\n\n | '_ \\| | | | '_ \\ _ \\\n\n | | | | |_| | | | | | |\n\n |_| |_|\\__, |_| |_| |_|\n\n |___/\n\n\n\n (client - version {:})\n\n\n\n \"#,\n\n env!(\"CARGO_PKG_VERSION\")\n\n )\n\n}\n\n\n", "file_path": "clients/native/src/main.rs", "rank": 45, "score": 69424.91986952725 }, { "content": "/// Splits underlying message into multiple `Fragment`s while all of them fit in a single\n\n/// `Set` (number of `Fragment`s <= 255)\n\nfn prepare_unlinked_fragmented_set(\n\n message: &[u8],\n\n id: i32,\n\n max_plaintext_size: usize,\n\n) -> 
FragmentSet {\n\n let pre_casted_frags = (message.len() as f64\n\n / unlinked_fragment_payload_max_len(max_plaintext_size) as f64)\n\n .ceil() as usize;\n\n\n\n debug_assert!(pre_casted_frags <= u8::max_value() as usize);\n\n let num_fragments = pre_casted_frags as u8;\n\n\n\n let mut fragments = Vec::with_capacity(num_fragments as usize);\n\n\n\n for i in 1..(pre_casted_frags + 1) {\n\n // we can't use u8 directly here as upper (NON-INCLUSIVE, so it would always fit) bound could be u8::max_value() + 1\n\n let lb = (i as usize - 1) * unlinked_fragment_payload_max_len(max_plaintext_size);\n\n let ub = usize::min(\n\n message.len(),\n\n i as usize * unlinked_fragment_payload_max_len(max_plaintext_size),\n", "file_path": "common/nymsphinx/chunking/src/set.rs", "rank": 46, "score": 68722.1288576765 }, { "content": "/// Similarly to `prepare_unlinked_fragmented_set`, splits part of underlying message into\n\n/// multiple `Fragment`s. The byte slice of the message *must* fit into a single linked set, however,\n\n/// the whole message itself is still longer than a single `Set` (number of `Fragment`s > 255).\n\n/// During the process of splitting message, this function is called multiple times.\n\nfn prepare_linked_fragment_set(\n\n message: &[u8],\n\n id: i32,\n\n previous_link_id: Option<i32>,\n\n next_link_id: Option<i32>,\n\n max_plaintext_size: usize,\n\n) -> FragmentSet {\n\n // determine number of fragments in the set:\n\n let num_frags_usize = if next_link_id.is_some() {\n\n u8::max_value() as usize\n\n } else {\n\n // we know this set is linked, if it's not post-linked then it MUST BE pre-linked\n\n let tail_len = if message.len() >= linked_fragment_payload_max_len(max_plaintext_size) {\n\n message.len() - linked_fragment_payload_max_len(max_plaintext_size)\n\n } else {\n\n 0\n\n };\n\n let pre_casted_frags = 1\n\n + (tail_len as f64 / unlinked_fragment_payload_max_len(max_plaintext_size) as f64)\n\n .ceil() as usize;\n", "file_path": 
"common/nymsphinx/chunking/src/set.rs", "rank": 47, "score": 68722.1288576765 }, { "content": "#[test]\n\nfn empty_message_does_not_affect_ordering() {\n\n let mut msg1 = OrderedMessage {\n\n data: vec![255, 255, 255],\n\n index: 1,\n\n };\n\n\n\n let mut msg2 = OrderedMessage {\n\n data: vec![],\n\n index: 2,\n\n };\n\n\n\n assert!(msg1 < msg2);\n\n\n\n msg1.index = 2;\n\n msg2.index = 1;\n\n\n\n assert!(msg1 > msg2);\n\n}\n", "file_path": "common/socks5/ordered-buffer/src/message.rs", "rank": 48, "score": 67658.54052720615 }, { "content": "fn usage() -> &'static str {\n\n \"usage: --help to see available options.\\n\\n\"\n\n}\n\n\n", "file_path": "mixnode/src/main.rs", "rank": 49, "score": 67084.95216476143 }, { "content": "fn usage() -> &'static str {\n\n \"usage: --help to see available options.\\n\\n\"\n\n}\n\n\n", "file_path": "gateway/src/main.rs", "rank": 50, "score": 67084.95216476143 }, { "content": "#[wasm_bindgen]\n\npub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "clients/webassembly/src/lib.rs", "rank": 51, "score": 66980.43437144795 }, { "content": "pub fn recover_identifier(\n\n key: &AckKey,\n\n iv_id_ciphertext: &[u8],\n\n) -> Option<SerializedFragmentIdentifier> {\n\n // The content of an 'ACK' packet consists of AckEncryptionAlgorithm::IV followed by\n\n // serialized FragmentIdentifier\n\n if iv_id_ciphertext.len() != PacketSize::ACKPacket.plaintext_size() {\n\n return None;\n\n }\n\n\n\n let iv_size = <AckEncryptionAlgorithm as NewStreamCipher>::NonceSize::to_usize();\n\n let iv = 
iv_from_slice::<AckEncryptionAlgorithm>(&iv_id_ciphertext[..iv_size]);\n\n\n\n let id = stream_cipher::decrypt::<AckEncryptionAlgorithm>(\n\n key.inner(),\n\n iv,\n\n &iv_id_ciphertext[iv_size..],\n\n );\n\n\n\n let mut id_arr = [0u8; FRAG_ID_LEN];\n", "file_path": "common/nymsphinx/acknowledgements/src/identifier.rs", "rank": 52, "score": 66980.43437144795 }, { "content": "/// Returns number of fragments the message will be split to as well as number of available\n\n/// bytes in the final fragment\n\npub fn number_of_required_fragments(\n\n message_len: usize,\n\n plaintext_per_fragment: usize,\n\n) -> (usize, usize) {\n\n let max_unlinked = unlinked_fragment_payload_max_len(plaintext_per_fragment);\n\n let max_linked = linked_fragment_payload_max_len(plaintext_per_fragment);\n\n\n\n match set::total_number_of_sets(message_len, plaintext_per_fragment) {\n\n n if n == 1 => {\n\n // is if it's a single fragment message\n\n if message_len < max_unlinked {\n\n return (1, max_unlinked - message_len);\n\n }\n\n\n\n // all fragments will be 'unlinked'\n\n let quot = message_len / max_unlinked;\n\n let rem = message_len % max_unlinked;\n\n\n\n if rem == 0 {\n\n (quot, 0)\n", "file_path": "common/nymsphinx/chunking/src/lib.rs", "rank": 53, "score": 65857.58095132331 }, { "content": "fn dummy_message() -> ClientFile {\n\n ClientFile {\n\n content: DUMMY_MESSAGE_CONTENT.to_vec(),\n\n path: Default::default(),\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ClientFile {\n\n content: Vec<u8>,\n\n path: PathBuf,\n\n}\n\n\n\nimpl ClientFile {\n\n fn new(content: Vec<u8>, path: PathBuf) -> Self {\n\n ClientFile { content, path }\n\n }\n\n\n\n pub(crate) fn into_tuple(self) -> (Vec<u8>, PathBuf) {\n\n (self.content, self.path)\n", "file_path": "gateway/src/node/storage/inboxes.rs", "rank": 54, "score": 65857.58095132331 }, { "content": "fn execute(matches: ArgMatches) {\n\n match matches.subcommand() {\n\n (\"init\", Some(m)) => commands::init::execute(m),\n\n (\"run\", 
Some(m)) => commands::run::execute(m),\n\n (\"upgrade\", Some(m)) => commands::upgrade::execute(m),\n\n _ => println!(\"{}\", usage()),\n\n }\n\n}\n\n\n", "file_path": "gateway/src/main.rs", "rank": 55, "score": 65827.68068088275 }, { "content": "fn usage() -> &'static str {\n\n \"usage: --help to see available options.\\n\\n\"\n\n}\n\n\n", "file_path": "clients/socks5/src/main.rs", "rank": 56, "score": 65827.68068088275 }, { "content": "fn usage() -> &'static str {\n\n \"usage: --help to see available options.\\n\\n\"\n\n}\n\n\n", "file_path": "clients/native/src/main.rs", "rank": 57, "score": 65827.68068088275 }, { "content": "fn execute(matches: ArgMatches) {\n\n match matches.subcommand() {\n\n (\"init\", Some(m)) => commands::init::execute(m),\n\n (\"run\", Some(m)) => commands::run::execute(m),\n\n (\"upgrade\", Some(m)) => commands::upgrade::execute(m),\n\n _ => println!(\"{}\", usage()),\n\n }\n\n}\n\n\n", "file_path": "mixnode/src/main.rs", "rank": 58, "score": 65827.68068088275 }, { "content": "fn execute(matches: ArgMatches) {\n\n match matches.subcommand() {\n\n (\"init\", Some(m)) => commands::init::execute(m),\n\n (\"run\", Some(m)) => commands::run::execute(m),\n\n (\"upgrade\", Some(m)) => commands::upgrade::execute(m),\n\n _ => println!(\"{}\", usage()),\n\n }\n\n}\n\n\n", "file_path": "clients/native/src/main.rs", "rank": 59, "score": 64640.46666668212 }, { "content": "/// Perform HKDF `extract` then `expand` as a single step.\n\npub fn extract_then_expand<D>(\n\n salt: Option<&[u8]>,\n\n ikm: &[u8],\n\n info: Option<&[u8]>,\n\n okm_length: usize,\n\n) -> Result<Vec<u8>, HkdfError>\n\nwhere\n\n D: Update + BlockInput + FixedOutput + Reset + Default + Clone,\n\n D::BlockSize: ArrayLength<u8>,\n\n D::OutputSize: ArrayLength<u8>,\n\n{\n\n // TODO: this would need to change if we ever needed the generated pseudorandom key, but\n\n // realistically I don't see any reasons why we might need it\n\n\n\n let hkdf = Hkdf::<D>::new(salt, ikm);\n\n let mut 
okm = vec![0u8; okm_length];\n\n if hkdf.expand(info.unwrap_or_else(|| &[]), &mut okm).is_err() {\n\n return Err(HkdfError::InvalidOkmLength);\n\n }\n\n\n\n Ok(okm)\n\n}\n", "file_path": "common/crypto/src/hkdf.rs", "rank": 60, "score": 64640.46666668212 }, { "content": "fn execute(matches: ArgMatches) {\n\n match matches.subcommand() {\n\n (\"init\", Some(m)) => commands::init::execute(m),\n\n (\"run\", Some(m)) => commands::run::execute(m),\n\n (\"upgrade\", Some(m)) => commands::upgrade::execute(m),\n\n _ => println!(\"{}\", usage()),\n\n }\n\n}\n\n\n", "file_path": "clients/socks5/src/main.rs", "rank": 61, "score": 64640.46666668212 }, { "content": "fn show_binding_warning(address: String) {\n\n println!(\"\\n##### NOTE #####\");\n\n println!(\n\n \"\\nYou are trying to bind to {} - you might not be accessible to other nodes\\n\\\n\n You can ignore this note if you're running setup on a local network \\n\\\n\n or have set a custom 'announce-host'\",\n\n address\n\n );\n\n println!(\"\\n\\n\");\n\n}\n\n\n", "file_path": "mixnode/src/commands/run.rs", "rank": 62, "score": 63517.613246557485 }, { "content": "fn show_binding_warning(address: String) {\n\n println!(\"\\n##### NOTE #####\");\n\n println!(\n\n \"\\nYou are trying to bind to {} - you might not be accessible to other nodes\\n\\\n\n You can ignore this warning if you're running setup on a local network \\n\\\n\n or have set a custom 'announce-host'\",\n\n address\n\n );\n\n println!(\"\\n\\n\");\n\n}\n\n\n", "file_path": "gateway/src/commands/run.rs", "rank": 63, "score": 63517.613246557485 }, { "content": "pub fn missing_string_value() -> String {\n\n MISSING_VALUE.to_string()\n\n}\n\n\n\nimpl Config {\n\n pub fn new<S: Into<String>>(id: S) -> Self {\n\n Config::default().with_id(id)\n\n }\n\n\n\n // builder methods\n\n pub fn with_id<S: Into<String>>(mut self, id: S) -> Self {\n\n let id = id.into();\n\n if self.gateway.private_sphinx_key_file.as_os_str().is_empty() {\n\n 
self.gateway.private_sphinx_key_file =\n\n self::Gateway::default_private_sphinx_key_file(&id);\n\n }\n\n if self.gateway.public_sphinx_key_file.as_os_str().is_empty() {\n\n self.gateway.public_sphinx_key_file =\n\n self::Gateway::default_public_sphinx_key_file(&id);\n\n }\n", "file_path": "gateway/src/config/mod.rs", "rank": 64, "score": 63517.613246557485 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let id = matches.value_of(\"id\").unwrap();\n\n println!(\"Initialising gateway {}...\", id);\n\n\n\n if Config::default_config_file_path(id).exists() {\n\n eprintln!(\"Gateway \\\"{}\\\" was already initialised before! If you wanted to upgrade your gateway to most recent version, try `upgrade` command instead!\", id);\n\n process::exit(1);\n\n }\n\n\n\n let mut config = Config::new(id);\n\n\n\n config = override_config(config, matches);\n\n\n\n let identity_keys = identity::KeyPair::new();\n\n let sphinx_keys = encryption::KeyPair::new();\n\n let pathfinder = GatewayPathfinder::new_from_config(&config);\n\n pemstore::store_keypair(\n\n &sphinx_keys,\n\n &pemstore::KeyPairPath::new(\n\n pathfinder.private_encryption_key().to_owned(),\n", "file_path": "gateway/src/commands/init.rs", "rank": 65, "score": 61570.214880998974 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n println!(\"Starting gateway {}...\", id);\n\n\n\n let mut config =\n\n Config::load_from_file(matches.value_of(\"config\").map(|path| path.into()), Some(id))\n\n .expect(\"Failed to load config file\");\n\n\n\n config = override_config(config, matches);\n\n\n\n let pathfinder = GatewayPathfinder::new_from_config(&config);\n\n let sphinx_keypair = load_sphinx_keys(&pathfinder);\n\n let identity = load_identity_keys(&pathfinder);\n\n\n\n let mix_listening_ip_string = config.get_mix_listening_address().ip().to_string();\n\n if special_addresses().contains(&mix_listening_ip_string.as_ref()) {\n\n 
show_binding_warning(mix_listening_ip_string);\n\n }\n\n\n", "file_path": "gateway/src/commands/run.rs", "rank": 66, "score": 61570.214880998974 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n println!(\"Starting mixnode {}...\", id);\n\n\n\n let mut config =\n\n Config::load_from_file(matches.value_of(\"config\").map(|path| path.into()), Some(id))\n\n .expect(\"Failed to load config file\");\n\n\n\n config = override_config(config, matches);\n\n\n\n let pathfinder = MixNodePathfinder::new_from_config(&config);\n\n let identity_keypair = load_identity_keys(&pathfinder);\n\n let sphinx_keypair = load_sphinx_keys(&pathfinder);\n\n\n\n let listening_ip_string = config.get_listening_address().ip().to_string();\n\n if special_addresses().contains(&listening_ip_string.as_ref()) {\n\n show_binding_warning(listening_ip_string);\n\n }\n\n\n", "file_path": "mixnode/src/commands/run.rs", "rank": 67, "score": 61570.214880998974 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n // TODO: this should probably be made implicit by slapping `#[tokio::main]` on our main method\n\n // and then removing runtime from mixnode itself in `run`\n\n let mut rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n let id = matches.value_of(\"id\").unwrap();\n\n println!(\"Initialising mixnode {}...\", id);\n\n\n\n if Config::default_config_file_path(id).exists() {\n\n eprintln!(\"Mixnode \\\"{}\\\" was already initialised before! 
If you wanted to upgrade your node to most recent version, try `upgrade` command instead!\", id);\n\n process::exit(1);\n\n }\n\n\n\n let mut config = Config::new(id);\n\n config = override_config(config, matches);\n\n let layer = choose_layer(matches, config.get_presence_directory_server()).await;\n\n // TODO: I really don't like how we override config and are presumably done with it\n\n // only to change it here\n\n config = config.with_layer(layer);\n\n debug!(\"Choosing layer {}\", config.get_layer());\n", "file_path": "mixnode/src/commands/init.rs", "rank": 68, "score": 61570.214880998974 }, { "content": "fn special_addresses() -> Vec<&'static str> {\n\n vec![\"localhost\", \"127.0.0.1\", \"0.0.0.0\", \"::1\", \"[::1]\"]\n\n}\n\n\n", "file_path": "mixnode/src/commands/run.rs", "rank": 69, "score": 61570.214880998974 }, { "content": "fn special_addresses() -> Vec<&'static str> {\n\n vec![\"localhost\", \"127.0.0.1\", \"0.0.0.0\", \"::1\", \"[::1]\"]\n\n}\n\n\n", "file_path": "gateway/src/commands/run.rs", "rank": 70, "score": 61570.214880998974 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let current = Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n\n\n // technically this is not a correct way of checking it as a released version might contain valid build identifiers\n\n // however, we are not using them ourselves at the moment and hence it should be fine.\n\n // if we change our mind, we could easily tweak this code\n\n if current.is_prerelease() || !current.build.is_empty() {\n\n eprintln!(\n\n \"Trying to upgrade to a non-released version {}. This is not supported!\",\n\n current\n\n );\n\n process::exit(1)\n\n }\n\n\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut existing_config = Config::load_from_file(None, Some(id)).unwrap_or_else(|err| {\n\n eprintln!(\"failed to load existing config file! 
- {:?}\", err);\n\n process::exit(1)\n\n });\n", "file_path": "gateway/src/commands/upgrade.rs", "rank": 71, "score": 61570.214880998974 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let current = Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n\n\n // technically this is not a correct way of checking it as a released version might contain valid build identifiers\n\n // however, we are not using them ourselves at the moment and hence it should be fine.\n\n // if we change our mind, we could easily tweak this code\n\n if current.is_prerelease() || !current.build.is_empty() {\n\n eprintln!(\n\n \"Trying to upgrade to a non-released version {}. This is not supported!\",\n\n current\n\n );\n\n process::exit(1)\n\n }\n\n\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut existing_config = Config::load_from_file(None, Some(id)).unwrap_or_else(|err| {\n\n eprintln!(\"failed to load existing config file! - {:?}\", err);\n\n process::exit(1)\n\n });\n", "file_path": "mixnode/src/commands/upgrade.rs", "rank": 72, "score": 61570.214880998974 }, { "content": "pub fn missing_string_value() -> String {\n\n MISSING_VALUE.to_string()\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(deny_unknown_fields)]\n\npub struct Config<T> {\n\n client: Client<T>,\n\n\n\n #[serde(default)]\n\n logging: Logging,\n\n #[serde(default)]\n\n debug: Debug,\n\n}\n\n\n\nimpl<T: NymConfig> Config<T> {\n\n pub fn new<S: Into<String>>(id: S) -> Self {\n\n let mut cfg = Config::default();\n\n cfg.with_id(id);\n\n cfg\n", "file_path": "clients/client-core/src/config/mod.rs", "rank": 73, "score": 61445.130230205505 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n println!(\"Initialising client...\");\n\n\n\n let id = matches.value_of(\"id\").unwrap(); // required for now\n\n\n\n if Config::default_config_file_path(id).exists() {\n\n eprintln!(\"Client \\\"{}\\\" was already initialised before! 
If you wanted to upgrade your client to most recent version, try `upgrade` command instead!\", id);\n\n process::exit(1);\n\n }\n\n\n\n let mut config = Config::new(id);\n\n\n\n let mut rng = OsRng;\n\n\n\n // TODO: ideally that should be the last thing that's being done to config.\n\n // However, we are later further overriding it with gateway id\n\n config = override_config(config, matches);\n\n if matches.is_present(\"fastmode\") {\n\n config.get_base_mut().set_high_default_traffic_volume();\n\n }\n", "file_path": "clients/native/src/commands/init.rs", "rank": 74, "score": 60506.62655052862 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n println!(\"Initialising client...\");\n\n\n\n let id = matches.value_of(\"id\").unwrap(); // required for now\n\n let provider_address = matches.value_of(\"provider\").unwrap();\n\n\n\n if Config::default_config_file_path(id).exists() {\n\n eprintln!(\"Socks5 client \\\"{}\\\" was already initialised before! If you wanted to upgrade your client to most recent version, try `upgrade` command instead!\", id);\n\n process::exit(1);\n\n }\n\n\n\n let mut config = Config::new(id, provider_address);\n\n\n\n let mut rng = OsRng;\n\n\n\n // TODO: ideally that should be the last thing that's being done to config.\n\n // However, we are later further overriding it with gateway id\n\n config = override_config(config, matches);\n\n if matches.is_present(\"fastmode\") {\n\n config.get_base_mut().set_high_default_traffic_volume();\n", "file_path": "clients/socks5/src/commands/init.rs", "rank": 75, "score": 60506.62655052862 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut config =\n\n Config::load_from_file(matches.value_of(\"config\").map(|path| path.into()), Some(id))\n\n .expect(\"Failed to load config file\");\n\n\n\n config = override_config(config, matches);\n\n\n\n NymClient::new(config).run_forever();\n\n}\n", "file_path": 
"clients/native/src/commands/run.rs", "rank": 76, "score": 60506.62655052862 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let current = Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n\n\n // technically this is not a correct way of checking it as a released version might contain valid build identifiers\n\n // however, we are not using them ourselves at the moment and hence it should be fine.\n\n // if we change our mind, we could easily tweak this code\n\n if current.is_prerelease() || !current.build.is_empty() {\n\n eprintln!(\n\n \"Trying to upgrade to a non-released version {}. This is not supported!\",\n\n current\n\n );\n\n process::exit(1)\n\n }\n\n\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut existing_config = Config::load_from_file(None, Some(id)).unwrap_or_else(|err| {\n\n eprintln!(\"failed to load existing config file! - {:?}\", err);\n\n process::exit(1)\n\n });\n", "file_path": "clients/socks5/src/commands/upgrade.rs", "rank": 77, "score": 60506.62655052862 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let current = Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n\n\n // technically this is not a correct way of checking it as a released version might contain valid build identifiers\n\n // however, we are not using them ourselves at the moment and hence it should be fine.\n\n // if we change our mind, we could easily tweak this code\n\n if current.is_prerelease() || !current.build.is_empty() {\n\n eprintln!(\n\n \"Trying to upgrade to a non-released version {}. This is not supported!\",\n\n current\n\n );\n\n process::exit(1)\n\n }\n\n\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut existing_config = Config::load_from_file(None, Some(id)).unwrap_or_else(|err| {\n\n eprintln!(\"failed to load existing config file! 
- {:?}\", err);\n\n process::exit(1)\n\n });\n", "file_path": "clients/native/src/commands/upgrade.rs", "rank": 78, "score": 60506.62655052862 }, { "content": "pub fn execute(matches: &ArgMatches) {\n\n let id = matches.value_of(\"id\").unwrap();\n\n\n\n let mut config =\n\n Config::load_from_file(matches.value_of(\"config\").map(|path| path.into()), Some(id))\n\n .expect(\"Failed to load config file\");\n\n\n\n config = override_config(config, matches);\n\n\n\n NymClient::new(config).run_forever();\n\n}\n", "file_path": "clients/socks5/src/commands/run.rs", "rank": 79, "score": 60506.62655052862 }, { "content": "/// Recompute shared key using remote public key and local private key.\n\npub fn recompute_shared_key<C, D>(\n\n remote_key: &encryption::PublicKey,\n\n local_key: &encryption::PrivateKey,\n\n) -> Key<C>\n\nwhere\n\n C: SyncStreamCipher + NewStreamCipher,\n\n D: Update + BlockInput + FixedOutput + Reset + Default + Clone,\n\n D::BlockSize: ArrayLength<u8>,\n\n D::OutputSize: ArrayLength<u8>,\n\n{\n\n let dh_result = local_key.diffie_hellman(remote_key);\n\n\n\n // there is no reason for this to fail as our okm is expected to be only C::KeySize bytes\n\n let okm = hkdf::extract_then_expand::<D>(None, &dh_result, None, C::KeySize::to_usize())\n\n .expect(\"somehow too long okm was provided\");\n\n\n\n Key::<C>::from_exact_iter(okm).expect(\"okm was expanded to incorrect length!\")\n\n}\n", "file_path": "common/crypto/src/shared_key.rs", "rank": 80, "score": 59502.662784752116 }, { "content": "/// Helper function used to determine if given message represents a loop cover message.\n\n// It kinda seems like there must exist \"prefix\" or \"starts_with\" method for bytes\n\n// or something, but I couldn't find anything\n\npub fn is_cover(data: &[u8]) -> bool {\n\n if data.len() < LOOP_COVER_MESSAGE_PAYLOAD.len() {\n\n return false;\n\n }\n\n\n\n for i in 0..LOOP_COVER_MESSAGE_PAYLOAD.len() {\n\n if data[i] != LOOP_COVER_MESSAGE_PAYLOAD[i] {\n\n return 
false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_cover_works_for_identical_input() {\n", "file_path": "common/nymsphinx/cover/src/lib.rs", "rank": 81, "score": 58865.076691731665 }, { "content": "pub fn command_args<'a, 'b>() -> App<'a, 'b> {\n\n App::new(\"run\")\n\n .about(\"Starts the mixnode\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-mixnode we want to run\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments are optional, they are used to override settings in config file\n\n .arg(\n\n Arg::with_name(\"location\")\n\n .long(\"location\")\n\n .help(\"Optional geographical location of this node\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .long(\"config\")\n", "file_path": "mixnode/src/commands/run.rs", "rank": 82, "score": 58417.00927752523 }, { "content": "pub fn command_args<'a, 'b>() -> App<'a, 'b> {\n\n App::new(\"upgrade\").about(\"Try to upgrade the gateway\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-gateway we want to upgrade\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments depend on the upgrade path\n\n .arg(Arg::with_name(\"current version\")\n\n .long(\"current-version\")\n\n .help(\"REQUIRED FOR PRE-0.9.0 UPGRADES. Self provided version of the nym-gateway if none is available in the config. 
NOTE: if provided incorrectly, results may be catastrophic.\")\n\n .takes_value(true)\n\n )\n\n}\n\n\n", "file_path": "gateway/src/commands/upgrade.rs", "rank": 83, "score": 58417.00927752523 }, { "content": "pub fn command_args<'a, 'b>() -> App<'a, 'b> {\n\n App::new(\"upgrade\").about(\"Try to upgrade the mixnode\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-mixnode we want to upgrade\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments depend on the upgrade path\n\n .arg(Arg::with_name(\"current version\")\n\n .long(\"current-version\")\n\n .help(\"REQUIRED FOR PRE-0.9.0 UPGRADES. Self provided version of the nym-mixnode if none is available in the config. NOTE: if provided incorrectly, results may be catastrophic.\")\n\n .takes_value(true)\n\n )\n\n}\n\n\n", "file_path": "mixnode/src/commands/upgrade.rs", "rank": 84, "score": 58417.00927752523 }, { "content": "fn select_gateway(arg: Option<&str>) -> &str {\n\n if let Some(gateway_id) = arg {\n\n gateway_id\n\n } else {\n\n // TODO1: this should only be done on testnet\n\n // TODO2: it should probably check if chosen gateway is actually online\n\n GOOD_GATEWAYS.choose(&mut rand::thread_rng()).unwrap()\n\n }\n\n}\n\n\n", "file_path": "clients/socks5/src/commands/init.rs", "rank": 85, "score": 57851.75811174391 }, { "content": "fn select_gateway(arg: Option<&str>) -> &str {\n\n if let Some(gateway_id) = arg {\n\n gateway_id\n\n } else {\n\n // TODO1: this should only be done on testnet\n\n // TODO2: it should probably check if chosen gateway is actually online\n\n GOOD_GATEWAYS.choose(&mut rand::thread_rng()).unwrap()\n\n }\n\n}\n\n\n", "file_path": "clients/native/src/commands/init.rs", "rank": 86, "score": 57851.75811174391 }, { "content": "fn pre_090_upgrade(from: &str, config: Config) -> Config {\n\n // note: current is guaranteed to not have any `build` information suffix (nor pre-release\n\n // information), as this was asserted at the 
beginning of this command)\n\n //\n\n // upgrade to current (if it's a 0.9.X) or try to upgrade to 0.9.0 as an intermediate\n\n // step in future upgrades (so, for example, we might go 0.8.0 -> 0.9.0 -> 0.10.0)\n\n // this way we don't need to have all the crazy paths on how to upgrade from any version to any\n\n // other version. We just upgrade one minor version at a time.\n\n let current = Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n let to_version = if current.major == 0 && current.minor == 9 {\n\n current\n\n } else {\n\n Version::new(0, 9, 0)\n\n };\n\n\n\n print_start_upgrade(&from, &to_version);\n\n\n\n // this is not extracted to separate function as you only have to manually pass version\n\n // if upgrading from pre090 version\n\n let from = match from.strip_prefix(\"v\") {\n", "file_path": "mixnode/src/commands/upgrade.rs", "rank": 87, "score": 57451.147505398905 }, { "content": "fn pre_090_upgrade(from: &str, config: Config) -> Config {\n\n // this is not extracted to separate function as you only have to manually pass version\n\n // if upgrading from pre090 version\n\n let from = match from.strip_prefix(\"v\") {\n\n Some(stripped) => stripped,\n\n None => from,\n\n };\n\n\n\n let from = match from.strip_prefix(\"V\") {\n\n Some(stripped) => stripped,\n\n None => from,\n\n };\n\n\n\n let from_version = parse_version(from).expect(\"invalid version provided!\");\n\n if from_version.major == 0 && from_version.minor < 8 {\n\n // technically this could be implemented, but is there any point in that?\n\n eprintln!(\"upgrading node from before v0.8.0 is not supported. 
Please run `init` with new binary instead\");\n\n process::exit(1)\n\n }\n\n\n", "file_path": "gateway/src/commands/upgrade.rs", "rank": 88, "score": 57451.147505398905 }, { "content": "pub fn command_args<'a, 'b>() -> App<'a, 'b> {\n\n App::new(\"upgrade\").about(\"Try to upgrade the mixnode\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-client we want to upgrade\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments depend on the upgrade path\n\n .arg(Arg::with_name(\"current version\")\n\n .long(\"current-version\")\n\n .help(\"REQUIRED FOR PRE-0.9.0 UPGRADES. Self provided version of the nym-client if none is available in the config. NOTE: if provided incorrectly, results may be catastrophic.\")\n\n .takes_value(true)\n\n )\n\n}\n\n\n", "file_path": "clients/native/src/commands/upgrade.rs", "rank": 89, "score": 57294.15585740059 }, { "content": "pub fn command_args<'a, 'b>() -> App<'a, 'b> {\n\n App::new(\"upgrade\").about(\"Try to upgrade the mixnode\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-socks5-client we want to upgrade\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments depend on the upgrade path\n\n .arg(Arg::with_name(\"current version\")\n\n .long(\"current-version\")\n\n .help(\"REQUIRED FOR PRE-0.9.0 UPGRADES. Self provided version of the nym-socks5-client if none is available in the config. 
NOTE: if provided incorrectly, results may be catastrophic.\")\n\n .takes_value(true)\n\n )\n\n}\n\n\n", "file_path": "clients/socks5/src/commands/upgrade.rs", "rank": 90, "score": 57294.15585740059 }, { "content": "pub fn zero_iv<C>() -> IV<C>\n\nwhere\n\n C: NewStreamCipher,\n\n{\n\n GenericArray::default()\n\n}\n\n\n", "file_path": "common/crypto/src/symmetric/stream_cipher.rs", "rank": 91, "score": 56893.4440009196 }, { "content": "pub fn missing_string_value<T: From<String>>() -> T {\n\n MISSING_VALUE.to_string().into()\n\n}\n\n\n\nimpl Config {\n\n pub fn new<S: Into<String>>(id: S) -> Self {\n\n Config::default().with_id(id)\n\n }\n\n\n\n // builder methods\n\n pub fn with_id<S: Into<String>>(mut self, id: S) -> Self {\n\n let id = id.into();\n\n if self\n\n .mixnode\n\n .private_identity_key_file\n\n .as_os_str()\n\n .is_empty()\n\n {\n\n self.mixnode.private_identity_key_file =\n\n self::MixNode::default_private_identity_key_file(&id);\n", "file_path": "mixnode/src/config/mod.rs", "rank": 92, "score": 56442.25281951728 }, { "content": "pub fn command_args<'a, 'b>() -> clap::App<'a, 'b> {\n\n App::new(\"run\")\n\n .about(\"Starts the gateway\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the gateway we want to run\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n // the rest of arguments are optional, they are used to override settings in config file\n\n .arg(\n\n Arg::with_name(\"location\")\n\n .long(\"location\")\n\n .help(\"Optional geographical location of this gateway\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .long(\"config\")\n", "file_path": "gateway/src/commands/run.rs", "rank": 93, "score": 56226.90258748048 }, { "content": "pub fn command_args<'a, 'b>() -> clap::App<'a, 'b> {\n\n App::new(\"init\")\n\n .about(\"Initialise the gateway\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the gateway we want to create config for.\")\n\n 
.takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"location\")\n\n .long(\"location\")\n\n .help(\"Optional geographical location of this provider\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"mix-host\")\n\n .long(\"mix-host\")\n\n .help(\"The custom host on which the gateway will be running for receiving sphinx packets\")\n", "file_path": "gateway/src/commands/init.rs", "rank": 94, "score": 56226.90258748048 }, { "content": "pub fn command_args<'a, 'b>() -> clap::App<'a, 'b> {\n\n App::new(\"init\")\n\n .about(\"Initialise the mixnode\")\n\n .arg(\n\n Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-mixnode we want to create config for.\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"location\")\n\n .long(\"location\")\n\n .help(\"Optional geographical location of this node\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"layer\")\n\n .long(\"layer\")\n\n .help(\"The mixnet layer of this particular node\")\n", "file_path": "mixnode/src/commands/init.rs", "rank": 95, "score": 56226.90258748048 }, { "content": "fn load_sphinx_keys(pathfinder: &GatewayPathfinder) -> encryption::KeyPair {\n\n let sphinx_keypair: encryption::KeyPair = pemstore::load_keypair(&pemstore::KeyPairPath::new(\n\n pathfinder.private_encryption_key().to_owned(),\n\n pathfinder.public_encryption_key().to_owned(),\n\n ))\n\n .expect(\"Failed to read stored sphinx key files\");\n\n println!(\n\n \"Public sphinx key: {}\\n\",\n\n sphinx_keypair.public_key().to_base58_string()\n\n );\n\n sphinx_keypair\n\n}\n\n\n", "file_path": "gateway/src/commands/run.rs", "rank": 96, "score": 55981.99972485143 }, { "content": "fn load_identity_keys(pathfinder: &GatewayPathfinder) -> identity::KeyPair {\n\n let identity_keypair: identity::KeyPair = pemstore::load_keypair(&pemstore::KeyPairPath::new(\n\n pathfinder.private_identity_key().to_owned(),\n\n pathfinder.public_identity_key().to_owned(),\n\n 
))\n\n .expect(\"Failed to read stored identity key files\");\n\n println!(\n\n \"Public identity key: {}\\n\",\n\n identity_keypair.public_key().to_base58_string()\n\n );\n\n identity_keypair\n\n}\n\n\n", "file_path": "gateway/src/commands/run.rs", "rank": 97, "score": 55981.99972485143 }, { "content": "fn read_pem_file(filepath: &Path) -> io::Result<Pem> {\n\n let mut pem_bytes = File::open(filepath)?;\n\n let mut buf = Vec::new();\n\n pem_bytes.read_to_end(&mut buf)?;\n\n pem::parse(&buf).map_err(|e| io::Error::new(io::ErrorKind::Other, e))\n\n}\n\n\n", "file_path": "common/pemstore/src/lib.rs", "rank": 98, "score": 55483.938708692964 }, { "content": "pub fn command_args<'a, 'b>() -> clap::App<'a, 'b> {\n\n App::new(\"init\")\n\n .about(\"Initialise a Nym client. Do this first!\")\n\n .arg(Arg::with_name(\"id\")\n\n .long(\"id\")\n\n .help(\"Id of the nym-mixnet-client we want to create config for.\")\n\n .takes_value(true)\n\n .required(true)\n\n )\n\n .arg(Arg::with_name(\"gateway\")\n\n .long(\"gateway\")\n\n .help(\"Id of the gateway we are going to connect to.\")\n\n .takes_value(true)\n\n )\n\n .arg(Arg::with_name(\"directory\")\n\n .long(\"directory\")\n\n .help(\"Address of the directory server the client is getting topology from\")\n\n .takes_value(true),\n\n )\n\n .arg(Arg::with_name(\"disable-socket\")\n", "file_path": "clients/native/src/commands/init.rs", "rank": 99, "score": 55163.314257010126 } ]
Rust
src/lib.rs
aesteve/vertx-eventbus-client-rs
2a8391d48f177d8ec9daaf9e2e973cb9d3502634
use std::io; pub mod listener; pub mod message; pub mod publisher; mod utils; use crate::listener::EventBusListener; use crate::publisher::EventBusPublisher; use std::net::{TcpStream, ToSocketAddrs}; pub fn eventbus<A: ToSocketAddrs>(address: A) -> io::Result<(EventBusPublisher, EventBusListener)> { let socket = TcpStream::connect(&address)?; socket.set_nonblocking(true)?; let control_socket = socket .try_clone()?; let w_socket = socket .try_clone()?; Ok(( EventBusPublisher::new(w_socket)?, EventBusListener::new(control_socket)?, )) } #[cfg(test)] mod tests { use crate::eventbus; use crate::message::{Message, SendMessage}; use serde_json::json; use testcontainers::images::generic::{GenericImage, WaitFor}; use testcontainers::*; fn mock_eventbus_server() -> GenericImage { GenericImage::new("aesteve/tests:mock-eventbus-server") .with_wait_for(WaitFor::message_on_stdout("TCP bridge connected")) } #[test] fn test_ping() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, _) = eventbus(addr).expect("Event bus creation must not fail"); publisher .ping() .expect("Should be able to send ping to the server"); } #[test] fn consumer_test() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (_, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let mut consumer = listener.consumer("out-address".to_string()).unwrap(); let mut received_msgs = Vec::new(); while received_msgs.len() < 3 { if let Some(Ok(msg)) = consumer.next() { 
assert!(received_msgs .iter() .find(|m: &&Message| m.body == msg.body) .is_none()); received_msgs.push(msg); } } listener .unregister_consumer("out-address".to_string()) .expect("Unregistering consumer must not fail"); } #[test] fn send_reply_pattern() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let reply_address = "the-reply-address"; let mut consumer = listener.consumer(reply_address.to_string()).unwrap(); let payload = json!({"test": "value"}); let expected_payload = payload.clone(); publisher .send(SendMessage { address: "echo-address".to_string(), reply_address: Some(reply_address.to_string()), body: Some(payload), headers: None, }) .expect("Sending a message to the event bus must work fine"); let mut received_msgs = 0; while received_msgs == 0 { if let Some(Ok(msg)) = consumer.next() { assert_eq!(reply_address, msg.address); assert_eq!( expected_payload, msg.body.expect("Body should be extracted") ); received_msgs += 1; } } } #[test] fn pub_sub_pattern() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, _) = eventbus(addr).expect("Event bus creation must not fail"); let payload = json!({"test": "value"}); publisher .publish(Message { address: "in-address".to_string(), body: Some(payload), headers: None, }) .expect("Publishing a message to the event bus must work fine"); } #[test] fn test_errors() { let docker = clients::Cli::default(); 
let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let payload = json!({"test": "value"}); publisher .send(SendMessage { address: "error-address".to_string(), reply_address: Some("the-reply-address".to_string()), body: Some(payload), headers: None, }) .expect("Publishing a message to the event bus must work fine"); let mut errors_received = 0; let mut errors = listener.errors().expect("Can listen to errors"); while errors_received == 0 { if let Some(Ok(error_msg)) = errors.next() { assert_eq!(error_msg.message, "FORBIDDEN".to_string(),); errors_received += 1; } } } #[test] fn connect_to_an_unexisting_address_should_fail() { let eb = eventbus("127.0.0.1::1111"); assert!(eb.is_err()); } #[test] fn should_be_notified_of_errors() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (_, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let mut error_listener = listener .errors() .expect("Can ask for an iterator over error messages"); listener .consumer("something_we_dont_have_access_to".to_string()) .expect("Can subscribe to any address"); let mut errors_received = 0; while errors_received < 1 { if let Some(Ok(error_msg)) = error_listener.next() { errors_received += 1; assert!(error_msg.message.contains("denied")) } } } }
use std::io; pub mod listener; pub mod message; pub mod publisher; mod utils; use crate::listener::EventBusListener; use crate::publisher::EventBusPublisher; use std::net::{TcpStream, ToSocketAddrs}; pub fn eventbus<A: ToSocketAddrs>(address: A) -> io::Result<(EventBusPublisher, EventBusListener)> { let socket = TcpStream::connect(&address)?; socket.set_nonblocking(true)?; let control_socket = socket .try_clone()?; let w_socket = socket .try_clone()?; Ok(( EventBusPublisher::new(w_socket)?, EventBusListener::new(control_socket)?, )) } #[cfg(test)] mod tests { use crate::eventbus; use crate::message::{Message, SendMessage}; use serde_json::json; use testcontainers::images::generic::{GenericImage, WaitFor}; use testcontainers::*; fn mock_eventbus_server() -> GenericImage { GenericImage::new("aesteve/tests:mock-eventbus-server") .with_wait_for(WaitFor::message_on_stdout("TCP bridge connected")) } #[test] fn test_ping() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, _) = eventbus(addr).expect("Event bus creation must not fail"); publisher .ping() .expect("Should be able to send ping to the server"); } #[test] fn consumer_test() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (_, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let mut consumer = listener.consumer("out-address".to_string()).unwrap(); let mut received_msgs = Vec::new(); while received_msgs.len() < 3 { if let Some(Ok(msg)) = consumer.next() { 
assert!(received_msgs .iter() .find(|m: &&Message| m.body == msg.body) .is_none()); received_msgs.push(msg); } } listener .unregister_consumer("out-address".to_string()) .expect("Unregistering consumer must not fail"); } #[test] fn send_reply_pattern() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let reply_address = "the-reply-address"; let mut consumer = listener.consumer(reply_address.to_string()).unwrap(); let payload = json!({"test": "value"}); let expected_payload = payload.clone(); publisher .send(SendMessage { address: "echo-address".to_string(), reply_address: Some(reply_address.to_string()), body: Some(payload), headers: None, }) .expect("Sending a message to the event bus must work fine"); let mut received_msgs = 0; while received_msgs == 0 { if let Some(Ok(msg)) = consumer.next() { assert_eq!(reply_address, msg.address); assert_eq!( expected_payload, msg.body.expect("Body should be extracted") ); received_msgs += 1; } } } #[test] fn pub_sub_pattern() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, _) = eventbus(addr).expect("Event bus creation must not fail"); let payload = json!({"test": "value"}); publisher .publish(Message { address: "in-address".to_string(), body: Some(payload), headers: None, }) .expect("Publishing a message to the event bus must work fine"); } #[test] fn test_errors() { let docker = clients::Cli::default(); 
let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (mut publisher, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let payload = json!({"test": "value"}); publisher .send(SendMessage { address: "error-address".to_string(), reply_address: Some("the-reply-address".to_string()), body: Some(payload), headers: None, }) .expect("Publishing a message to the event bus must work fine"); let mut errors_received = 0; let mut errors = listener.errors().expect("Can listen to errors"); while errors_received == 0 { if let Some(Ok(error_msg)) = errors.next() { assert_eq!(error_msg.message, "FORBIDDEN".to_string(),); errors_received += 1; } } } #[test] fn connect_to_an_unexisting_address_should_fail() { let eb = eventbus("127.0.0.1::1111"); assert!(eb.is_err()); } #[test] fn should_be_notified_of_errors() { let docker = clients::Cli::default(); let node = docker.run(mock_eventbus_server()); let host_port = node .get_host_port(7542) .expect("Mock event bus server implementation needs to be up before running tests"); let addr = format!("localhost:{}", host_port); println!("Mock server running on {}", addr); let (_, mut listener) = eventbus(addr).expect("Event bus creation must not fail"); let mut error_listener = listener .errors() .expect("Can ask for an iterator over error messages"); listener .consumer("something_we_dont_have_access_to".to_string()) .expect("Can subscribe to any address"); let mut errors_received = 0; while errors_received < 1 {
} } }
if let Some(Ok(error_msg)) = error_listener.next() { errors_received += 1; assert!(error_msg.message.contains("denied")) }
if_condition
[ { "content": "type ErrorNotifier = Mutex<Option<Sender<UserMessage<ErrorMessage>>>>;\n\n\n\npub struct EventBusListener {\n\n socket: TcpStream,\n\n handlers: MessageHandlersByAddress,\n\n error_handler: Arc<ErrorNotifier>,\n\n}\n\n\n\nimpl EventBusListener {\n\n pub fn new(socket: TcpStream) -> io::Result<Self> {\n\n let msg_dispatcher = socket.try_clone()?;\n\n let error_notifier = Arc::new(Mutex::new(None));\n\n let notifier = error_notifier.clone();\n\n let listener = EventBusListener {\n\n socket,\n\n handlers: Arc::new(Mutex::new(HashMap::new())),\n\n error_handler: error_notifier,\n\n };\n\n let consumers = listener.handlers.clone();\n\n\n", "file_path": "src/listener.rs", "rank": 1, "score": 47266.94243361885 }, { "content": "fn reader_loop(\n\n read_stream: TcpStream,\n\n handlers: MessageHandlersByAddress,\n\n error_notifier: &ErrorNotifier,\n\n) {\n\n let mut socket = buffered_reader::Generic::new(&read_stream, Some(4096));\n\n loop {\n\n // first, read the 4 bytes indicating message length into `len`\n\n match socket.read_be_u32() {\n\n Ok(len) =>\n\n // then consume `len` bytes of data => it's a whole message\n\n if let Ok(bytes_read) = socket.data_consume(len as usize) {\n\n forward_json(&bytes_read[..len as usize], &handlers, error_notifier);\n\n },\n\n Err(e) => match e.kind() {\n\n std::io::ErrorKind::WouldBlock => {}, // transient failure, not to be propagated to the end-user\n\n kind =>\n\n for (_, handler) in handlers.lock().expect(\"Could retrieve message handlers to notify of an I/O error\").iter() {\n\n if handler.send(Err(kind)).is_err() {}\n\n }\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/listener.rs", "rank": 2, "score": 43008.469907302744 }, { "content": "fn forward_json(\n\n bytes_read: &[u8],\n\n handlers: &MessageHandlersByAddress,\n\n error_notifier: &ErrorNotifier,\n\n) {\n\n // event bus protocol is JSON encoded\n\n if let Ok(json) = std::str::from_utf8(bytes_read) {\n\n match serde_json::from_str::<InMessage>(&json) {\n\n 
Ok(in_msg) => {\n\n if let InMessage::Message(msg) = in_msg {\n\n if let Some(handler) = handlers\n\n .lock()\n\n .expect(\"Could not retrieve message handler for this address\")\n\n .get(msg.address.as_str())\n\n {\n\n handler\n\n .send(Ok(msg))\n\n .expect(\"Could not notify a new message has been received\");\n\n }\n\n } else if let InMessage::Err(err) = in_msg {\n", "file_path": "src/listener.rs", "rank": 3, "score": 43008.469907302744 }, { "content": "type MessageHandlersByAddress = Arc<Mutex<HashMap<String, Sender<UserMessage<Message>>>>>;\n", "file_path": "src/listener.rs", "rank": 4, "score": 40524.35214230888 }, { "content": "use crate::message::OutMessage;\n\nuse byteorder::{BigEndian, ByteOrder};\n\nuse std::io;\n\nuse std::io::Write;\n\nuse std::net::TcpStream;\n\n\n\npub(crate) fn write_msg(mut socket: &TcpStream, msg: &OutMessage) -> io::Result<()> {\n\n let mut buf = [0u8; 4];\n\n let msg_str = serde_json::to_string(msg)?;\n\n BigEndian::write_u32(&mut buf, msg_str.len() as u32);\n\n buf.to_vec().extend(msg_str.as_bytes());\n\n let a = [&buf[..], &msg_str.as_bytes()[..]].concat();\n\n socket.write_all(&a)?; // write message length as Big Endian then msg\n\n socket.flush()\n\n}\n", "file_path": "src/utils.rs", "rank": 5, "score": 19824.752498157555 }, { "content": " pub fn send(&mut self, msg: SendMessage) -> io::Result<&mut Self> {\n\n write_msg(&self.socket, &OutMessage::Send(msg)).map(|_| self)\n\n }\n\n\n\n pub fn publish(&mut self, msg: Message) -> io::Result<&mut Self> {\n\n write_msg(&self.socket, &OutMessage::Publish(msg)).map(|_| self)\n\n }\n\n\n\n pub fn ping(&mut self) -> io::Result<&mut Self> {\n\n write_msg(&self.socket, &OutMessage::Ping).map(|_| self)\n\n }\n\n\n\n fn send_heartbeat_periodically(&mut self, rx: Receiver<()>) -> io::Result<()> {\n\n let heartbeat_socket = self.socket.try_clone()?;\n\n thread::spawn(move || loop {\n\n if write_msg(&heartbeat_socket, &OutMessage::Ping).is_err() {\n\n println!(\"Could not send periodic 
heartbeat to TCP server\")\n\n }\n\n if rx.try_recv().ok().is_none() {\n\n thread::sleep(Duration::from_secs(10));\n", "file_path": "src/publisher.rs", "rank": 6, "score": 19179.994239254494 }, { "content": "use crate::message::{Message, OutMessage, SendMessage};\n\nuse crate::utils::write_msg;\n\nuse std::net::TcpStream;\n\nuse std::sync::mpsc::{channel, Receiver, Sender};\n\nuse std::time::Duration;\n\nuse std::{io, thread};\n\n\n\npub struct EventBusPublisher {\n\n socket: TcpStream,\n\n tx: Sender<()>,\n\n}\n\n\n\nimpl EventBusPublisher {\n\n pub fn new(socket: TcpStream) -> io::Result<Self> {\n\n let (tx, rx) = channel::<()>();\n\n let mut created = EventBusPublisher { socket, tx };\n\n created.send_heartbeat_periodically(rx)?;\n\n Ok(created)\n\n }\n\n\n", "file_path": "src/publisher.rs", "rank": 7, "score": 19179.731981546884 }, { "content": " } else {\n\n break;\n\n }\n\n });\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Drop for EventBusPublisher {\n\n fn drop(&mut self) {\n\n if self.tx.send(()).is_err() {}\n\n }\n\n}\n", "file_path": "src/publisher.rs", "rank": 8, "score": 19173.331084231493 }, { "content": "pub struct SendMessage {\n\n pub address: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub reply_address: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub body: Option<Value>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub headers: Option<HashMap<String, String>>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct RegisterMessage {\n\n pub address: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::message::{InMessage, Message, OutMessage, SendMessage};\n\n use serde_json::json;\n\n\n", "file_path": "src/message.rs", "rank": 9, "score": 18241.177517506923 }, { "content": " Send(SendMessage),\n\n Publish(Message),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Message {\n\n pub address: String,\n\n 
#[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub body: Option<Value>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub headers: Option<HashMap<String, String>>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct ErrorMessage {\n\n pub message: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/message.rs", "rank": 10, "score": 18238.254354954803 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse std::collections::HashMap;\n\nuse std::io::ErrorKind;\n\nuse std::sync::mpsc::Receiver;\n\n\n\npub type UserMessage<T> = Result<T, ErrorKind>;\n\n\n\npub struct MessageConsumer<T> {\n\n pub msg_queue: Receiver<UserMessage<T>>,\n\n}\n\n\n\nimpl<T> Iterator for MessageConsumer<T> {\n\n type Item = UserMessage<T>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.msg_queue.try_recv().ok()\n\n }\n\n}\n\n\n", "file_path": "src/message.rs", "rank": 11, "score": 18237.873259511412 }, { "content": " other => panic!(format!(\"Expecting a message, not {:?}\", other)),\n\n };\n\n }\n\n\n\n #[test]\n\n fn marshall_messages() {\n\n let msg: String = serde_json::to_string(&OutMessage::Ping).unwrap();\n\n assert_eq!(JSON_PING, msg);\n\n\n\n let msg: String = serde_json::to_string(&OutMessage::Send(SendMessage {\n\n address: \"the-address\".to_string(),\n\n body: Some(json!({})),\n\n reply_address: Some(\"the-reply-address\".to_string()),\n\n headers: None,\n\n }))\n\n .unwrap();\n\n assert_eq!(JSON_SEND, msg);\n\n }\n\n}\n", "file_path": "src/message.rs", "rank": 12, "score": 18236.805957506363 }, { "content": " const JSON_PING: &str = r#\"{\"type\":\"ping\"}\"#;\n\n const JSON_PONG: &str = r#\"{\"type\":\"pong\"}\"#;\n\n\n\n const JSON_SEND: &str =\n\n r#\"{\"type\":\"send\",\"address\":\"the-address\",\"replyAddress\":\"the-reply-address\",\"body\":{}}\"#;\n\n const JSON_RECEIVED: &str = 
r#\"{\"type\":\"message\",\"address\":\"the-address\",\"body\":{}}\"#;\n\n\n\n #[test]\n\n fn unmarshall_messages() {\n\n assert_eq!(InMessage::Pong, serde_json::from_str(JSON_PONG).unwrap());\n\n\n\n match serde_json::from_str(JSON_RECEIVED).unwrap() {\n\n InMessage::Message(msg) => assert_eq!(\n\n Message {\n\n address: \"the-address\".to_string(),\n\n body: Some(json!({})),\n\n headers: None\n\n },\n\n msg\n\n ),\n", "file_path": "src/message.rs", "rank": 13, "score": 18236.774164154638 }, { "content": "#[derive(Debug, Deserialize, PartialEq)]\n\n#[serde(rename_all(serialize = \"lowercase\", deserialize = \"lowercase\"))]\n\n#[serde(tag = \"type\")]\n\npub enum InMessage {\n\n Pong,\n\n // user, incoming messages\n\n Err(ErrorMessage),\n\n Message(Message),\n\n}\n\n\n\n#[derive(Debug, Serialize, PartialEq)]\n\n#[serde(rename_all(serialize = \"lowercase\", deserialize = \"lowercase\"))]\n\n#[serde(tag = \"type\")]\n\npub enum OutMessage {\n\n // internal, control\n\n Ping, // outgoing\n\n // internal, primitives associated to user actions, outgoing\n\n Register(RegisterMessage),\n\n Unregister(RegisterMessage),\n\n // user, outgoing message\n", "file_path": "src/message.rs", "rank": 14, "score": 18229.94695542559 }, { "content": " pub fn unregister_consumer(&mut self, address: String) -> io::Result<&mut Self> {\n\n self.handlers\n\n .lock()\n\n .expect(\"Could not add the callback to the list of consumers\")\n\n .remove(address.as_str());\n\n write_msg(\n\n &self.socket,\n\n &OutMessage::Unregister(RegisterMessage { address }),\n\n )\n\n .map(|_| self)\n\n }\n\n\n\n pub fn errors(&mut self) -> io::Result<MessageConsumer<ErrorMessage>> {\n\n let (errors_notifier, errors_receiver) = channel::<UserMessage<ErrorMessage>>();\n\n self.error_handler\n\n .lock()\n\n .expect(\"Could not replace the value of the error notifier\")\n\n .replace(errors_notifier);\n\n Ok(MessageConsumer {\n\n msg_queue: errors_receiver,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/listener.rs", 
"rank": 15, "score": 17979.577299842953 }, { "content": " thread::spawn(move || {\n\n reader_loop(msg_dispatcher, consumers, notifier.as_ref());\n\n });\n\n Ok(listener)\n\n }\n\n\n\n pub fn consumer(&mut self, address: String) -> io::Result<MessageConsumer<Message>> {\n\n let (tx, rx) = channel::<UserMessage<Message>>();\n\n let handler = MessageConsumer { msg_queue: rx };\n\n self.handlers\n\n .lock()\n\n .expect(\"Could not add the callback to the list of consumers\")\n\n .insert(address.clone(), tx);\n\n write_msg(\n\n &self.socket,\n\n &OutMessage::Register(RegisterMessage { address }),\n\n )?;\n\n Ok(handler)\n\n }\n\n\n", "file_path": "src/listener.rs", "rank": 16, "score": 17977.192727673955 }, { "content": "use crate::message::{\n\n ErrorMessage, InMessage, Message, MessageConsumer, OutMessage, RegisterMessage, UserMessage,\n\n};\n\nuse crate::utils::write_msg;\n\nuse buffered_reader::BufferedReader;\n\nuse std::collections::HashMap;\n\nuse std::net::TcpStream;\n\nuse std::sync::mpsc::channel;\n\nuse std::sync::mpsc::Sender;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::{io, thread};\n\n\n", "file_path": "src/listener.rs", "rank": 17, "score": 17973.21480366979 }, { "content": " let notifier = error_notifier\n\n .lock()\n\n .expect(\"Could not acquire error notifier to propagate an error message\");\n\n if notifier.is_some() {\n\n notifier\n\n .as_ref()\n\n .unwrap()\n\n .send(Ok(err))\n\n .expect(\"Could not notify of an incoming error message\");\n\n }\n\n }\n\n }\n\n Err(err) => {\n\n println!(\n\n \"Invalid JSON received from EventBus: {}. 
Error: {:?}\",\n\n json, err\n\n );\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/listener.rs", "rank": 18, "score": 17972.946307313785 }, { "content": "[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)\n\n[![Crates.io Version](https://img.shields.io/crates/v/vertx-eventbus-bridge.svg)](https://crates.io/crates/vertx-eventbus-bridge)\n\n[![codecov](https://codecov.io/gh/aesteve/vertx-eventbus-client-rs/branch/master/graph/badge.svg)](https://codecov.io/gh/aesteve/vertx-eventbus-client-rs)\n\n\n\n\n\n\n\n## Vert.x TCP EventBus client for Rust\n\n\n\n\n\nVert.x allows to expose its event-bus through a [TCP socket](https://vertx.io/docs/vertx-tcp-eventbus-bridge/java/).\n\nThis crate offers primitives in the Rust programming language to interact with the event bus over this TCP connection. \n\n\n\nThis allows to send messages from a Rust codebase (embedded sensors for instance) to a running Vert.x instance, or reacting to event bus messages published from the Vert.x application in a Rust codebase.\n\n\n\n\n\n## Early stage\n\n\n\nThis project is still in early development phase, although it's available as a [crate](https://crates.io/crates/vertx-eventbus-bridge) if you need it. 
\n\nThe design (at the moment: iterators over incoming messages) is highly suggest to change.\n\n\n\nAny proposition or technical comment is highly welcomed in the issues.\n\n\n\n## Testing\n\n\n\nIn order to test the client against a \"real\" TCP Event-Bus bridge (and avoid observator-bias: testing my own understanding of the protocol), [this docker image](https://hub.docker.com/layers/aesteve/tests/mock-eventbus-server/images/sha256-90eff1e74362118b41ec7fc7a22ecd180a6c2c07206083dec9885d6b61edd24c?context=repo) is used, through testcontainers.\n\nThe code for this image can be found [here](https://github.com/aesteve/vertx-eventbus-mock-server), it's a simple Vert.x application listening/publishing to the Event-Bus and exposing it through TCP on port 7542.\n", "file_path": "README.md", "rank": 28, "score": 13.768094414286297 } ]
Rust
pallets/stake-nft/src/mock.rs
SubGame-Network/subgame-network
d9906befc41e972e11fa1a669d47eddb15dabdd8
use crate as pallet_stake_nft; use pallet_timestamp; use balances; use frame_support::parameter_types; use frame_system as system; use pallet_nft; use pallet_lease; use sp_core::H256; use sp_runtime::{ testing::Header, traits::{BlakeTwo256, IdentityLookup}, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; frame_support::construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, SubgameNFT: pallet_nft::{Module, Call, Storage, Event<T>}, SubgameStakeNft: pallet_stake_nft::{Module, Call, Storage, Event<T>}, Lease: pallet_lease::{Module, Call, Storage, Event<T>}, Balances: balances::{Module, Call, Storage, Config<T>, Event<T>}, Timestamp: pallet_timestamp::{Module, Call, Storage, Inherent}, } ); parameter_types! { pub const ExistentialDeposit: u64 = 500; pub const MaxLocks: u32 = 50; } impl balances::Config for Test { type MaxLocks = (); type Balance = u64; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); } parameter_types! { pub const BlockHashCount: u64 = 250; } impl system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type DbWeight = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = u64; type AccountData = balances::AccountData<u64>; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); type PalletInfo = PalletInfo; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type SS58Prefix = (); } parameter_types! { pub const BridgeOwner: u32 = 3; } parameter_types! 
{ pub const CommodityLimit: u128 = 1000000000000000000000; pub const UserCommodityLimit: u64 = 10000000000000000000; } impl pallet_nft::Config for Test { type CommodityAdmin = frame_system::EnsureRoot<Self::AccountId>; type CommodityLimit = CommodityLimit; type UserCommodityLimit = UserCommodityLimit; type Event = Event; } impl pallet_lease::Config for Test { type Event = Event; type PalletId = u64; type UniqueAssets = SubgameNFT; type OwnerAddress = BridgeOwner; } impl pallet_stake_nft::Config for Test { type ProgramId = u64; type PalletId = u64; type Balances = Balances; type UniqueAssets = SubgameNFT; type Lease = Lease; type OwnerAddress = BridgeOwner; type Event = Event; } impl pallet_timestamp::Config for Test { type Moment = u64; type OnTimestampSet = (); type MinimumPeriod = (); type WeightInfo = (); } pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = system::GenesisConfig::default() .build_storage::<Test>() .unwrap(); balances::GenesisConfig::<Test> { balances: vec![ (1, 1000000), (2, 1000000), (3, 1000000), (4, 1000000), (5, 1000000), ], } .assimilate_storage(&mut t) .unwrap(); let mut ext: sp_io::TestExternalities = t.into(); ext.execute_with(|| System::set_block_number(1)); ext }
use crate as pallet_stake_nft; use pallet_timestamp; use balances; use frame_support::parameter_types; use frame_system as system; use pallet_nft; use pallet_lease; use sp_core::H256; use sp_runtime::{ testing::Header, traits::{BlakeTwo256, IdentityLookup}, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; frame_support::construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, SubgameNFT: pallet_nft::{Module, Call, Storage, Event<T>}, SubgameStakeNft: pallet_stake_nft::{Module, Call, Storage, Event<T>}, Lease: pallet_lease::{Module, Call, Storage, Event<T>}, Balances: balances::{Module, Call, Storage, Config<T>, Event<T>}, Timestamp: pallet_timestamp::{Module, Call, Storage, Inherent}, } ); parameter_types! { pub const ExistentialDeposit: u64 = 500; pub const MaxLocks: u32 = 50; } impl balances::Config for Test { type MaxLocks = (); type Balance = u64; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); } parameter_types! { pub const BlockHashCount: u64 = 250; } impl system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type DbWeight = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = u64; type AccountData = balances::AccountData<u64>; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); type PalletInfo = PalletInfo; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type SS58Prefix = (); } parameter_types! { pub const BridgeOwner: u32 = 3; } parameter_types! 
{ pub const CommodityLimit: u128 = 1000000000000000000000; pub const UserCommodityLimit: u64 = 10000000000000000000; } impl pallet_nft::Config for Test { type CommodityAdmin = frame_system::EnsureRoot<Self::AccountId>; type CommodityLimit = CommodityLimit; type UserCommodityLimit = UserCommodityLimit; type Event = Event; } impl pallet_lease::Config for Test { type Event = Event; type PalletId = u64; type UniqueAssets = SubgameNFT; type OwnerAddress = BridgeOwner; } impl pallet_stake_nft::Config for Test { type ProgramId = u64; type PalletId = u64; type Balances = Balances; type UniqueAssets = SubgameNFT; type Lease = Lease; type OwnerAddress = BridgeOwner; type Event = Event; } impl pallet_timestamp::Config for Test { type Moment = u64; type OnTimestampSet = (); type MinimumPeriod = (); type WeightInfo = (); } pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = system::GenesisConfig::default() .build_storage::<Test>() .unwrap(); balances::GenesisConfig::<Test> { balances: vec![ (1, 1000000), (2, 100000
0), (3, 1000000), (4, 1000000), (5, 1000000), ], } .assimilate_storage(&mut t) .unwrap(); let mut ext: sp_io::TestExternalities = t.into(); ext.execute_with(|| System::set_block_number(1)); ext }
function_block-function_prefixed
[ { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n Chips: pallet_chips::{Module, Call, Storage, Event<T>},\n\n GameGuessHashModule: pallet_gametemplates_guess_hash::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n\n\n", "file_path": "pallets/gametemplates-guess-hash/src/mock.rs", "rank": 0, "score": 286572.3362243369 }, { "content": "pub trait Config: frame_system::Config + pallet_timestamp::Config {\n\n /// The dispatch origin that is able to mint new instances of this type of commodity.\n\n type OwnerAddress: Get<Self::AccountId>;\n\n /// The data type that is used to describe this type of commodity.\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n\n\n type ProgramId: Member + Parameter + Default + Copy + HasCompact + Ord;\n\n type PalletId: Member + Parameter + Default + Copy + HasCompact + Ord;\n\n type Balances: Currency<Self::AccountId>;\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n}\n\n\n\n/// The runtime system's hashing algorithm is used to uniquely identify commodities.\n\npub type StakeId<T> = <T as frame_system::Config>::Hash;\n\npub type UniqueAssetInfoOf<T> = <<T as Config>::UniqueAssets as UniqueAssets<<T as frame_system::Config>::AccountId>>::AssetInfo;\n\n\n\npub type BalanceOf<T> =\n\n <<T as Config>::Balances as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n\npub type NftId<T> = \n\n<<T as Config>::UniqueAssets as UniqueAssets<<T as frame_system::Config>::AccountId>>::AssetId;\n\npub type 
PalletId<T> = \n\n <<T as Config>::Lease as Lease<<T as frame_system::Config>::AccountId, NftId<T>>>::PalletId;\n\npub type MomentOf<T> = <T as pallet_timestamp::Config>::Moment;\n\n\n\ndecl_storage! {\n", "file_path": "pallets/stake-nft/src/lib.rs", "rank": 1, "score": 266480.1218109036 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n SubGameStake: pallet_stake::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n\n\n\nimpl system::Config for Test {\n", "file_path": "pallets/stake/src/mock.rs", "rank": 2, "score": 259299.49601752625 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// impl_outer_origin! {\n\n// pub enum Origin for Test where system = frame_system {}\n\n// }\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Commodity: pallet_nft::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! 
{\n\n pub const BlockHashCount: u64 = 250;\n", "file_path": "pallets/nft/src/mock.rs", "rank": 3, "score": 259299.49601752622 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n SubGameAssets: pallet_subgame_assets::{Module, Call, Storage, Event<T>},\n\n Swap: pallet_swap::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n\n\n", "file_path": "pallets/swap/src/mock.rs", "rank": 4, "score": 259299.49601752622 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n Chips: pallet_chips::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! 
{\n\n pub const BlockHashCount: u64 = 250;\n\n}\n\n\n\nimpl system::Config for Test {\n", "file_path": "pallets/chips/src/mock.rs", "rank": 5, "score": 259299.49601752625 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n GameTemplate: pallet_gametemplate::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n\n\n\nimpl system::Config for Test {\n", "file_path": "pallets/gametemplates/src/mock.rs", "rank": 6, "score": 259299.49601752625 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n SubGameAssets: pallet_subgame_assets::{Module, Call, Storage, Event<T>},\n\n PalletTimestamp: pallet_timestamp::{Module, Call, Storage, Inherent},\n\n TSPWhitelist: pallet_tspwhitelist::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n", "file_path": "pallets/tspwhitelist/src/mock.rs", "rank": 7, "score": 259299.49601752625 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// impl_outer_origin! 
{\n\n// pub enum Origin for Test where system = frame_system {}\n\n// }\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n SubgameNFT: pallet_nft::{Module, Call, Storage, Event<T>},\n\n Lease: pallet_lease::{Module, Call, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n }\n\n);\n\n\n", "file_path": "pallets/lease/src/mock.rs", "rank": 8, "score": 259299.49601752622 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n SubgameNFT: pallet_nft::{Module, Call, Storage, Event<T>},\n\n SubgameStakeNft: pallet_stake_nft::{Module, Call, Storage, Event<T>},\n\n Lease: pallet_lease::{Module, Call, Storage, Event<T>},\n\n DemoGame: pallet_demogame::{Module, Call, Storage, Event<T>},\n\n Timestamp: pallet_timestamp::{Module, Call, Storage, Inherent},\n\n }\n\n);\n\n\n\nparameter_types! 
{\n", "file_path": "pallets/demogame/src/mock.rs", "rank": 9, "score": 259299.49601752622 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Module, Call, Config, Storage, Event<T>},\n\n Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},\n\n Chips: pallet_chips::{Module, Call, Storage, Event<T>},\n\n GameGuessHashModule: pallet_gametemplates_guess_hash::{Module, Call, Storage, Event<T>},\n\n GameCenter: pallet_gamecenter::{Module, Call, Storage, Event<T>},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n}\n", "file_path": "pallets/gamecenter/src/mock.rs", "rank": 10, "score": 259299.49601752622 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type GameIndex: Parameter + AtLeast32Bit + Bounded + Default + Copy;\n\n type WeightInfo: WeightInfo;\n\n type Chips: ChipsTrait + ChipsTransfer<Self::AccountId>;\n\n}\n\n\n", "file_path": "pallets/gametemplates-guess-hash/src/lib.rs", "rank": 12, "score": 246198.69316683756 }, { "content": "/// Jump to the specified block\n\nfn run_to_block(n: u64) {\n\n while System::block_number() < n {\n\n GameGuessHashModule::on_finalize(System::block_number());\n\n System::on_finalize(System::block_number());\n\n System::set_block_number(System::block_number() + 1);\n\n System::on_initialize(System::block_number());\n\n GameGuessHashModule::on_initialize(System::block_number());\n\n }\n\n}\n\n\n\n/// 【Scenario】Test the deployment function\n", "file_path": "pallets/gametemplates-guess-hash/src/tests.rs", "rank": 13, "score": 245358.40612380602 }, { "content": "pub trait Config: frame_system::Config + SubGameAssets::Config + PalletTimestamp::Config 
{\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\ttype WeightInfo: WeightInfo;\n\n\ttype Currency: ReservableCurrency<Self::AccountId>;\n\n\ttype OwnerAddress: Get<Self::AccountId>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "pallets/tspwhitelist/src/lib.rs", "rank": 14, "score": 231730.25444671692 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n // Initial allocation of money\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/gametemplates-guess-hash/src/mock.rs", "rank": 15, "score": 226067.7737292481 }, { "content": "fn run_to_block(n: u64, t: u64) {\n\n\twhile System::block_number() < n {\n\n\t\tTSPWhitelist::on_finalize(System::block_number());\n\n\t\tSystem::on_finalize(System::block_number());\n\n\t\tSystem::set_block_number(System::block_number()+1);\n\n PalletTimestamp::set_timestamp(t);\n\n\t\tSystem::on_initialize(System::block_number());\n\n\t\tTSPWhitelist::on_initialize(System::block_number());\n\n\t}\n\n}\n\n\n", "file_path": "pallets/tspwhitelist/src/tests.rs", "rank": 16, "score": 224673.0530863393 }, { "content": "type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n\n\n\n#[derive(Encode, Decode, Default)]\n\npub struct UserInfo<Account, ReferrerAccount> {\n\n pub account: Account,\n\n pub referrer_account: ReferrerAccount,\n\n}\n\n\n", "file_path": "pallets/stake/src/lib.rs", "rank": 17, "score": 222981.51638280146 }, { "content": "/// Jump to 
the specified block\n\nfn run_to_block(n: u64, t: u64) {\n\n while System::block_number() < n {\n\n SubgameNFT::on_finalize(System::block_number());\n\n System::on_finalize(System::block_number());\n\n System::set_block_number(System::block_number() + 1);\n\n Timestamp::set_timestamp(t);\n\n System::on_initialize(System::block_number());\n\n SubgameNFT::on_initialize(System::block_number());\n\n }\n\n}\n\n\n\n\n", "file_path": "pallets/stake-nft/src/tests.rs", "rank": 18, "score": 221013.80957191813 }, { "content": "type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n\n\n\nuse sp_std::convert::TryInto;\n\n\n", "file_path": "pallets/subgame-assets/src/lib.rs", "rank": 19, "score": 220508.44350508967 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore_container, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n config\n\n .network\n\n .extra_sets\n\n .push(sc_finality_grandpa::grandpa_peers_set_config());\n\n\n\n let select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n let inherent_data_providers = sp_inherents::InherentDataProviders::new();\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n\n\n", "file_path": "node/src/service.rs", "rank": 20, "score": 219868.57833156857 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n keystore_container,\n\n select_chain,\n\n transaction_pool,\n\n inherent_data_providers,\n\n other: 
import_setup,\n\n } = new_partial(&config)?;\n\n\n\n // if let Some(url) = &config.keystore_remote {\n\n // match remote_keystore(url) {\n\n // Ok(k) => keystore_container.set_remote_keystore(k),\n\n // Err(e) => {\n\n // return Err(ServiceError::Other(format!(\n\n // \"Error hooking up remote keystore for {}: {}\",\n\n // url, e\n", "file_path": "node/src/service.rs", "rank": 21, "score": 219868.57833156857 }, { "content": "// jump to block\n\nfn run_to_block(n: u64) {\n\n while System::block_number() < n {\n\n GameGuessHashModule::on_finalize(System::block_number());\n\n GameCenter::on_finalize(System::block_number());\n\n System::on_finalize(System::block_number());\n\n System::set_block_number(System::block_number() + 1);\n\n System::on_initialize(System::block_number());\n\n GameGuessHashModule::on_initialize(System::block_number());\n\n GameCenter::on_initialize(System::block_number());\n\n }\n\n}\n\n\n\n// 【Scenario】test create game func\n", "file_path": "pallets/gamecenter/src/tests.rs", "rank": 22, "score": 216297.8472191092 }, { "content": "fn run_to_block( n: u64) {\n\n\twhile System::block_number() < n {\n\n\t\tSwap::on_finalize(System::block_number());\n\n\t\tSystem::on_finalize(System::block_number());\n\n\t\tSystem::set_block_number(System::block_number()+1);\n\n\t\tSystem::on_initialize(System::block_number());\n\n\t\tSwap::on_initialize(System::block_number());\n\n\t}\n\n}\n\n\n", "file_path": "pallets/swap/src/tests.rs", "rank": 23, "score": 216293.00638922746 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type Balances: Currency<Self::AccountId>;\n\n type OwnerAddress: Get<Self::AccountId>;\n\n type ImportAddress: Get<Self::AccountId>;\n\n type WeightInfo: WeightInfo;\n\n type Currency: Currency<Self::AccountId> + ReservableCurrency<Self::AccountId>;\n\n}\n\n\n\ndecl_storage! 
{\n", "file_path": "pallets/stake/src/lib.rs", "rank": 24, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type WeightInfo: WeightInfo;\n\n type GuessHash: GuessHashTrait + GuessHashFunc<Self::AccountId, GameInstanceId, u128>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "pallets/gamecenter/src/lib.rs", "rank": 25, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type Balances: Currency<Self::AccountId>;\n\n /// The address where funds are temporarily deposited\n\n type OwnerAddress: Get<Self::AccountId>;\n\n type WeightInfo: WeightInfo;\n\n\n\n type Assets: AssetsTrait + AssetsTransfer<Self::AccountId, u32>;\n\n}\n\n\n\npub type BalanceOf<T> =\n\n <<T as Config>::Balances as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n\ndecl_storage! {\n", "file_path": "pallets/bridge/src/lib.rs", "rank": 26, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n type PalletId: Get<PalletId<Self>>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "pallets/demogame/src/lib.rs", "rank": 27, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type WeightInfo: WeightInfo;\n\n /// Only the account has the right to manage all game templates\n\n type OwnerAddress: Get<Self::AccountId>;\n\n}\n\n\n\ndecl_storage! 
{\n", "file_path": "pallets/gametemplates/src/lib.rs", "rank": 28, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n /// The owner can manage the pallet list and set permissions.\n\n type OwnerAddress: Get<Self::AccountId>;\n\n /// The data type that is used to describe this type of NFT.\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n /// Indicates the id type of the pallet\n\n type PalletId: Member + Parameter + Default + Copy + HasCompact + Ord;\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n}\n\n\n\npub type NftId<T> = \n\n <<T as Config>::UniqueAssets as UniqueAssets<<T as frame_system::Config>::AccountId>>::AssetId;\n\n\n\ndecl_storage! {\n", "file_path": "pallets/lease/src/lib.rs", "rank": 29, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n /// The dispatch origin that is able to mint new instances of this type of commodity.\n\n type CommodityAdmin: EnsureOrigin<Self::Origin>;\n\n /// The maximum number of this type of commodity that may exist (minted - burned).\n\n type CommodityLimit: Get<u128>;\n\n /// The maximum number of this type of commodity that any single account may own.\n\n type UserCommodityLimit: Get<u64>;\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n}\n\n\n\n/// The runtime system's hashing algorithm is used to uniquely identify commodities.\n\npub type CommodityId<T> = <T as frame_system::Config>::Hash;\n\n\n\n/// Associates a commodity with its ID.\n\npub type Commodity<T> = (CommodityId<T>, Vec<u8>);\n\n\n\ndecl_storage! 
{\n", "file_path": "pallets/nft/src/lib.rs", "rank": 30, "score": 216110.207452029 }, { "content": "pub trait Config: frame_system::Config {\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n type Balances: Currency<Self::AccountId>;\n\n type ChipBalance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy;\n\n /// The address where funds are temporarily deposited\n\n type MasterAddress: Get<Self::AccountId>;\n\n type WeightInfo: WeightInfo;\n\n}\n\n\n\npub type BalanceOf<T> =\n\n <<T as Config>::Balances as Currency<<T as frame_system::Config>::AccountId>>::Balance;\n\ndecl_storage! {\n", "file_path": "pallets/chips/src/lib.rs", "rank": 31, "score": 216110.207452029 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\t\n\n type Balances: Currency<Self::AccountId>;\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n type PalletId: Get<PalletId<Self>>;\n\n\t/// Weight information for extrinsics in this pallet.\n\n\ttype WeightInfo: WeightInfo;\n\n}\n\n\n\n\n\n\n\ndecl_storage! 
{\n", "file_path": "pallets/nft-exchange/src/lib.rs", "rank": 32, "score": 213520.1286571485 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\t// type MyRandomness: Randomness<Self::Hash>;\n\n\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n type ManageCardInfo: ManageCardInfo<Self::AccountId>;\n\n type PalletId: Get<PalletId<Self>>;\n\n\t/// Weight information for extrinsics in this pallet.\n\n\ttype WeightInfo: WeightInfo;\n\n}\n\n\n\n// 實際的卡片\n\n#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)]\n\npub struct Card<NftId> {\n\n\t/// The card id\n\n\tid: u128,\n\n\tcard_info_id: u128,\n\n\tlevel: u8,\n\n\tability_value_1: u32,\n\n\t/// The card nft id\n\n\tnft_id: NftId,\n\n}\n\n\n\n\n\ndecl_storage! {\n", "file_path": "pallets/card-factory/src/lib.rs", "rank": 33, "score": 213520.1286571485 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\n\n\t/// The units in which we record balances.\n\n\ttype SGAssetBalance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy;\n\n\n\n\t/// The arithmetic type of asset identifier.\n\n\ttype AssetId: Member + Parameter + Default + Copy + HasCompact + AtLeast32BitUnsigned;\n\n\n\n\t/// The currency mechanism.\n\n\ttype Currency: ReservableCurrency<Self::AccountId>;\n\n\n\n\t/// The origin which may forcibly create or destroy an asset.\n\n\ttype ForceOrigin: EnsureOrigin<Self::Origin>;\n\n\n\n\t/// The basic amount of funds that must be reserved when creating a new asset class.\n\n\ttype AssetDepositBase: Get<BalanceOf<Self>>;\n\n\n\n\t/// The additional funds that must be reserved for every zombie account 
that an asset class\n", "file_path": "pallets/subgame-assets/src/lib.rs", "rank": 34, "score": 213520.1286571485 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\t\n\n\ttype OwnerAddress: Get<Self::AccountId>;\n\n type Balances: Currency<Self::AccountId>;\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n\t/// Weight information for extrinsics in this pallet.\n\n\ttype WeightInfo: WeightInfo;\n\n}\n\n\n\n\n\n\n\ndecl_storage! {\n", "file_path": "pallets/seventh-planet/src/lib.rs", "rank": 35, "score": 213520.1286571485 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\t\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n type PalletId: Get<PalletId<Self>>;\n\n\t/// Weight information for extrinsics in this pallet.\n\n\ttype WeightInfo: WeightInfo;\n\n}\n\n\n\n\n\n\n\ndecl_storage! {\n", "file_path": "pallets/manage-card-info/src/lib.rs", "rank": 36, "score": 211042.55424044767 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/gametemplates-guess-hash/src/mock.rs", "rank": 37, "score": 208630.0116949066 }, { "content": "pub trait Config: frame_system::Config + SubGameAssets::Config {\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\ttype WeightInfo: WeightInfo;\n\n\ttype SwapId: Parameter + Member + AtLeast32Bit + Default + Copy + MaybeSerializeDeserialize;\n\n\ttype Currency: ReservableCurrency<Self::AccountId>;\n\n}\n\n\n\ndecl_storage! 
{\n", "file_path": "pallets/swap/src/lib.rs", "rank": 38, "score": 206946.8203105216 }, { "content": "/// The module configuration trait.\n\npub trait Config: frame_system::Config + SubGameAssets::Config {\n\n\t/// The overarching event type.\n\n\ttype Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n\t\n\n type UniqueAssets: UniqueAssets<Self::AccountId>;\n\n type Assets: AssetsTrait + AssetsTransfer<Self::AccountId, u32>;\n\n type Lease: Lease<Self::AccountId, NftId<Self>>;\n\n type PalletId: Get<PalletId<Self>>;\n\n\t/// Weight information for extrinsics in this pallet.\n\n\ttype WeightInfo: WeightInfo;\n\n}\n\n\n\n\n\n\n\ndecl_storage! {\n", "file_path": "pallets/game-recharge/src/lib.rs", "rank": 39, "score": 204869.2351765759 }, { "content": "fn assert_last_event<T: Config>(generic_event: <T as Config>::Event) {\n\n\tlet events = frame_system::Module::<T>::events();\n\n\tlet system_event: <T as frame_system::Config>::Event = generic_event.into();\n\n\t// compare to the last event record\n\n\tlet frame_system::EventRecord { event, .. } = &events[events.len() - 1];\n\n\tassert_eq!(event, &system_event);\n\n}\n\n\n\nbenchmarks! 
{\n\n\tcreate {\n\n\t\tlet caller: T::AccountId = whitelisted_caller();\n\n\t\tlet caller_lookup = T::Lookup::unlookup(caller.clone());\n\n\t\tT::Currency::make_free_balance_be(&caller, BalanceOf::<T>::max_value());\n\n\t}: _(SystemOrigin::Signed(caller.clone()), Default::default(), caller_lookup, 1, 1u32.into())\n\n\tverify {\n\n\t\tassert_last_event::<T>(Event::Created(Default::default(), caller.clone(), caller).into());\n\n\t}\n\n\n\n\tforce_create {\n\n\t\tlet caller: T::AccountId = whitelisted_caller();\n", "file_path": "pallets/subgame-assets/src/benchmarking.rs", "rank": 40, "score": 201161.67254815082 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 10000000000000000),\n\n (2, 10000000000000000),\n\n (3, 10000000000000000),\n\n (4, 10000000000000000),\n\n (5, 10000000000000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/swap/src/mock.rs", "rank": 41, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/gametemplates/src/mock.rs", "rank": 42, 
"score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/demogame/src/mock.rs", "rank": 43, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/stake/src/mock.rs", "rank": 44, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (12500082579580134024, 10000000000000000),\n\n (1, 10000000000000000),\n\n (2, 10000000000000000),\n\n (3, 10000000000000000),\n\n (4, 10000000000000000),\n\n (5, 10000000000000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n\n\n let mut ext: sp_io::TestExternalities = 
t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/tspwhitelist/src/mock.rs", "rank": 45, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/gamecenter/src/mock.rs", "rank": 46, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n .assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/chips/src/mock.rs", "rank": 47, "score": 197708.1516303743 }, { "content": "/// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n let mut t = system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap();\n\n // Initial allocation of money\n\n balances::GenesisConfig::<Test> {\n\n // Provide some initial balances\n\n balances: vec![\n\n (1, 1000000),\n\n (2, 1000000),\n\n (3, 1000000),\n\n (4, 1000000),\n\n (5, 1000000),\n\n ],\n\n }\n\n 
.assimilate_storage(&mut t)\n\n .unwrap();\n\n let mut ext: sp_io::TestExternalities = t.into();\n\n ext.execute_with(|| System::set_block_number(1));\n\n ext\n\n}\n", "file_path": "pallets/lease/src/mock.rs", "rank": 48, "score": 197708.1516303743 }, { "content": "// This function basically just builds a genesis storage key/value store according to\n\n// our desired mockup.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap()\n\n .into()\n\n}\n", "file_path": "pallets/nft/src/mock.rs", "rank": 49, "score": 197707.9140456344 }, { "content": "fn create_default_minted_asset<T: Config>(max_zombies: u32, amount: T::Balance)\n\n\t-> (T::AccountId, <T::Lookup as StaticLookup>::Source)\n\n{\n\n\tlet (caller, caller_lookup) = create_default_asset::<T>(max_zombies);\n\n\tassert!(Assets::<T>::mint(\n\n\t\tSystemOrigin::Signed(caller.clone()).into(),\n\n\t\tDefault::default(),\n\n\t\tcaller_lookup.clone(),\n\n\t\tamount,\n\n\t).is_ok());\n\n\t(caller, caller_lookup)\n\n}\n\n\n", "file_path": "pallets/subgame-assets/src/benchmarking.rs", "rank": 51, "score": 189901.68142386008 }, { "content": "fn add_zombies<T: Config>(minter: T::AccountId, n: u32) {\n\n\tlet origin = SystemOrigin::Signed(minter);\n\n\tfor i in 0..n {\n\n\t\tlet target = account(\"zombie\", i, SEED);\n\n\t\tlet target_lookup = T::Lookup::unlookup(target);\n\n\t\tassert!(Assets::<T>::mint(origin.clone().into(), Default::default(), target_lookup, 100u32.into()).is_ok());\n\n\t}\n\n}\n\n\n", "file_path": "pallets/subgame-assets/src/benchmarking.rs", "rank": 52, "score": 187998.7892328605 }, { "content": "pub trait GuessHashFunc<AccountId, GameIndex, ChipBalance>: GuessHashTrait {\n\n fn create_game(\n\n sender: &AccountId,\n\n bet_next_few_block: u32,\n\n amount: ChipBalance,\n\n ) -> sp_std::result::Result<GameIndex, DispatchError>;\n\n fn bet(\n\n sender: &AccountId,\n\n game_id: GameIndex,\n\n value: ChipBalance,\n\n game_mode: 
GameMode,\n\n ) -> dispatch::DispatchResult;\n\n}\n\n/// Provided to other modules(new game/ bet)\n\nimpl<T: Config> GuessHashFunc<T::AccountId, T::GameIndex, ChipBalance<T>> for Module<T> {\n\n fn create_game(\n\n sender: &T::AccountId,\n\n bet_next_few_block: u32,\n\n amount: ChipBalance<T>,\n\n ) -> sp_std::result::Result<T::GameIndex, DispatchError> {\n", "file_path": "pallets/gametemplates-guess-hash/src/lib.rs", "rank": 53, "score": 185852.97822300682 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n NativeVersion {\n\n runtime_version: VERSION,\n\n can_author_with: Default::default(),\n\n }\n\n}\n\n\n\nconst NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);\n\n/// We allow for 2 seconds of compute with a 6 second average block time.\n\nconst MAXIMUM_BLOCK_WEIGHT: Weight = 2 * WEIGHT_PER_SECOND;\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: BlockNumber = 2400;\n\n\tpub const Version: RuntimeVersion = VERSION;\n\n\tpub RuntimeBlockLength: BlockLength =\n\n\t\tBlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO);\n\n\tpub RuntimeBlockWeights: BlockWeights = BlockWeights::builder()\n\n\t\t.base_block(BlockExecutionWeight::get())\n\n\t\t.for_class(DispatchClass::all(), |weights| {\n\n\t\t\tweights.base_extrinsic = ExtrinsicBaseWeight::get();\n", "file_path": "runtime/src/lib.rs", "rank": 54, "score": 183944.7746488619 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/gamecenter/src/mock.rs", "rank": 55, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/chips/src/mock.rs", "rank": 56, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/lease/src/mock.rs", "rank": 57, "score": 180989.67558462662 }, { "content": "type 
UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/swap/src/mock.rs", "rank": 58, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/stake/src/mock.rs", "rank": 59, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/tspwhitelist/src/mock.rs", "rank": 60, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/gametemplates/src/mock.rs", "rank": 61, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/demogame/src/mock.rs", "rank": 62, "score": 180989.67558462662 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/nft/src/mock.rs", "rank": 63, "score": 180989.67558462662 }, { "content": "fn create_default_asset<T: Config>(max_zombies: u32)\n\n\t-> (T::AccountId, <T::Lookup as StaticLookup>::Source)\n\n{\n\n\tlet caller: T::AccountId = whitelisted_caller();\n\n\tlet caller_lookup = T::Lookup::unlookup(caller.clone());\n\n\tlet root = SystemOrigin::Root.into();\n\n\tassert!(Assets::<T>::force_create(\n\n\t\troot,\n\n\t\tDefault::default(),\n\n\t\tcaller_lookup.clone(),\n\n\t\tmax_zombies,\n\n\t\t1u32.into(),\n\n\t).is_ok());\n\n\t(caller, caller_lookup)\n\n}\n\n\n", "file_path": "pallets/subgame-assets/src/benchmarking.rs", "rank": 64, "score": 180517.74437230287 }, { "content": "/// chips unit type\n\ntype ChipBalance<T> = <<T as Config>::Chips as pallet_chips::ChipsTrait>::ChipBalance;\n\n\n\n/// Define the game mode\n\npub type GameMode = u8;\n\n/// Guess the odd number\n\npub const GAME_MODE_IS_SINGLE: GameMode = 1;\n\n/// Guess the even number\n\npub const 
GAME_MODE_IS_DOUBLE: GameMode = 2;\n\n\n\ndecl_storage! {\n", "file_path": "pallets/gametemplates-guess-hash/src/lib.rs", "rank": 66, "score": 172511.3142858426 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn wasm_binary_unwrap() -> &'static [u8] {\n\n\tWASM_BINARY.expect(\"Development wasm binary is not available. This means the client is \\\n\n\t\t\t\t\t\tbuilt with `SKIP_WASM_BUILD` flag and it is only usable for \\\n\n\t\t\t\t\t\tproduction chains. Please rebuild with the flag disabled.\")\n\n}\n\n\n\nimpl pallet_authority_discovery::Config for Runtime {}\n\n\n\npub type Moment = u64;\n\nparameter_types! {\n\n pub const EpochDuration: u64 = EPOCH_DURATION_IN_BLOCKS as u64;\n\n pub const ExpectedBlockTime: Moment = MILLISECS_PER_BLOCK;\n\n pub const ReportLongevity: u64 = \n\n BondingDuration::get() as u64 * SessionsPerEra::get() as u64 * EpochDuration::get(); // Kusama\n\n}\n\n\n\nimpl pallet_babe::Config for Runtime {\n\n type EpochDuration = EpochDuration;\n\n type ExpectedBlockTime = ExpectedBlockTime;\n\n type EpochChangeTrigger = pallet_babe::ExternalTrigger;\n", "file_path": "runtime/src/lib.rs", "rank": 67, "score": 166392.7438970263 }, { "content": "pub fn mainnet_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Main net wasm binary not available\".to_string())?;\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"SubGame\",\n\n // ID\n\n \"subgame\",\n\n ChainType::Live,\n\n move || {\n\n mainnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![\n\n (\n\n // 5FWWuhHThTUSL97FBpXU9EwobbZ6YZqCs8ryRGDvzAqhfzYF\t\n\n hex![\"98643a2c1477740412cf7b2d7203443626b191523df56ba35ec4a4c5b56b814c\"].into(),\n\n // 5E9k7QkUua2GtrJpiG6WD69YU3qjVbMzzmutbstahkJmo3ZS\t\n\n hex![\"5c50483925073024e9457f6df77e6a66bd22eb80f9bd0ffd815df1aa969ed04c\"].into(),\n\n // 5FFKTjUrtLFARzXtXVB2Wy12w4A7PezpR8VKMSyEPFtG9y86\t\n", "file_path": "node/src/chain_spec.rs", "rank": 68, "score": 157111.63276051142 }, { 
"content": "pub fn development_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"subgame_dev\",\n\n // ID\n\n \"subgame_dev\",\n\n ChainType::Development,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![authority_keys_from_seed(\"Alice\")],\n\n // Sudo account\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n // Pre-funded accounts\n\n vec![\n\n (get_account_id_from_seed::<sr25519::Public>(\"Alice\"), 5000000000000000000),\n", "file_path": "node/src/chain_spec.rs", "rank": 69, "score": 157111.63276051142 }, { "content": "pub fn local_testnet_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"SubGame Staging\",\n\n // ID\n\n \"subgame_staging\",\n\n ChainType::Live,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![\n\n (\n\n // 5FpfTNAjx3yjU8P6N74uwGj19bpuEXrHTHRm1pWdxAw8Pn65\t\n\n hex![\"a63b69bded2ac349e87634116fe96ae1cd1e700f851317aee1a00f0745ec6c1a\"].into(),\n\n // 5FF5CH22pskNGB8d7r7DokSmyvXZJDXSqE4CF3rwFn1wYMP4\t\n\n hex![\"8c9cfc192f256cf6ef76101827353c0f0e28d50ced6aef7a7677fac4f2017825\"].into(),\n\n // 5GEXjMYCYuogszM2WQnnNKA6bbzCNTKaAAY3BFmNkEsEwgsi\t\n", "file_path": "node/src/chain_spec.rs", "rank": 70, "score": 154635.3485920283 }, { "content": "#[test]\n\nfn draw() {\n\n new_test_ext().execute_with(|| {\n\n // 【Given】Arrange\n\n // A user has 500 chips\n\n let _ = Chips::buy_chips(Origin::signed(1), 500);\n\n // B user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(2), 100);\n\n // C user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(5), 100);\n\n // D user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(4), 100);\n\n\n\n 
let bet_next_few_block_num = 19u32;\n\n // A user have a new game, game index = 1, pool = 500\n\n let _ = GameGuessHashModule::create_game(Origin::signed(1), bet_next_few_block_num, 500);\n\n // B bet single num, 100 chips\n\n let _ = GameGuessHashModule::bet(Origin::signed(2), 1, 100, 1);\n\n // C bet single num, 100 chips\n\n let _ = GameGuessHashModule::bet(Origin::signed(5), 1, 100, 1);\n\n // D bet double num, 100 chips\n", "file_path": "pallets/gametemplates-guess-hash/src/tests.rs", "rank": 71, "score": 150707.97932880337 }, { "content": "#[test]\n\nfn bet() {\n\n new_test_ext().execute_with(|| {\n\n // 【Given】Arrange\n\n // A user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(1), 100);\n\n // B user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(2), 100);\n\n let bet_next_few_block_num = 10u32;\n\n // A user have a new game, game index = 1\n\n let _ = GameGuessHashModule::create_game(Origin::signed(1), bet_next_few_block_num, 100);\n\n\n\n // 【When】Act\n\n // B user bet 100 chips/ bet number\n\n assert_ok!(GameGuessHashModule::bet(Origin::signed(2), 1, 100, 1));\n\n\n\n // 【Then】Assert\n\n // Check the chip balance=0\n\n assert_eq!(Chips::chips_map(2).unwrap().balance, 0);\n\n // Check the pledge of chips=100\n\n assert_eq!(Chips::chips_map(2).unwrap().reserve, 100);\n", "file_path": "pallets/gametemplates-guess-hash/src/tests.rs", "rank": 72, "score": 150707.97932880337 }, { "content": "#[test]\n\nfn create_game() {\n\n new_test_ext().execute_with(|| {\n\n // 【Given】Arrange\n\n // A user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(1), 100);\n\n\n\n // 【When】Act\n\n // A user create game\n\n let bet_next_few_block_num = 10u32;\n\n assert_ok!(GameGuessHashModule::create_game(\n\n Origin::signed(1),\n\n bet_next_few_block_num,\n\n 100\n\n ));\n\n\n\n // 【Then】Assert\n\n // Check the chip balance=0\n\n assert_eq!(Chips::chips_map(1).unwrap().balance, 0);\n\n // Check the pledge of chips=100\n\n 
assert_eq!(Chips::chips_map(1).unwrap().reserve, 100);\n", "file_path": "pallets/gametemplates-guess-hash/src/tests.rs", "rank": 73, "score": 147690.69940344544 }, { "content": "pub fn new_partial(\n\n config: &Configuration,\n\n) -> Result<\n\n sc_service::PartialComponents<\n\n FullClient,\n\n FullBackend,\n\n FullSelectChain,\n\n sp_consensus::DefaultImportQueue<Block, FullClient>,\n\n sc_transaction_pool::FullPool<Block, FullClient>,\n\n (\n\n sc_consensus_babe::BabeBlockImport<\n\n Block,\n\n FullClient,\n\n sc_finality_grandpa::GrandpaBlockImport<\n\n FullBackend,\n\n Block,\n\n FullClient,\n\n FullSelectChain,\n\n >,\n\n >,\n", "file_path": "node/src/service.rs", "rank": 74, "score": 144209.62759588475 }, { "content": "pub trait WeightInfo {\n\n fn create_game() -> Weight;\n\n fn bet() -> Weight;\n\n fn on_finalize(count: u32) -> Weight;\n\n}\n", "file_path": "pallets/gametemplates-guess-hash/src/lib.rs", "rank": 75, "score": 140189.3789408168 }, { "content": "/// Properties for Subgame.\n\npub fn subgame_properties() -> Properties {\n\n let mut properties = Properties::new();\n\n properties.insert(\"ss58Format\".into(), 27.into());\n\n properties.insert(\"tokenDecimals\".into(), vec![10].into());\n\n properties.insert(\"tokenSymbol\".into(), vec![\"SGB\"].into());\n\n properties\n\n}\n", "file_path": "node/src/chain_spec.rs", "rank": 76, "score": 135235.32361584643 }, { "content": "/// Properties for Subgame.\n\npub fn subgame_mainnet_properties() -> Properties {\n\n let mut properties = Properties::new();\n\n properties.insert(\"ss58Format\".into(), 27.into());\n\n properties.insert(\"tokenDecimals\".into(), vec![10].into());\n\n properties.insert(\"tokenSymbol\".into(), vec![\"SGB\"].into());\n\n properties\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 77, "score": 133324.0881363937 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: 
From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 78, "score": 132563.05768919585 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n let cli = Cli::from_args();\n\n\n\n match &cli.subcommand {\n\n Some(Subcommand::Key(cmd)) => cmd.run(&cli),\n\n Some(Subcommand::BuildSpec(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.sync_run(|config| cmd.run(config.chain_spec, config.network))\n\n }\n\n Some(Subcommand::CheckBlock(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.async_run(|config| {\n\n let PartialComponents {\n\n client,\n\n task_manager,\n\n import_queue,\n\n ..\n\n } = service::new_partial(&config)?;\n\n Ok((cmd.run(client, import_queue), task_manager))\n\n })\n", "file_path": "node/src/command.rs", "rank": 79, "score": 131070.18000465322 }, { "content": "type FullBackend = sc_service::TFullBackend<Block>;\n", "file_path": "node/src/service.rs", "rank": 80, "score": 128469.71776714425 }, { "content": "/// Generate an Babe authority key.\n\npub fn authority_keys_from_seed(s: &str) -> (AccountId, AccountId, BabeId, GrandpaId, ImOnlineId, AuthorityDiscoveryId,) {\n\n (\n\n get_account_id_from_seed::<sr25519::Public>(&format!(\"{}//stash\", s)),\n\n\t\tget_account_id_from_seed::<sr25519::Public>(s),\n\n\t\tget_from_seed::<BabeId>(s),\n\n get_from_seed::<GrandpaId>(s),\n\n\t\tget_from_seed::<ImOnlineId>(s),\n\n\t\tget_from_seed::<AuthorityDiscoveryId>(s),\n\n )\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 81, "score": 123853.50296265227 }, { "content": "type FullSelectChain = sc_consensus::LongestChain<FullBackend, Block>;\n\n\n", "file_path": "node/src/service.rs", "rank": 82, "score": 121456.34434591344 }, { "content": "pub trait NftExchange<AccountId, NftId, BalanceOf> {\n\n\tfn _create_platform(\n\n\t\tadmin: 
AccountId,\n\n\t\tpercentage_of_fee: u8,\n\n\t\tfee_account: AccountId,\n\n\t) -> DispatchResult;\n\n\n\n\tfn _update_platform(\n\n\t\tadmin: AccountId,\n\n\t\tid: u128,\n\n\t\tpercentage_of_fee: u8,\n\n\t\tfee_account: AccountId,\n\n\t) -> DispatchResult;\n\n\n\n\tfn _create_auction(\n\n\t\tplatform_id: u128,\n\n\t\tseller: AccountId,\n\n\t\tnft_id: NftId,\n\n\t\tamount: BalanceOf,\n\n\t) -> DispatchResult;\n", "file_path": "pallets/nft-exchange/src/nft_exchange.rs", "rank": 83, "score": 119162.8141170754 }, { "content": "type FullClient = sc_service::TFullClient<Block, RuntimeApi, Executor>;\n", "file_path": "node/src/service.rs", "rank": 84, "score": 116638.20324802645 }, { "content": "#[test]\n\nfn stake() {\n\n new_test_ext().execute_with(|| {\n\n let user = 2;\n\n let amount: u64 = 1000;\n\n\n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n assert_ok!(SubGameStake::stake(Origin::signed(user.clone()), amount.clone()));\n\n assert_eq!(amount, Balances::reserved_balance(&user));\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 85, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn bet() {\n\n new_test_ext().execute_with(|| {\n\n // 【Given】Arrange\n\n // A user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(1), 100);\n\n // B user has 100 chips\n\n let _ = Chips::buy_chips(Origin::signed(2), 100);\n\n let bet_next_few_block_num = 10u32;\n\n // A user have a new game, game index = 1\n\n let _ = GameGuessHashModule::create_game(Origin::signed(1), bet_next_few_block_num, 100);\n\n\n\n // 【When】Act\n\n // B user下注 100 chips/ bet single\n\n assert_ok!(GameCenter::play_game(Origin::signed(2), 1, 100, 1));\n\n\n\n // 【Then】Assert\n\n // check chip 
balance=0\n\n assert_eq!(Chips::chips_map(2).unwrap().balance, 0);\n\n // check chop reserve=100\n\n assert_eq!(Chips::chips_map(2).unwrap().reserve, 100);\n", "file_path": "pallets/gamecenter/src/tests.rs", "rank": 86, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn sign_up() {\n\n new_test_ext().execute_with(|| {\n\n let user = 2;\n\n \n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n let want_account = account.clone().to_lowercase().as_bytes().to_vec();\n\n assert_eq!(want_account, SubGameStake::user_info_map(user.clone()).account);\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 87, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn whitelist() {\n\n new_test_ext().execute_with(|| {\n\n let now: u64 = chrono::Utc::now().timestamp().saturated_into::<u64>() * 1000u64;\n\n run_to_block(10, now);\n\n\n\n\t\tinit_asset();\n\n\n\n\t\t// let user = 1;\n\n\t\t// let amount = (1.1f64 * SGB_DECIMALS as f64) as u64;\n\n // assert_noop!(TSPWhitelist::whitelist(Origin::signed(user.clone()), amount), Error::<Test>::BuyTooLittle);\n\n\n\n\t\t// let user = 1;\n\n\t\t// let amount = 0u64 * SGB_DECIMALS;\n\n // assert_noop!(TSPWhitelist::whitelist(Origin::signed(user.clone()), amount), Error::<Test>::BuyTooLittle);\n\n\n\n\t\t// let user = 1;\n\n\t\t// let amount = 10000000u64 * SGB_DECIMALS;\n\n // assert_noop!(TSPWhitelist::whitelist(Origin::signed(user.clone()), amount), Error::<Test>::NotEnoughBalance);\n\n\n\n\t\t// let user = 1;\n", "file_path": "pallets/tspwhitelist/src/tests.rs", "rank": 88, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn swap2() {\n\n new_test_ext().execute_with(|| {\n\n\n\n // $r = 1 - 0.003\n\n\t\t// $a = $dx / 
$x\n\n\t\t// $dy = ($a * $r) / (1 + ($a * $r)) * $y\n\n let x: f64 = 9378908395443.0;\n\n let y: f64 = 1037063538.0;\n\n let dx: f64 = 10000000000.25;\n\n let r: f64 = 1.0 - 0.003;\n\n let a: f64 = dx / x;\n\n let dy: f64 = (a * r) / (1.0 + (a * r)) * y;\n\n let output_amount: u64 = dy.floor() as u64;\n\n\n\n println!(\"{:?}\", output_amount);\n\n });\n\n}", "file_path": "pallets/swap/src/tests.rs", "rank": 89, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn swap() {\n\n new_test_ext().execute_with(|| {\n\n init_asset();\n\n\n\n let user = 1;\n\n let asset_x: u32 = 8;\n\n let x: u64 = 1 * GOGO_DECIMALS;\n\n let asset_y: u32 = 7;\n\n let y: u64 = 11 * USDT_DECIMALS; \n\n assert_ok!(Swap::create_pool(Origin::signed(user.clone()), asset_x, x, asset_y, y));\n\n let swap_pool = Swap::swap_pool(1);\n\n\n\n let before_user_y_balance = SubGameAssets::Module::<Test>::balance(swap_pool.asset_y, user);\n\n\n\n // Should return Slipage error\n\n let swap_id = 1;\n\n let input_asset: u32 = 8;\n\n let input_amount: u64 = 1 * GOGO_DECIMALS;\n\n let output_asset: u32 = 7;\n\n let expected_output_amount: u64 = 5 * USDT_DECIMALS; \n", "file_path": "pallets/swap/src/tests.rs", "rank": 90, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn unlock() {\n\n new_test_ext().execute_with(|| {\n\n let owner = 1;\n\n let user = 2;\n\n let amount: u64 = 1000;\n\n\n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n assert_ok!(SubGameStake::stake(Origin::signed(user.clone()), amount.clone()));\n\n\n\n assert_ok!(SubGameStake::unlock(Origin::signed(owner.clone()), user.clone(), amount.clone()));\n\n assert_eq!(0, Balances::reserved_balance(&user));\n\n });\n\n}\n\n\n", "file_path": 
"pallets/stake/src/tests.rs", "rank": 91, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn burn() {\n\n new_test_ext().execute_with(|| {\n\n assert_ok!(SUT::mint(Origin::root(), 1, Vec::<u8>::from(\"test\")));\n\n assert_eq!(SUT::total_for_account(1), 1);\n\n\n\n // let assets = SUT::assets_for_account(&(1 as u64));\n\n\n\n // assert_ok!(SUT::burn(Origin::signed(1), assets[0].0));\n\n\n\n assert_eq!(SUT::total(), 0);\n\n assert_eq!(SUT::burned(), 1);\n\n assert_eq!(SUT::total_for_account(1), 0);\n\n // assert_eq!(SUT::commodities_for_account::<u64>(1), vec![]);\n\n assert_eq!(\n\n SUT::account_for_commodity::<H256>(Vec::<u8>::from(\"test\").blake2_256().into()),\n\n 0\n\n );\n\n });\n\n}\n\n\n", "file_path": "pallets/nft/src/tests.rs", "rank": 92, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn demo() {\n\n new_test_ext().execute_with(|| {\n\n let program_id = 1;\n\n let pallet_id = 1;\n\n let stake_amount = 100;\n\n let day = 1;\n\n assert_ok!(SubgameStakeNft::add_program(Origin::signed(3), program_id, stake_amount, day));\n\n assert_ok!(Lease::add_pallet(Origin::signed(3), 1, Vec::<u8>::from(\"test pallet\")));\n\n\n\n assert_ok!(SubgameStakeNft::stake(Origin::signed(4), program_id, pallet_id));\n\n\n\n \n\n assert_eq!(\n\n DemoGame::call_success(4),\n\n 0\n\n );\n\n assert_ok!(\n\n DemoGame::demo(Origin::signed(4))\n\n );\n\n assert_eq!(\n\n DemoGame::call_success(4),\n\n 1\n\n );\n\n \n\n });\n\n\n\n\n\n\n\n\n\n}\n", "file_path": "pallets/demogame/src/tests.rs", "rank": 93, "score": 116132.11431252642 }, { "content": "#[test]\n\nfn withdraw() {\n\n new_test_ext().execute_with(|| {\n\n let owner = 1;\n\n let user = 2;\n\n let amount: u64 = 1000;\n\n let default_balance: u64 = 1000000;\n\n\n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n 
assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n assert_ok!(SubGameStake::withdraw(Origin::signed(owner.clone()), user.clone(), amount.clone()));\n\n assert_eq!(default_balance + amount, Balances::free_balance(&user));\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 94, "score": 116132.11431252642 }, { "content": "pub trait GuessHashTrait {}\n\n\n\nimpl<T: Config> GuessHashTrait for Module<T> {}\n\n\n", "file_path": "pallets/gametemplates-guess-hash/src/lib.rs", "rank": 95, "score": 115302.3671356268 }, { "content": "#[test]\n\nfn sign_up_exists() {\n\n new_test_ext().execute_with(|| {\n\n let user = 2;\n\n \n\n let account = \"s234567\";\n\n let account_vec = account.as_bytes().to_vec();\n\n \n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n assert_noop!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()), Error::<Test>::UserExists);\n\n\n\n let user = 3;\n\n assert_noop!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()), Error::<Test>::UserExists);\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 96, "score": 113899.91910828219 }, { "content": "#[test]\n\nfn import_stake() {\n\n new_test_ext().execute_with(|| {\n\n let owner = 1;\n\n let user = 2;\n\n let amount: u64 = 1000;\n\n \n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n 
assert_ok!(SubGameStake::import_stake(Origin::signed(owner.clone()), user.clone(), amount.clone()));\n\n assert_eq!(amount, Balances::reserved_balance(&user));\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 97, "score": 113899.91910828219 }, { "content": "#[test]\n\nfn delete_user() {\n\n new_test_ext().execute_with(|| {\n\n let owner = 1;\n\n let user = 2;\n\n \n\n let account = \"s234567\";\n\n let account_vec = account.clone().as_bytes().to_vec();\n\n\n\n let referrer_account = \"gametop\";\n\n let referrer_account_vec = referrer_account.as_bytes().to_vec();\n\n\n\n assert_ok!(SubGameStake::sign_up(Origin::signed(user.clone()), account_vec.clone(), referrer_account_vec.clone()));\n\n\n\n assert_ok!(SubGameStake::delete_user(Origin::signed(owner.clone()), user.clone(), account_vec.clone()));\n\n\n\n let want_account = \"\".as_bytes().to_vec();\n\n assert_eq!(want_account, SubGameStake::user_info_map(user.clone()).account);\n\n });\n\n}\n", "file_path": "pallets/stake/src/tests.rs", "rank": 98, "score": 113899.91910828219 }, { "content": "#[test]\n\nfn unlock_not_owner() {\n\n new_test_ext().execute_with(|| {\n\n let user = 2;\n\n let amount: u64 = 1000;\n\n assert_noop!(SubGameStake::unlock(Origin::signed(user.clone()), user.clone(), amount.clone()), Error::<Test>::PermissionDenied);\n\n });\n\n}\n\n\n", "file_path": "pallets/stake/src/tests.rs", "rank": 99, "score": 113899.91910828219 } ]
Rust
core/codegen/tests/route.rs
Compro-Prasad/Rocket
198b6f0e9726f7e53c61d365b797df630c876c59
#![deny(non_snake_case)] #[macro_use] extern crate rocket; use std::path::PathBuf; use rocket::http::ext::Normalize; use rocket::local::blocking::Client; use rocket::data::{self, Data, FromData, ToByteUnit}; use rocket::request::{Request, Form}; use rocket::http::{Status, RawStr, ContentType}; #[derive(FromForm, UriDisplayQuery)] struct Inner<'r> { field: &'r RawStr } struct Simple(String); #[async_trait] impl FromData for Simple { type Error = (); async fn from_data(_: &Request<'_>, data: Data) -> data::Outcome<Self, ()> { let string = data.open(64.bytes()).stream_to_string().await.unwrap(); data::Outcome::Success(Simple(string)) } } #[post("/<a>/<name>/name/<path..>?sky=blue&<sky>&<query..>", format = "json", data = "<simple>", rank = 138)] fn post1( sky: usize, name: &RawStr, a: String, query: Form<Inner<'_>>, path: PathBuf, simple: Simple, ) -> String { let string = format!("{}, {}, {}, {}, {}, {}", sky, name, a, query.field, path.normalized_str(), simple.0); let uri = uri!(post2: a, name.url_decode_lossy(), path, sky, query.into_inner()); format!("({}) ({})", string, uri.to_string()) } #[route(POST, path = "/<a>/<name>/name/<path..>?sky=blue&<sky>&<query..>", format = "json", data = "<simple>", rank = 138)] fn post2( sky: usize, name: &RawStr, a: String, query: Form<Inner<'_>>, path: PathBuf, simple: Simple, ) -> String { let string = format!("{}, {}, {}, {}, {}, {}", sky, name, a, query.field, path.normalized_str(), simple.0); let uri = uri!(post2: a, name.url_decode_lossy(), path, sky, query.into_inner()); format!("({}) ({})", string, uri.to_string()) } #[allow(dead_code)] #[post("/<_unused_param>?<_unused_query>", data="<_unused_data>")] fn test_unused_params(_unused_param: String, _unused_query: String, _unused_data: Data) { } #[test] fn test_full_route() { let rocket = rocket::ignite() .mount("/1", routes![post1]) .mount("/2", routes![post2]); let client = Client::tracked(rocket).unwrap(); let a = "A%20A"; let name = "Bob%20McDonald"; let path = 
"this/path/here"; let sky = 777; let query = "field=inside"; let simple = "data internals"; let path_part = format!("/{}/{}/name/{}", a, name, path); let query_part = format!("?sky={}&sky=blue&{}", sky, query); let uri = format!("{}{}", path_part, query_part); let expected_uri = format!("{}?sky=blue&sky={}&{}", path_part, sky, query); let response = client.post(&uri).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client.post(format!("/1{}", uri)).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client .post(format!("/1{}", uri)) .header(ContentType::JSON) .body(simple) .dispatch(); assert_eq!(response.into_string().unwrap(), format!("({}, {}, {}, {}, {}, {}) ({})", sky, name, "A A", "inside", path, simple, expected_uri)); let response = client.post(format!("/2{}", uri)).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client .post(format!("/2{}", uri)) .header(ContentType::JSON) .body(simple) .dispatch(); assert_eq!(response.into_string().unwrap(), format!("({}, {}, {}, {}, {}, {}) ({})", sky, name, "A A", "inside", path, simple, expected_uri)); } mod scopes { mod other { #[get("/world")] pub fn world() -> &'static str { "Hello, world!" } } #[get("/hello")] pub fn hello() -> &'static str { "Hello, outside world!" } use other::world; fn _rocket() -> rocket::Rocket { rocket::ignite().mount("/", rocket::routes![hello, world]) } }
#![deny(non_snake_case)] #[macro_use] extern crate rocket; use std::path::PathBuf; use rocket::http::ext::Normalize; use rocket::local::blocking::Client; use rocket::data::{self, Data, FromData, ToByteUnit}; use rocket::request::{Request, Form}; use rocket::http::{Status, RawStr, ContentType}; #[derive(FromForm, UriDisplayQuery)] struct Inner<'r> { field: &'r RawStr } struct Simple(String); #[async_trait] impl FromData for Simple { type Error = (); async fn from_data(_: &Request<'_>, data: Data) -> data::Outcome<Self, ()> { let string = data.open(64.bytes()).stream_to_string().await.unwrap(); data::Outcome::Success(Simple(string)) } } #[post("/<a>/<name>/name/<path..>?sky=blue&<sky>&<query..>", format = "json", data = "<simple>", rank = 138)] fn post1( sky: usize, name: &RawStr, a: String, query: Form<Inner<'_>>, path: PathBuf, simple: Simple, ) -> String { let string = format!("{}, {}, {}, {}, {}, {}", sky, name, a, query.field, path.normalized_str(), simple.0); let uri = uri!(post2: a, name.url_decode_lossy(), path, sky, query.into_inner()); format!("({}) ({})", string, uri.to_string()) } #[route(POST, path = "/<a>/<name>/name/<path..>?sky=blue&<sky>&<query..>", format = "json", data = "<simple>", rank = 138)] fn post2( sky: usize, name: &RawStr, a: String, query: Form<Inner<'_>>, path: PathBuf, simple: Simple, ) -> String { let string = format!("{}, {}, {}, {}, {}, {}", sky, name, a, query.field, path.normalized_str(), simple.0); let uri = uri!(post2: a, name.url_decode_lossy(), path, sky, query.into_inner()); format!("({}) ({})", string, uri.to_string()) } #[allow(dead_code)] #[post("/<_unused_param>?<_unused_query>", data="<_unused_data>")] fn test_unused_params(_unused_param: String, _unused_query: String, _unused_data: Data) { } #[test] fn test_full_route() { let rocket = rocket::ignite() .mount("/1", routes![post1]) .mount("/2", routes![post2]); let client = Client::tracked(rocket).unwrap(); let a = "A%20A"; let name = "Bob%20McDonald"; let path = 
"this/path/here"; let sky = 777; let query = "field=inside"; let simple = "data internals"; let path_part = format!("/{}/{}/name/{}", a, name, path); let query_part = format!("?sky={}&sky=blue&{}", sky, query); let uri = format!("{}{}", path_part, query_part); let expected_uri = format!("{}?sky=blue&sky={}&{}", path_part, sky, query); let response = client.post(&uri).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client.post(format!("/1{}", uri)).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client .post(format!("/1{}", uri)) .header(ContentType::JSON) .body(simple) .dispatch(); assert_eq!(response.into_string().unwrap(), format!("({}, {}, {}, {}, {}, {}) ({})", sky, name, "A A", "inside", path, simple, expected_uri)); let response = client.post(format!("/2{}", uri)).body(simple).dispatch(); assert_eq!(response.status(), Status::NotFound); let response = client .post(format!("/2{}", uri)) .header(ContentType::JSON) .body(simple) .
mod scopes { mod other { #[get("/world")] pub fn world() -> &'static str { "Hello, world!" } } #[get("/hello")] pub fn hello() -> &'static str { "Hello, outside world!" } use other::world; fn _rocket() -> rocket::Rocket { rocket::ignite().mount("/", rocket::routes![hello, world]) } }
dispatch(); assert_eq!(response.into_string().unwrap(), format!("({}, {}, {}, {}, {}, {}) ({})", sky, name, "A A", "inside", path, simple, expected_uri)); }
function_block-function_prefix_line
[ { "content": "fn test(uri: String, expected: String) {\n\n let client = Client::tracked(super::rocket()).unwrap();\n\n let response = client.get(&uri).dispatch();\n\n assert_eq!(response.into_string(), Some(expected));\n\n}\n\n\n", "file_path": "examples/ranking/src/tests.rs", "rank": 0, "score": 402205.57584166434 }, { "content": "#[post(\"/f\", data = \"<form>\")]\n\nfn form(form: Form<Inner<'_>>) -> String { form.field.url_decode_lossy() }\n\n\n", "file_path": "core/codegen/tests/route-data.rs", "rank": 1, "score": 382022.7474168988 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn simple(id: i32, name: String) -> &'static str { \"\" }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uris-invalid-syntax.rs", "rank": 2, "score": 377757.6567336082 }, { "content": "#[post(\"/a/<id>/then/<path..>\")]\n\nfn param_and_segments(path: PathBuf, id: usize) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 3, "score": 377588.27881076885 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn simple2(id: i32, name: String) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 4, "score": 375040.41773590806 }, { "content": "#[post(\"/?<id>&<name>\")]\n\nfn simple4(id: i32, name: String) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 5, "score": 375040.41773590806 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn simple(id: i32, name: String) -> &'static str { \"\" }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uris-invalid-syntax.rs", "rank": 7, "score": 372764.8528848766 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn simple(id: i32, name: String) -> &'static str { \"\" }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uris-invalid-syntax.rs", "rank": 8, "score": 372764.85288487666 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn simple2_flipped(name: String, id: i32) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 9, "score": 369733.5648852195 }, { "content": 
"#[post(\"/?<id>&<name>\")]\n\nfn simple4_flipped(name: String, id: i32) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 10, "score": 369733.56488521944 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n\nuse rocket::request::{Query, FromQuery};\n\n\n\nimpl<'q> FromQuery<'q> for S {\n\n type Error = ();\n\n fn from_query(query: Query<'q>) -> Result<Self, Self::Error> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 11, "score": 364069.16980927635 }, { "content": "#[post(\"/?<id>&<name>\")]\n\nfn optionals_q(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 12, "score": 364069.16980927635 }, { "content": "#[get(\"/<name>/<age>\", format = \"json\")]\n\nfn get_hello(name: String, age: u8) -> Json<String> {\n\n // NOTE: In a real application, we'd use `rocket_contrib::json::Json`.\n\n let person = Person { name, age };\n\n Json(serde_json::to_string(&person).unwrap())\n\n}\n\n\n\n// In a `POST` request and all other payload supporting request types, the\n\n// content type is matched against the `format` in the route attribute.\n\n//\n\n// Note that `content::Json` simply sets the content-type to `application/json`.\n\n// In a real application, we wouldn't use `serde_json` directly; instead, we'd\n\n// use `contrib::Json` to automatically serialize a type into JSON.\n\n#[post(\"/<age>\", format = \"plain\", data = \"<name_data>\")]\n\nasync fn post_hello(age: u8, name_data: Data) -> Result<Json<String>, Debug<io::Error>> {\n\n let name = name_data.open(64.bytes()).stream_to_string().await?;\n\n let person = Person { name, age };\n\n // NOTE: In a real application, we'd use `rocket_contrib::json::Json`.\n\n Ok(Json(serde_json::to_string(&person).expect(\"valid JSON\")))\n\n}\n\n\n", "file_path": "examples/content_types/src/main.rs", "rank": 13, "score": 
362289.8041777076 }, { "content": "#[post(\"/?<id>&<name>\")]\n\nfn optionals_q(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 14, "score": 360161.6911325346 }, { "content": "#[post(\"/?<id>&<name>\")]\n\nfn optionals_q(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 15, "score": 360161.6911325346 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n\nuse rocket::request::{Query, FromQuery};\n\n\n\nimpl<'q> FromQuery<'q> for S {\n\n type Error = ();\n\n fn from_query(query: Query<'q>) -> Result<Self, Self::Error> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 16, "score": 360161.6911325346 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n\nuse rocket::request::{Query, FromQuery};\n\n\n\nimpl<'q> FromQuery<'q> for S {\n\n type Error = ();\n\n fn from_query(query: Query<'q>) -> Result<Self, Self::Error> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 17, "score": 360161.6911325346 }, { "content": "#[post(\"/a/<path..>\")]\n\nfn segments(path: PathBuf) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 18, "score": 358474.92067239794 }, { "content": "#[post(\"/\", data = \"<form>\")]\n\nfn index(form: Form<Simple>) -> String {\n\n form.into_inner().value\n\n}\n\n\n\nmod limits_tests {\n\n use rocket;\n\n use rocket::local::blocking::Client;\n\n use rocket::http::{Status, ContentType};\n\n use rocket::data::Limits;\n\n\n\n fn rocket_with_forms_limit(limit: u64) -> rocket::Rocket {\n\n let limits = Limits::default().limit(\"forms\", limit.into());\n\n let config = rocket::Config::figment().merge((\"limits\", limits));\n\n 
rocket::custom(config).mount(\"/\", routes![super::index])\n\n }\n\n\n\n #[test]\n\n fn large_enough() {\n\n let client = Client::tracked(rocket_with_forms_limit(128)).unwrap();\n\n let response = client.post(\"/\")\n", "file_path": "core/lib/tests/limits.rs", "rank": 19, "score": 357120.21018473257 }, { "content": "#[get(\"/hello/<name>\")]\n\nfn hello(name: String) -> String {\n\n format!(\"Hello, {}!\", name)\n\n}\n\n\n", "file_path": "core/lib/tests/uri-percent-encoding-issue-808.rs", "rank": 20, "score": 354227.75872725114 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uris-bad-params.rs", "rank": 21, "score": 350939.81739932764 }, { "content": "fn test(uri: String, expected: String) {\n\n let client = Client::tracked(super::rocket()).unwrap();\n\n assert_eq!(client.get(&uri).dispatch().into_string(), Some(expected));\n\n}\n\n\n", "file_path": "examples/hello_person/src/tests.rs", "rank": 22, "score": 350517.4950980885 }, { "content": "#[post(\"/a/<id>/then/<path..>\")]\n\nfn guarded_segments(cookies: &CookieJar<'_>, path: PathBuf, id: usize) { }\n\n\n\nmacro_rules! 
assert_uri_eq {\n\n ($($uri:expr => $expected:expr,)+) => {\n\n $(assert_eq!($uri, rocket::http::uri::Origin::parse($expected).expect(\"valid origin URI\"));)+\n\n };\n\n}\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 23, "score": 348440.289425233 }, { "content": "#[post(\"/\", data = \"<form_data>\")]\n\nfn bug(form_data: Form<FormData>) -> String {\n\n form_data.into_inner().form_data\n\n}\n\n\n\nmod tests {\n\n use super::*;\n\n use rocket::local::blocking::Client;\n\n use rocket::http::ContentType;\n\n use rocket::http::Status;\n\n\n\n fn check_decoding(raw: &str, decoded: &str) {\n\n let client = Client::tracked(rocket::ignite().mount(\"/\", routes![bug])).unwrap();\n\n let response = client.post(\"/\")\n\n .header(ContentType::Form)\n\n .body(format!(\"form_data={}\", raw))\n\n .dispatch();\n\n\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(Some(decoded.to_string()), response.into_string());\n\n }\n", "file_path": "core/lib/tests/form_value_decoding-issue-82.rs", "rank": 24, "score": 347028.9083644326 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uris-bad-params.rs", "rank": 25, "score": 346952.63291442074 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &RawStr>) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uris-bad-params.rs", "rank": 26, "score": 346952.63291442074 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn guard_2(name: String, cookies: &CookieJar<'_>, id: i32) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 27, "score": 344455.49298964744 }, { "content": "#[post(\"/a/<id>/hi/<name>/hey\")]\n\nfn guard_3(id: i32, name: String, cookies: &CookieJar<'_>) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 28, "score": 344455.39267533587 }, { "content": "#[post(\"/strict\", data = 
\"<form>\")]\n\nfn strict<'r>(form: Form<MyForm<'r>>) -> String {\n\n form.field.as_str().into()\n\n}\n\n\n", "file_path": "core/lib/tests/strict_and_lenient_forms.rs", "rank": 29, "score": 342630.74738011183 }, { "content": "fn test(uri: &str, content_type: ContentType, status: Status, body: String) {\n\n let client = Client::tracked(rocket()).unwrap();\n\n let response = client.get(uri).header(content_type).dispatch();\n\n assert_eq!(response.status(), status);\n\n assert_eq!(response.into_string(), Some(body));\n\n}\n\n\n", "file_path": "examples/manual_routes/src/tests.rs", "rank": 30, "score": 336780.8536286335 }, { "content": "#[get(\"/hello/<name>/<age>\", rank = 2)]\n\nfn hi(name: String, age: &RawStr) -> String {\n\n format!(\"Hi {}! Your age ({}) is kind of funky.\", name, age)\n\n}\n\n\n", "file_path": "examples/ranking/src/main.rs", "rank": 31, "score": 331226.55068574194 }, { "content": "#[derive(Debug, PartialEq, FromForm)]\n\nstruct FieldNamedV<'r> {\n\n v: &'r RawStr,\n\n}\n\n\n", "file_path": "core/codegen/tests/from_form.rs", "rank": 32, "score": 330714.81218104664 }, { "content": "struct Simple(String);\n\n\n\n#[async_trait]\n\nimpl FromData for Simple {\n\n type Error = ();\n\n\n\n async fn from_data(_: &Request<'_>, data: Data) -> data::Outcome<Self, ()> {\n\n match data.open(64.bytes()).stream_to_string().await {\n\n Ok(string) => data::Outcome::Success(Simple(string)),\n\n Err(_) => data::Outcome::Failure((Status::InternalServerError, ())),\n\n }\n\n }\n\n}\n\n\n", "file_path": "core/codegen/tests/route-data.rs", "rank": 33, "score": 330703.3781990044 }, { "content": "#[get(\"/test/<path..>\")]\n\nfn test(path: Segments<'_>) -> String {\n\n path.collect::<Vec<_>>().join(\"/\")\n\n}\n\n\n", "file_path": "core/lib/tests/segments-issues-41-86.rs", "rank": 34, "score": 330618.27299046796 }, { "content": "#[test]\n\nfn check_simple_named() {\n\n assert_uri_eq! 
{\n\n uri!(simple: id = 100) => \"/100\",\n\n uri!(simple: id = -23) => \"/-23\",\n\n uri!(unused_param: used = 1, _unused = 2) => \"/1/2\",\n\n }\n\n\n\n assert_uri_eq! {\n\n uri!(simple2: id = 100, name = \"hello\".to_string()) => \"/100/hello\",\n\n uri!(simple2: name = \"hi\".to_string(), id = 123) => \"/123/hi\",\n\n uri!(simple2_flipped: id = 1349, name = \"hey\".to_string()) => \"/1349/hey\",\n\n uri!(simple2_flipped: name = \"hello\".to_string(), id = 100) => \"/100/hello\",\n\n }\n\n\n\n // Ensure that `.from_uri_param()` is called.\n\n assert_uri_eq! {\n\n uri!(simple2: id = 100, name = \"hello\") => \"/100/hello\",\n\n uri!(simple2: id = 100, name = \"hi\") => \"/100/hi\",\n\n uri!(simple2: id = 1349, name = \"hey\") => \"/1349/hey\",\n\n uri!(simple2: name = \"hello\", id = 100) => \"/100/hello\",\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 35, "score": 328399.7520139258 }, { "content": "#[post(\"/lenient\", data = \"<form>\")]\n\nfn lenient<'r>(form: LenientForm<MyForm<'r>>) -> String {\n\n form.field.as_str().into()\n\n}\n\n\n\nmod strict_and_lenient_forms_tests {\n\n use super::*;\n\n use rocket::local::blocking::Client;\n\n use rocket::http::{Status, ContentType};\n\n\n\n const FIELD_VALUE: &str = \"just_some_value\";\n\n\n\n fn client() -> Client {\n\n Client::tracked(rocket::ignite().mount(\"/\", routes![strict, lenient])).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_strict_form() {\n\n let client = client();\n\n let response = client.post(\"/strict\")\n\n .header(ContentType::Form)\n", "file_path": "core/lib/tests/strict_and_lenient_forms.rs", "rank": 36, "score": 327465.87116758333 }, { "content": "#[post(\"/<id>?<name>\")]\n\nfn has_two(cookies: &CookieJar<'_>, id: i32, name: String) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uris-bad-params.rs", "rank": 37, "score": 326450.5893785786 }, { "content": "#[post(\"/<id>?<name>\")]\n\nfn has_two(cookies: &CookieJar<'_>, id: i32, name: String) { }\n\n\n", "file_path": 
"core/codegen/tests/ui-fail-stable/typed-uris-bad-params.rs", "rank": 38, "score": 322327.23220823985 }, { "content": "#[post(\"/<id>?<name>\")]\n\nfn has_two(cookies: &CookieJar<'_>, id: i32, name: String) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uris-bad-params.rs", "rank": 39, "score": 322327.23220823985 }, { "content": "#[post(\"/<id>\", data = \"<form>\")]\n\nfn no_uri_display_okay(id: i32, form: Form<Second>) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 40, "score": 314436.75060420384 }, { "content": "#[post(\"/<id>\")]\n\nfn simple(id: usize) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 41, "score": 313662.74732739193 }, { "content": "#[get(\"/<path..>\")]\n\nfn files(route: &Route, path: PathBuf) -> String {\n\n Path::new(route.base()).join(path).normalized_str().to_string()\n\n}\n\n\n\nmod route_guard_tests {\n\n use super::*;\n\n use rocket::local::blocking::Client;\n\n\n\n fn assert_path(client: &Client, path: &str) {\n\n let res = client.get(path).dispatch();\n\n assert_eq!(res.into_string(), Some(path.into()));\n\n }\n\n\n\n #[test]\n\n fn check_mount_path() {\n\n let rocket = rocket::ignite()\n\n .mount(\"/first\", routes![files])\n\n .mount(\"/second\", routes![files]);\n\n\n\n let client = Client::tracked(rocket).unwrap();\n\n assert_path(&client, \"/first/some/path\");\n\n assert_path(&client, \"/second/some/path\");\n\n assert_path(&client, \"/first/second/b/c\");\n\n assert_path(&client, \"/second/a/b/c\");\n\n }\n\n}\n", "file_path": "core/lib/tests/route_guard.rs", "rank": 42, "score": 313492.3143950791 }, { "content": "#[post(\"/s\", data = \"<simple>\")]\n\nfn simple(simple: Simple) -> String { simple.0 }\n\n\n", "file_path": "core/codegen/tests/route-data.rs", "rank": 43, "score": 312442.8914960411 }, { "content": "#[doc(hidden)]\n\npub fn async_test<R>(fut: impl std::future::Future<Output = R> + Send) -> R {\n\n tokio::runtime::Builder::new()\n\n 
.basic_scheduler()\n\n .enable_all()\n\n .build()\n\n .expect(\"create tokio runtime\")\n\n .block_on(fut)\n\n}\n\n\n\n/// WARNING: This is unstable! Do not use this method outside of Rocket!\n", "file_path": "core/lib/src/lib.rs", "rank": 44, "score": 310928.9712085322 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Foo7(String, usize);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display.rs", "rank": 45, "score": 309576.6605640046 }, { "content": "#[post(\"/<id>\")]\n\nfn simple(id: usize) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 46, "score": 308889.0529851581 }, { "content": "#[post(\"/<id>\")]\n\nfn simple(id: usize) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 47, "score": 308889.0529851581 }, { "content": "fn test<H>(method: Method, uri: &str, header: H, status: Status, body: String)\n\n where H: Into<Header<'static>>\n\n{\n\n let client = Client::tracked(super::rocket()).unwrap();\n\n let response = client.req(method, uri).header(header).dispatch();\n\n assert_eq!(response.status(), status);\n\n assert_eq!(response.into_string(), Some(body));\n\n}\n\n\n", "file_path": "examples/content_types/src/tests.rs", "rank": 48, "score": 306767.30663456407 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Foo7(String, usize);\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display.rs", "rank": 49, "score": 305136.09896845266 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Foo7(String, usize);\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display.rs", "rank": 50, "score": 305136.09896845266 }, { "content": "fn uri_origin<'a>(path: &'a str, query: Option<&'a str>) -> Uri<'a> {\n\n Uri::Origin(Origin::new(path, query))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/tests.rs", "rank": 51, "score": 304138.8032930836 }, { "content": "#[get(\"/hello/<name>/<age>\")]\n\nfn hello(name: String, age: i8) -> String {\n\n 
format!(\"Hello, {} year old named {}!\", age, name)\n\n}\n\n\n", "file_path": "examples/ranking/src/main.rs", "rank": 52, "score": 302616.9706202975 }, { "content": "#[get(\"/hello/<name>/<age>\")]\n\nfn hello(name: String, age: i8) -> String {\n\n format!(\"Hello, {} year old named {}!\", age, name)\n\n}\n\n\n", "file_path": "examples/errors/src/main.rs", "rank": 53, "score": 302393.47117883887 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn not_uri_display(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 54, "score": 299127.41148637666 }, { "content": "fn download_paste(client: &Client, id: &str) -> String {\n\n let response = client.get(format!(\"/{}\", id)).dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n response.into_string().unwrap()\n\n}\n\n\n", "file_path": "examples/pastebin/src/tests.rs", "rank": 55, "score": 297056.3701794895 }, { "content": "fn upload_paste(client: &Client, body: &str) -> String {\n\n let response = client.post(\"/\").body(body).dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(response.content_type(), Some(ContentType::Plain));\n\n extract_id(&response.into_string().unwrap()).unwrap()\n\n}\n\n\n", "file_path": "examples/pastebin/src/tests.rs", "rank": 56, "score": 297056.3701794895 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn not_uri_display(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 57, "score": 295681.0825594674 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn not_uri_display(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 58, "score": 295681.0825594674 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn not_uri_display_but_unused(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 59, "score": 295681.0825594674 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn 
not_uri_display_but_unused(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 60, "score": 292337.74610172183 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn not_uri_display_but_unused(id: i32, name: S) { }\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 61, "score": 292337.74610172183 }, { "content": "#[get(\"/static/<user>/is/<path..>\")]\n\nfn dual(user: String, path: Segments<'_>) -> String {\n\n user + \"/is/\" + &path.collect::<Vec<_>>().join(\"/\")\n\n}\n\n\n\nmod tests {\n\n use super::*;\n\n use rocket::local::blocking::Client;\n\n\n\n #[test]\n\n fn segments_works() {\n\n let rocket = rocket::ignite()\n\n .mount(\"/\", routes![test, two, one_two, none, dual])\n\n .mount(\"/point\", routes![test, two, one_two, dual]);\n\n let client = Client::tracked(rocket).unwrap();\n\n\n\n // We construct a path that matches each of the routes above. We ensure the\n\n // prefix is stripped, confirming that dynamic segments are working.\n\n for prefix in &[\"\", \"/test\", \"/two\", \"/one/two\",\n\n \"/point/test\", \"/point/two\", \"/point/one/two\",\n\n \"/static\", \"/point/static\"]\n\n {\n\n let path = \"this/is/the/path/we/want\";\n\n let response = client.get(format!(\"{}/{}\", prefix, path)).dispatch();\n\n assert_eq!(response.into_string(), Some(path.into()));\n\n }\n\n }\n\n}\n", "file_path": "core/lib/tests/segments-issues-41-86.rs", "rank": 62, "score": 291701.5728186554 }, { "content": "pub fn rocket() -> rocket::Rocket {\n\n rocket::ignite()\n\n .mount(\"/\", routes![rendezvous])\n\n .attach(AdHoc::on_attach(\"Add Channel\", |rocket| async {\n\n Ok(rocket.manage(Barrier::new(2)))\n\n }))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::rocket;\n\n use rocket::http::Status;\n\n\n\n #[rocket::async_test]\n\n async fn test_rendezvous() {\n\n use rocket::local::asynchronous::Client;\n\n\n\n let client = Client::tracked(rocket()).await.unwrap();\n\n let req 
= client.get(\"/barrier\");\n\n\n\n let (r1, r2) = rocket::tokio::join!(req.clone().dispatch(), req.dispatch());\n\n assert_eq!(r1.status(), r2.status());\n\n assert_eq!(r1.status(), Status::Ok);\n\n\n\n let (s1, s2) = (r1.into_string().await, r2.into_string().await);\n\n assert_eq!(s1, s2);\n\n assert_eq!(s1.unwrap(), \"Rendezvous reached.\");\n\n }\n\n}\n", "file_path": "examples/testing/src/async_required.rs", "rank": 63, "score": 290177.7994739399 }, { "content": "fn rocket() -> rocket::Rocket {\n\n rocket::ignite().mount(\"/\", routes![hello, uri_redirect, raw_redirect])\n\n}\n\n\n\n\n\nmod tests {\n\n use super::*;\n\n use rocket::local::blocking::Client;\n\n use rocket::http::{Status, uri::Uri};\n\n\n\n #[test]\n\n fn uri_percent_encoding_redirect() {\n\n let expected_location = vec![\"/hello/John%5B%5D%7C%5C%25@%5E\"];\n\n let client = Client::tracked(rocket()).unwrap();\n\n\n\n let response = client.get(\"/raw\").dispatch();\n\n let location: Vec<_> = response.headers().get(\"location\").collect();\n\n assert_eq!(response.status(), Status::SeeOther);\n\n assert_eq!(&location, &expected_location);\n\n\n", "file_path": "core/lib/tests/uri-percent-encoding-issue-808.rs", "rank": 64, "score": 290057.15556611575 }, { "content": "#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 65, "score": 286414.21904432273 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Foo5(String, String);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display.rs", "rank": 66, "score": 285590.1181045307 }, { "content": "#[get(\"/<path..>\", rank = 2)]\n\nfn none(path: Segments<'_>) -> String {\n\n path.collect::<Vec<_>>().join(\"/\")\n\n}\n\n\n", "file_path": "core/lib/tests/segments-issues-41-86.rs", "rank": 67, "score": 285199.24262353126 }, { "content": "#[get(\"/two/<path..>\")]\n\nfn two(path: Segments<'_>) -> String {\n\n path.collect::<Vec<_>>().join(\"/\")\n\n}\n\n\n", 
"file_path": "core/lib/tests/segments-issues-41-86.rs", "rank": 68, "score": 285193.29151507525 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 70, "score": 284513.2742262084 }, { "content": "#[get(\"/\", data = \"<_foo>\")]\n\nfn g0(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail/route-warnings.rs", "rank": 71, "score": 284381.4018810254 }, { "content": "#[head(\"/\", data = \"<_foo>\")]\n\nfn g1(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail/route-warnings.rs", "rank": 72, "score": 284381.4018810254 }, { "content": "#[derive(FromForm, UriDisplayQuery)]\n\nstruct Third<'r> {\n\n one: String,\n\n two: &'r RawStr,\n\n}\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 73, "score": 283451.97398096445 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Foo5(String, String);\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display.rs", "rank": 74, "score": 282235.46199509647 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Foo5(String, String);\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display.rs", "rank": 75, "score": 282235.46199509647 }, { "content": "#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/from_form_type_errors.rs", "rank": 76, "score": 281619.108285359 }, { "content": "#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/from_form_type_errors.rs", "rank": 77, "score": 281619.108285359 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 78, "score": 281613.18260536424 }, { "content": "#[get(\"/one/two/<path..>\")]\n\nfn one_two(path: Segments<'_>) -> String {\n\n path.collect::<Vec<_>>().join(\"/\")\n\n}\n\n\n", "file_path": "core/lib/tests/segments-issues-41-86.rs", "rank": 79, 
"score": 281527.8005225548 }, { "content": "#[head(\"/\", data = \"<_foo>\")]\n\nfn g1(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/route-warnings.rs", "rank": 80, "score": 280992.0321207817 }, { "content": "#[get(\"/\", data = \"<_foo>\")]\n\nfn g0(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/route-warnings.rs", "rank": 81, "score": 280992.0321207817 }, { "content": "#[head(\"/\", data = \"<_foo>\")]\n\nfn g1(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/route-warnings.rs", "rank": 82, "score": 280992.0321207817 }, { "content": "#[get(\"/\", data = \"<_foo>\")]\n\nfn g0(_foo: rocket::Data) {}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/route-warnings.rs", "rank": 83, "score": 280992.0321207817 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 84, "score": 280415.95139582123 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 85, "score": 280415.95139582123 }, { "content": "#[post(\"/\", format = \"json\")]\n\nfn json() -> &'static str { \"json\" }\n\n\n", "file_path": "core/codegen/tests/route-format.rs", "rank": 86, "score": 279796.1762320051 }, { "content": "fn get_count(client: &Client) -> usize {\n\n let response = client.get(\"/count\").dispatch();\n\n response.into_string().and_then(|s| s.parse().ok()).unwrap()\n\n}\n\n\n", "file_path": "examples/state/src/tests.rs", "rank": 87, "score": 279780.40144480544 }, { "content": "fn lenient<'f, T>(string: &'f str) -> Result<T, FormParseError<'f>>\n\n where T: FromForm<'f, Error = FormParseError<'f>>\n\n{\n\n parse(string, false)\n\n}\n\n\n", "file_path": "core/codegen/tests/from_form.rs", "rank": 88, "score": 279373.0463454934 }, { "content": "fn strict<'f, T>(string: &'f str) -> Result<T, FormParseError<'f>>\n\n where T: FromForm<'f, Error = 
FormParseError<'f>>\n\n{\n\n parse(string, true)\n\n}\n\n\n", "file_path": "core/codegen/tests/from_form.rs", "rank": 89, "score": 279373.0463454934 }, { "content": "#[test]\n\nfn check_simple_unnamed() {\n\n assert_uri_eq! {\n\n uri!(simple: 100) => \"/100\",\n\n uri!(simple: -23) => \"/-23\",\n\n uri!(unused_param: 1, 2) => \"/1/2\",\n\n }\n\n\n\n // The \"flipped\" test ensures that the order of parameters depends on the\n\n // route's URI, not on the order in the function signature.\n\n assert_uri_eq! {\n\n uri!(simple2: 100, \"hello\".to_string()) => \"/100/hello\",\n\n uri!(simple2: 1349, \"hey\".to_string()) => \"/1349/hey\",\n\n uri!(simple2_flipped: 100, \"hello\".to_string()) => \"/100/hello\",\n\n }\n\n\n\n // Ensure that `.from_uri_param()` is called.\n\n assert_uri_eq! {\n\n uri!(simple2: 100, \"hello\") => \"/100/hello\",\n\n uri!(simple2_flipped: 1349, \"hey\") => \"/1349/hey\",\n\n }\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 90, "score": 279051.5029878067 }, { "content": "/// Splits a path into a name that may be used to identify the template, and the\n\n/// template's data type, if any.\n\nfn split_path(root: &Path, path: &Path) -> (String, Option<String>) {\n\n let rel_path = path.strip_prefix(root).unwrap().to_path_buf();\n\n let path_no_ext = remove_extension(&rel_path);\n\n let data_type = path_no_ext.extension();\n\n let mut name = remove_extension(&path_no_ext).to_string_lossy().into_owned();\n\n\n\n // Ensure template name consistency on Windows systems\n\n if cfg!(windows) {\n\n name = name.replace(\"\\\\\", \"/\");\n\n }\n\n\n\n (name, data_type.map(|d| d.to_string_lossy().into_owned()))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn template_path_index_html() {\n", "file_path": "contrib/lib/src/templates/context.rs", "rank": 91, "score": 277587.1683045855 }, { "content": "fn validate_struct(_: &DeriveGenerator, data: Struct<'_>) -> Result<()> {\n\n if data.fields().is_empty() {\n\n 
return Err(data.fields.span().error(\"at least one field is required\"));\n\n }\n\n\n\n let mut names = ::std::collections::HashMap::new();\n\n for field in data.fields().iter() {\n\n let id = field.ident.as_ref().expect(\"named field\");\n\n let field = match Form::from_attrs(\"form\", &field.attrs) {\n\n Some(result) => result?.field,\n\n None => FormField { span: Spanned::span(&id), name: id.to_string() }\n\n };\n\n\n\n if let Some(span) = names.get(&field.name) {\n\n return Err(field.span.error(\"duplicate field name\")\n\n .span_note(*span, \"previous definition here\"));\n\n }\n\n\n\n names.insert(field.name, field.span);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/codegen/src/derive/from_form.rs", "rank": 92, "score": 277294.6306640092 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar1(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 93, "score": 277021.6482715862 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Baz(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 94, "score": 277021.62095421896 }, { "content": "#[derive(FromForm)]\n\nstruct BadType3 {\n\n field: Unknown,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 95, "score": 277009.99616280716 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/from_form_type_errors.rs", "rank": 96, "score": 277004.0704828125 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/from_form_type_errors.rs", "rank": 97, "score": 277004.0704828124 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar3 {\n\n field: String,\n\n bad: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 98, "score": 276696.6253476595 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar2 {\n\n field: BadType,\n\n}\n\n\n", "file_path": 
"core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 99, "score": 276696.6253476595 } ]
Rust
services/src/util/parsing.rs
2younis/geoengine
253eb7ff2c980511fecdec6ff8fb176d5365bc11
use geoengine_datatypes::primitives::SpatialResolution; use serde::de; use serde::de::Error; use serde::Deserialize; use std::fmt; use std::marker::PhantomData; use std::str::FromStr; use url::Url; pub fn parse_spatial_resolution<'de, D>(deserializer: D) -> Result<SpatialResolution, D::Error> where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; let split: Result<Vec<f64>, <f64 as FromStr>::Err> = s.split(',').map(f64::from_str).collect(); match split.as_ref().map(Vec::as_slice) { Ok(&[x, y]) => SpatialResolution::new(x, y).map_err(D::Error::custom), Err(error) => Err(D::Error::custom(error)), Ok(..) => Err(D::Error::custom("Invalid spatial resolution")), } } pub fn string_or_string_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error> where D: serde::Deserializer<'de>, { struct StringOrVec(PhantomData<Vec<String>>); impl<'de> de::Visitor<'de> for StringOrVec { type Value = Vec<String>; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string or array of strings") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(vec![value.to_owned()]) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(StringOrVec(PhantomData)) } pub fn deserialize_base_url<'de, D>(deserializer: D) -> Result<Url, D::Error> where D: serde::Deserializer<'de>, { let mut url_string = String::deserialize(deserializer)?; if !url_string.ends_with('/') { url_string.push('/'); } Url::parse(&url_string).map_err(D::Error::custom) } pub fn deserialize_base_url_option<'de, D>(deserializer: D) -> Result<Option<Url>, D::Error> where D: serde::Deserializer<'de>, { let mut url_string = if let Some(url_string) = Option::<String>::deserialize(deserializer)? 
{ url_string } else { return Ok(None); }; if !url_string.ends_with('/') { url_string.push('/'); } Url::parse(&url_string) .map(Option::Some) .map_err(D::Error::custom) } #[cfg(test)] mod tests { use std::fmt::Display; use super::*; #[test] fn test_deserialize_base_url() { #[derive(Deserialize)] struct Test { #[serde(deserialize_with = "deserialize_base_url")] base_url: Url, } impl Display for Test { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.base_url.to_string()) } } assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de/"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert!(serde_json::from_str::<Test>(r#"{"base_url": "foo"}"#).is_err()); } #[test] fn test_deserialize_base_url_option() { #[derive(Deserialize)] struct Test { #[serde(deserialize_with = "deserialize_base_url_option")] base_url: Option<Url>, } impl Display for Test { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.base_url { Some(base_url) => f.write_str(&base_url.to_string()), None => f.write_str(""), } } } assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de/"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert!(serde_json::from_str::<Test>(r#"{"base_url": "foo"}"#).is_err()); assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": null}"#) .unwrap() .to_string(), "" ); } }
use geoengine_datatypes::primitives::SpatialResolution; use serde::de; use serde::de::Error; use serde::Deserialize; use std::fmt; use std::marker::PhantomData; use std::str::FromStr; use url::Url; pub fn parse_spatial_resolution<'de, D>(deserializer: D) -> Result<SpatialResolution, D::Error> where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; let split: Result<Vec<f64>, <f64 as FromStr>::Err> = s.split(',').map(f64::from_str).collect(); match split.as_ref().map(Vec::as_slice) { Ok(&[x, y]) => SpatialResolution::new(x, y).map_err(D::Error::custom), Err(error) => Err(D::Error::custom(error)), Ok(..) => Err(D::Error::custom("Invalid spatial resolution")), } } pub fn string_or_string_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error> where D: serde::Deserializer<'de>, { struct StringOrVec(PhantomData<Vec<String>>); impl<'de> de::Visitor<'de> for StringOrVec { type Value = Vec<String>; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string or array of strings") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(vec![value.to_owned()]) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(StringOrVec(PhantomData)) } pub fn deserialize_base_url<'de, D>(deserializer: D) -> Result<Url, D::Error> where D: serde::Deserializer<'de>, { let mut url_string = String::deserialize(deserializer)?; if !url_string.ends_with('/') { url_string.push('/'); } Url::parse(&url_string).map_err(D::Error::custom) } pub fn deserialize_base_url_option<'de, D>(deserializer: D) -> Result<Option<Url>, D::Error> where D: serde::Deserializer<'de>, { let mut url_string = if let Some(url_string) = Option::<String>::deserialize(deserializer)? 
{ url_string } else { return Ok(None); }; if !url_string.ends_with('/') { url_string.push('/'); } Url::parse(&url_string) .map(Option::Some) .map_err(D::Error::custom) } #[cfg(test)] mod tests { use std::fmt::Display; use super::*; #[test] fn test_deserialize_base_url() { #[derive(Deserialize)]
ert_eq!( serde_json::from_str::<Test>(r#"{"base_url": null}"#) .unwrap() .to_string(), "" ); } }
struct Test { #[serde(deserialize_with = "deserialize_base_url")] base_url: Url, } impl Display for Test { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.base_url.to_string()) } } assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de/"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert!(serde_json::from_str::<Test>(r#"{"base_url": "foo"}"#).is_err()); } #[test] fn test_deserialize_base_url_option() { #[derive(Deserialize)] struct Test { #[serde(deserialize_with = "deserialize_base_url_option")] base_url: Option<Url>, } impl Display for Test { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.base_url { Some(base_url) => f.write_str(&base_url.to_string()), None => f.write_str(""), } } } assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de/"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert_eq!( serde_json::from_str::<Test>(r#"{"base_url": "https://www.endpoint.de"}"#) .unwrap() .to_string(), "https://www.endpoint.de/" ); assert!(serde_json::from_str::<Test>(r#"{"base_url": "foo"}"#).is_err()); ass
random
[ { "content": "pub fn spatial_reference_specification(srs_string: &str) -> Result<SpatialReferenceSpecification> {\n\n if let Some(sref) = custom_spatial_reference_specification(srs_string) {\n\n return Ok(sref);\n\n }\n\n\n\n let spatial_reference = SpatialReference::from_str(srs_string).context(error::DataType)?;\n\n let json = proj_json(srs_string).ok_or_else(|| Error::UnknownSrsString {\n\n srs_string: srs_string.to_owned(),\n\n })?;\n\n let proj_string = proj_proj_string(srs_string).ok_or_else(|| Error::UnknownSrsString {\n\n srs_string: srs_string.to_owned(),\n\n })?;\n\n\n\n let extent = spatial_reference\n\n .area_of_use_projected()\n\n .context(error::DataType)?;\n\n\n\n let axis_labels = json.coordinate_system.axis.as_ref().map(|axes| {\n\n let a0 = axes.get(0).map_or(\"\".to_owned(), |a| a.name.clone());\n\n let a1 = axes.get(1).map_or(\"\".to_owned(), |a| a.name.clone());\n", "file_path": "services/src/handlers/spatial_references.rs", "rank": 0, "score": 280971.9499672438 }, { "content": "/// Helper function to downcast a mutable arrow array from a builder\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_mut_array<T: Any>(array: &mut dyn ArrayBuilder) -> &mut T {\n\n array.as_any_mut().downcast_mut().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 1, "score": 279552.5438154136 }, { "content": "/// snap `value` to next `step` multiple from `start`\n\npub fn snap_next(start: f64, step: f64, value: f64) -> f64 {\n\n start + ((value - start) / step).ceil() * step\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn assert_approx_eq_for_floats() {\n\n assert_approx_eq!(&[1., 2., 3.], &[1., 2., 3.]);\n\n\n\n assert!(!approx_eq_floats(&[1., 2.], &[1., 2., 3.]));\n\n }\n\n\n\n #[test]\n\n #[allow(clippy::float_cmp)]\n\n fn it_snaps_right() {\n", 
"file_path": "datatypes/src/util/helpers.rs", "rank": 2, "score": 263284.43844034814 }, { "content": "/// snap `value` to previous `step` multiple from `start`\n\npub fn snap_prev(start: f64, step: f64, value: f64) -> f64 {\n\n start + ((value - start) / step).floor() * step\n\n}\n\n\n", "file_path": "datatypes/src/util/helpers.rs", "rank": 3, "score": 263284.43844034814 }, { "content": "/// Get the proj json information for the given `srs_string` if it is known.\n\n// TODO: expose method in proj crate instead\n\nfn proj_proj_string(srs_string: &str) -> Option<String> {\n\n unsafe {\n\n let c_definition = std::ffi::CString::new(srs_string).ok()?;\n\n\n\n let ctx = proj_sys::proj_context_create();\n\n let c_proj = proj_sys::proj_create(ctx, c_definition.as_ptr());\n\n\n\n let string = if c_proj.is_null() {\n\n None\n\n } else {\n\n let c_buf = proj_sys::proj_as_proj_string(\n\n ctx,\n\n c_proj,\n\n PJ_PROJ_STRING_TYPE_PJ_PROJ_4,\n\n std::ptr::null(),\n\n );\n\n\n\n std::ffi::CStr::from_ptr(c_buf)\n\n .to_str()\n\n .map(ToOwned::to_owned)\n", "file_path": "services/src/handlers/spatial_references.rs", "rank": 4, "score": 261396.43660138818 }, { "content": "/// Create an `arrow` struct from column meta data and data\n\npub fn struct_array_from_data(\n\n columns: Vec<Field>,\n\n column_values: Vec<ArrayRef>,\n\n number_of_features: usize,\n\n) -> Result<StructArray> {\n\n Ok(StructArray::from(\n\n ArrayData::builder(arrow::datatypes::DataType::Struct(columns))\n\n .child_data(\n\n column_values\n\n .into_iter()\n\n .map(|a| a.data().clone())\n\n .collect(),\n\n )\n\n .len(number_of_features)\n\n .build()?,\n\n ))\n\n}\n\n\n", "file_path": "datatypes/src/collections/feature_collection.rs", "rank": 5, "score": 242963.6243075753 }, { "content": "#[cfg(test)]\n\npub fn set_config<T>(key: &str, value: T) -> Result<()>\n\nwhere\n\n T: Into<config::Value>,\n\n{\n\n SETTINGS\n\n .write()\n\n .map_err(|_error| error::Error::ConfigLockFailed)?\n\n .set(key, value)\n\n 
.context(error::Config)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 6, "score": 224066.77365715633 }, { "content": "/// Parse a spatial resolution, format is: \"resolution\" or \"xResolution,yResolution\"\n\npub fn parse_spatial_resolution_option<'de, D>(\n\n deserializer: D,\n\n) -> Result<Option<SpatialResolution>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if s.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let split: Vec<Result<f64, std::num::ParseFloatError>> = s.split(',').map(str::parse).collect();\n\n\n\n let spatial_resolution = match *split.as_slice() {\n\n [Ok(resolution)] => {\n\n SpatialResolution::new(resolution, resolution).map_err(D::Error::custom)?\n\n }\n\n [Ok(x_resolution), Ok(y_resolution)] => {\n\n SpatialResolution::new(x_resolution, y_resolution).map_err(D::Error::custom)?\n\n }\n\n _ => return Err(D::Error::custom(\"Invalid spatial resolution\")),\n\n };\n\n\n\n Ok(Some(spatial_resolution))\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 7, "score": 223585.26469580253 }, { "content": "/// Helper function to downcast an arrow array\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n///\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_dyn_array<T: Any>(array: &dyn Array) -> &T {\n\n array.as_any().downcast_ref().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 9, "score": 215178.40440105938 }, { "content": "/// Initialize a basic logger within tests.\n\n/// You should only use this for debugging.\n\n///\n\n/// # Panics\n\n/// This function will panic if the logger cannot be initialized.\n\n///\n\npub fn initialize_debugging_in_test() {\n\n Logger::try_with_str(\"debug\").unwrap().start().unwrap();\n\n}\n\n\n", "file_path": "services/src/util/tests.rs", "rank": 10, "score": 214952.2769016459 }, { "content": "/// Get the proj 
json information for the given `srs_string` if it is known.\n\n// TODO: expose method in proj crate instead\n\nfn proj_json(srs_string: &str) -> Option<ProjJson> {\n\n unsafe {\n\n let c_definition = std::ffi::CString::new(srs_string).ok()?;\n\n\n\n let ctx = proj_sys::proj_context_create();\n\n let c_proj = proj_sys::proj_create(ctx, c_definition.as_ptr());\n\n\n\n let string = if c_proj.is_null() {\n\n None\n\n } else {\n\n let c_buf = proj_sys::proj_as_projjson(ctx, c_proj, std::ptr::null());\n\n\n\n std::ffi::CStr::from_ptr(c_buf)\n\n .to_str()\n\n .map(ToOwned::to_owned)\n\n .ok()\n\n };\n\n\n\n proj_sys::proj_destroy(c_proj);\n\n proj_sys::proj_context_destroy(ctx);\n\n proj_sys::proj_cleanup();\n\n\n\n string.and_then(|s| serde_json::from_str(&s).ok())\n\n }\n\n}\n\n\n", "file_path": "services/src/handlers/spatial_references.rs", "rank": 11, "score": 213611.7332667539 }, { "content": "#[must_use]\n\npub fn approx_eq_floats(left: &[f64], right: &[f64]) -> bool {\n\n if left.len() != right.len() {\n\n return false;\n\n }\n\n\n\n for (&l, &r) in left.iter().zip(right) {\n\n if !float_cmp::approx_eq!(f64, l, r) {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Create hash maps by specifying key-value pairs\n\n#[macro_export]\n\nmacro_rules! 
hashmap {\n\n (@void $($x:tt)*) => (());\n\n (@count $($tts:expr),*) => (<[()]>::len(&[$(hashmap!(@void $tts)),*]));\n\n\n", "file_path": "datatypes/src/util/helpers.rs", "rank": 13, "score": 211946.36086674558 }, { "content": "/// Helper function to downcast an arrow array\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n///\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_array<T: Any>(array: &ArrayRef) -> &T {\n\n array.as_any().downcast_ref().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 14, "score": 211620.5374920807 }, { "content": "pub fn fn_stream() -> impl Stream<Item = usize> {\n\n let mut counter: usize = 2;\n\n\n\n stream::poll_fn(move |_| -> Poll<Option<usize>> {\n\n if counter == 0 {\n\n return Poll::Ready(None);\n\n }\n\n counter -= 1;\n\n Poll::Ready(Some(counter))\n\n })\n\n}\n\n\n", "file_path": "operators/tests/streams.rs", "rank": 15, "score": 190715.08479512174 }, { "content": "/// Helper struct for deserializing a `SpatialReferencce`\n\nstruct SpatialReferenceDeserializeVisitor;\n\n\n\nimpl<'de> Visitor<'de> for SpatialReferenceDeserializeVisitor {\n\n type Value = SpatialReference;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a spatial reference in the form authority:code\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n v.parse().map_err(serde::de::Error::custom)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for SpatialReference {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error>\n\n where\n", "file_path": "datatypes/src/spatial_reference.rs", "rank": 16, "score": 189749.58700090062 }, { "content": "/// Serde deserializer <https://docs.rs/serde_qs/0.6.0/serde_qs/index.html#flatten-workaround>\n\npub fn from_str<'de, D, S>(deserializer: D) -> Result<S, D::Error>\n\nwhere\n\n 
D: serde::Deserializer<'de>,\n\n S: std::str::FromStr,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n S::from_str(s).map_err(|_error| D::Error::custom(\"could not parse string\"))\n\n}\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 17, "score": 188517.05643530376 }, { "content": "#[test]\n\nfn strings() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let array = {\n\n let mut strings = String::new();\n\n let mut offsets: Vec<i32> = Vec::new();\n\n\n\n for string in &[\"hello\", \"from\", \"the\", \"other\", \"side\"] {\n\n offsets.push(strings.len() as i32);\n\n strings.push_str(string);\n\n }\n\n offsets.push(strings.len() as i32);\n\n\n\n let data = ArrayData::builder(DataType::Utf8)\n\n .len(offsets.len() - 1) // number of strings\n\n .add_buffer(Buffer::from(offsets.to_byte_slice()))\n\n .add_buffer(Buffer::from(strings.as_bytes()))\n\n .build()\n\n .unwrap();\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 18, "score": 186996.47769611096 }, { "content": "pub fn get_config<'a, T>(key: &str) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n SETTINGS\n\n .read()\n\n .map_err(|_error| error::Error::ConfigLockFailed)?\n\n .get::<T>(key)\n\n .context(error::Config)\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 19, "score": 186405.02712698077 }, { "content": "/// Helper struct for deserializing a `SpatialReferenceOption`\n\nstruct SpatialReferenceOptionDeserializeVisitor;\n\n\n\nimpl<'de> Visitor<'de> for SpatialReferenceOptionDeserializeVisitor {\n\n type Value = SpatialReferenceOption;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a spatial reference in the form authority:code\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n if v.is_empty() {\n\n return Ok(SpatialReferenceOption::Unreferenced);\n\n }\n\n\n\n let spatial_reference: SpatialReference = 
v.parse().map_err(serde::de::Error::custom)?;\n\n\n\n Ok(spatial_reference.into())\n", "file_path": "datatypes/src/spatial_reference.rs", "rank": 20, "score": 186104.14636806725 }, { "content": "// TODO: move test helper somewhere else?\n\npub fn add_ndvi_dataset(ctx: &mut MockExecutionContext) -> DatasetId {\n\n let id: DatasetId = InternalDatasetId::new().into();\n\n ctx.add_meta_data(id.clone(), Box::new(create_ndvi_meta_data()));\n\n id\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 21, "score": 185926.46928596514 }, { "content": "#[test]\n\nfn null_values() {\n\n let mut primitive_array_builder = Int32Builder::new(5);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n primitive_array_builder.append_slice(&[3, 4, 5]).unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 1);\n\n\n\n let data = primitive_array.values();\n\n\n\n assert_eq!(data.len(), 5);\n\n\n\n assert_eq!(&data[0..1], &[1]);\n\n assert_eq!(&data[2..5], &[3, 4, 5]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 22, "score": 182516.70105957566 }, { "content": "#[allow(clippy::float_cmp)] // allow since NO DATA is a specific value\n\nfn process_raster(number_statistics: &mut NumberStatistics, tile_grid: &Grid2D<f64>) {\n\n let no_data_value = tile_grid.no_data_value();\n\n\n\n if let Some(no_data_value) = no_data_value {\n\n for &value in &tile_grid.data {\n\n if value == no_data_value {\n\n number_statistics.add_no_data();\n\n } else {\n\n number_statistics.add(value);\n\n }\n\n }\n\n } else {\n\n for &value in &tile_grid.data {\n\n number_statistics.add(value);\n\n }\n\n }\n\n}\n\n\n\n/// The statistics summary output type for each raster input\n", "file_path": "operators/src/plot/statistics.rs", "rank": 23, "score": 180955.33650353664 }, { "content": "/// Serde deserializer 
<https://docs.rs/serde_qs/0.6.0/serde_qs/index.html#flatten-workaround>\n\npub fn from_str_option<'de, D, S>(deserializer: D) -> Result<Option<S>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n S: std::str::FromStr,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n if s.is_empty() {\n\n Ok(None)\n\n } else {\n\n S::from_str(s)\n\n .map(Some)\n\n .map_err(|_error| D::Error::custom(\"could not parse string\"))\n\n }\n\n}\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 24, "score": 179879.98645952297 }, { "content": "pub fn create_random_user_session_helper() -> UserSession {\n\n let user_id = UserId::new();\n\n\n\n UserSession {\n\n id: SessionId::new(),\n\n user: UserInfo {\n\n id: user_id,\n\n email: Some(user_id.to_string()),\n\n real_name: Some(user_id.to_string()),\n\n },\n\n created: MIN_DATETIME,\n\n valid_until: MAX_DATETIME,\n\n project: None,\n\n view: None,\n\n roles: vec![user_id.into(), Role::user_role_id()],\n\n }\n\n}\n\n\n\n#[allow(clippy::missing_panics_doc)]\n\npub async fn create_project_helper<C: ProContext>(ctx: &C) -> (UserSession, ProjectId) {\n", "file_path": "services/src/pro/util/tests.rs", "rank": 25, "score": 178813.2689289509 }, { "content": "pub fn create_feature_aggregator<P: Pixel>(\n\n number_of_features: usize,\n\n aggregation: FeatureAggregationMethod,\n\n) -> TypedAggregator {\n\n match aggregation {\n\n FeatureAggregationMethod::First => match P::TYPE {\n\n RasterDataType::U8\n\n | RasterDataType::U16\n\n | RasterDataType::U32\n\n | RasterDataType::U64\n\n | RasterDataType::I8\n\n | RasterDataType::I16\n\n | RasterDataType::I32\n\n | RasterDataType::I64 => FirstValueIntAggregator::new(number_of_features).into_typed(),\n\n RasterDataType::F32 | RasterDataType::F64 => {\n\n FirstValueFloatAggregator::new(number_of_features).into_typed()\n\n }\n\n },\n\n FeatureAggregationMethod::Mean => MeanValueAggregator::new(number_of_features).into_typed(),\n\n }\n", "file_path": 
"operators/src/processing/raster_vector_join/mod.rs", "rank": 26, "score": 174184.6049041453 }, { "content": "pub fn check_allowed_http_methods<'a, T, TRes>(\n\n test_helper: T,\n\n allowed_methods: &'a [Method],\n\n) -> impl futures::Future + 'a\n\nwhere\n\n T: Fn(Method) -> TRes + 'a,\n\n TRes: futures::Future<Output = ServiceResponse> + 'a,\n\n{\n\n check_allowed_http_methods2(test_helper, allowed_methods, |res| res)\n\n}\n\n\n\npub async fn send_test_request<C: SimpleContext>(\n\n req: test::TestRequest,\n\n ctx: C,\n\n) -> ServiceResponse {\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(web::Data::new(ctx))\n\n .wrap(\n\n middleware::ErrorHandlers::default()\n", "file_path": "services/src/util/tests.rs", "rank": 27, "score": 173530.13513110578 }, { "content": "pub fn update_project_helper(project: ProjectId) -> UpdateProject {\n\n UpdateProject {\n\n id: project,\n\n name: Some(\"TestUpdate\".to_string()),\n\n description: None,\n\n layers: Some(vec![LayerUpdate::UpdateOrInsert(Layer {\n\n workflow: WorkflowId::new(),\n\n name: \"L1\".to_string(),\n\n visibility: Default::default(),\n\n symbology: Symbology::Raster(RasterSymbology {\n\n opacity: 1.0,\n\n colorizer: Colorizer::Rgba,\n\n }),\n\n })]),\n\n plots: None,\n\n bounds: None,\n\n time_step: None,\n\n }\n\n}\n\n\n", "file_path": "services/src/util/tests.rs", "rank": 28, "score": 170424.35771223722 }, { "content": "pub fn catch_unwind_silent<F: FnOnce() -> R + panic::UnwindSafe, R>(\n\n f: F,\n\n) -> std::thread::Result<R> {\n\n let prev_hook = panic::take_hook();\n\n panic::set_hook(Box::new(|_| {}));\n\n let result = panic::catch_unwind(f);\n\n panic::set_hook(prev_hook);\n\n result\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 29, "score": 170185.47676750185 }, { "content": "/// this method performs the transformation of a query rectangle in `target` projection\n\n/// to a new query rectangle with coordinates in the `source` projection\n\npub fn query_rewrite_fn(\n\n 
query: VectorQueryRectangle,\n\n source: SpatialReference,\n\n target: SpatialReference,\n\n) -> Result<VectorQueryRectangle> {\n\n let projector_source_target = CoordinateProjector::from_known_srs(source, target)?;\n\n let projector_target_source = CoordinateProjector::from_known_srs(target, source)?;\n\n\n\n let p_bbox = query\n\n .spatial_bounds\n\n .reproject_clipped(&projector_target_source)?;\n\n let s_bbox = p_bbox.reproject(&projector_source_target)?;\n\n\n\n let p_spatial_resolution =\n\n suggest_pixel_size_from_diag_cross_projected(s_bbox, p_bbox, query.spatial_resolution)?;\n\n Ok(VectorQueryRectangle {\n\n spatial_bounds: p_bbox,\n\n spatial_resolution: p_spatial_resolution,\n\n time_interval: query.time_interval,\n\n })\n", "file_path": "operators/src/processing/reprojection.rs", "rank": 30, "score": 168457.20489690598 }, { "content": "pub fn get_token(req: &HttpRequest) -> Result<SessionId> {\n\n let header = req\n\n .headers()\n\n .get(header::AUTHORIZATION)\n\n .ok_or(Error::Authorization {\n\n source: Box::new(Error::MissingAuthorizationHeader),\n\n })?;\n\n let scheme = Bearer::parse(header).map_err(|_| Error::Authorization {\n\n source: Box::new(Error::InvalidAuthorizationScheme),\n\n })?;\n\n SessionId::from_str(scheme.token()).map_err(|err| Error::Authorization {\n\n source: Box::new(err),\n\n })\n\n}\n", "file_path": "services/src/handlers/mod.rs", "rank": 31, "score": 166992.15178154816 }, { "content": "fn detect_columns(layer: &Layer) -> HashMap<String, ColumnDataType> {\n\n let mut columns = HashMap::default();\n\n\n\n for field in layer.defn().fields() {\n\n let field_type = field.field_type();\n\n\n\n let data_type = match field_type {\n\n OGRFieldType::OFTInteger | OGRFieldType::OFTInteger64 => ColumnDataType::Int,\n\n OGRFieldType::OFTReal => ColumnDataType::Float,\n\n OGRFieldType::OFTString => ColumnDataType::Text,\n\n OGRFieldType::OFTDate | OGRFieldType::OFTDateTime => ColumnDataType::Date,\n\n _ => ColumnDataType::Unknown,\n\n 
};\n\n\n\n columns.insert(field.name(), data_type);\n\n }\n\n\n\n columns\n\n}\n\n\n", "file_path": "services/src/handlers/datasets.rs", "rank": 32, "score": 166368.82802105494 }, { "content": "pub fn value_in_range<T>(value: T, min: T, max: T) -> bool\n\nwhere\n\n T: PartialOrd + Copy,\n\n{\n\n (value >= min) && (value < max)\n\n}\n\n\n", "file_path": "datatypes/src/util/ranges.rs", "rank": 33, "score": 166036.89998481257 }, { "content": "pub fn value_in_range_inv<T>(value: T, min: T, max: T) -> bool\n\nwhere\n\n T: PartialOrd + Copy,\n\n{\n\n (value > min) && (value <= max)\n\n}\n\n\n", "file_path": "datatypes/src/util/ranges.rs", "rank": 34, "score": 163398.9743558003 }, { "content": "pub fn value_in_range_inclusive<T>(value: T, min: T, max: T) -> bool\n\nwhere\n\n T: PartialOrd + Copy,\n\n{\n\n (value >= min) && (value <= max)\n\n}\n\n\n", "file_path": "datatypes/src/util/ranges.rs", "rank": 35, "score": 163398.9743558003 }, { "content": "#[test]\n\nfn fn_test() {\n\n let mut stream = block_on_stream(fn_stream());\n\n\n\n assert_eq!(stream.next(), Some(1));\n\n assert_eq!(stream.next(), Some(0));\n\n assert_eq!(stream.next(), None);\n\n}\n", "file_path": "operators/tests/streams.rs", "rank": 36, "score": 161411.7206676395 }, { "content": "fn column_map_to_column_vecs(columns: &HashMap<String, ColumnDataType>) -> Columns {\n\n let mut int = Vec::new();\n\n let mut float = Vec::new();\n\n let mut text = Vec::new();\n\n let mut date = Vec::new();\n\n\n\n for (k, v) in columns {\n\n match v {\n\n ColumnDataType::Int => int.push(k.clone()),\n\n ColumnDataType::Float => float.push(k.clone()),\n\n ColumnDataType::Text => text.push(k.clone()),\n\n ColumnDataType::Date => date.push(k.clone()),\n\n ColumnDataType::Unknown => {}\n\n }\n\n }\n\n\n\n Columns {\n\n int,\n\n float,\n\n text,\n", "file_path": "services/src/handlers/datasets.rs", "rank": 37, "score": 160864.58360716136 }, { "content": "#[allow(clippy::option_if_let_else)]\n\npub fn parse_type_names<'de, 
D>(deserializer: D) -> Result<TypeNames, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if let Some(pos) = s.find(':') {\n\n let namespace = Some(s[..pos].to_string());\n\n let feature_type = s[pos + 1..].to_string();\n\n\n\n Ok(TypeNames {\n\n namespace,\n\n feature_type,\n\n })\n\n } else {\n\n Ok(TypeNames {\n\n namespace: None,\n\n feature_type: s,\n\n })\n\n }\n", "file_path": "services/src/ogc/wfs/request.rs", "rank": 38, "score": 158317.9929695406 }, { "content": "/// This trait extends `FeatureCollection` `Stream`s with Geo-Engine-specific functionality.\n\n///\n\npub trait FeatureCollectionStreamExt<CollectionType>:\n\n Stream<Item = Result<FeatureCollection<CollectionType>>>\n\nwhere\n\n CollectionType: Geometry + ArrowTyped + 'static,\n\n{\n\n /// Transforms a `Stream` of `FeatureCollection`s and merges them in a way that they\n\n /// are `chunk_size_bytes` large.\n\n fn merge_chunks(\n\n self,\n\n chunk_size_bytes: usize,\n\n ) -> FeatureCollectionChunkMerger<Fuse<Self>, CollectionType>\n\n where\n\n Self: Sized,\n\n {\n\n FeatureCollectionChunkMerger::new(self.fuse(), chunk_size_bytes)\n\n }\n\n}\n\n\n\nimpl<T: ?Sized, CollectionType: Geometry + ArrowTyped + 'static>\n\n FeatureCollectionStreamExt<CollectionType> for T\n\nwhere\n\n T: Stream<Item = Result<FeatureCollection<CollectionType>>>,\n\n{\n\n}\n", "file_path": "operators/src/adapters/mod.rs", "rank": 39, "score": 156391.33340348786 }, { "content": "pub trait FirstValueOutputType {\n\n fn feature_data_type() -> FeatureDataType;\n\n fn typed_aggregator(aggregator: FirstValueAggregator<Self>) -> TypedAggregator\n\n where\n\n Self: Sized;\n\n}\n\n\n\nimpl FirstValueOutputType for i64 {\n\n fn feature_data_type() -> FeatureDataType {\n\n FeatureDataType::Int\n\n }\n\n\n\n fn typed_aggregator(aggregator: FirstValueAggregator<Self>) -> TypedAggregator {\n\n TypedAggregator::FirstValueInt(aggregator)\n\n }\n\n}\n\n\n\nimpl 
FirstValueOutputType for f64 {\n\n fn feature_data_type() -> FeatureDataType {\n\n FeatureDataType::Float\n", "file_path": "operators/src/processing/raster_vector_join/aggregator.rs", "rank": 40, "score": 155975.90915173566 }, { "content": "/// Get a lock for mutex and recover from poisoning\n\n/// TODO: proper poisoning handling\n\npub fn safe_lock_mutex<M, T>(lock: &M) -> MutexGuard<T>\n\nwhere\n\n M: Deref<Target = Mutex<T>>,\n\n{\n\n match lock.deref().lock() {\n\n Ok(guard) => guard,\n\n Err(poisoned) => poisoned.into_inner(),\n\n }\n\n}\n", "file_path": "operators/src/util/mod.rs", "rank": 41, "score": 155769.32897030318 }, { "content": "/// Create a `RasterResultDescriptor` for the given `band` and `dataset`. If the raster data type is\n\n/// unknown, the default is F64 unless it is otherwise specified by `default_data_type`. If the data\n\n/// type is a complex floating point type, an error is returned\n\npub fn raster_descriptor_from_dataset(\n\n dataset: &Dataset,\n\n band: isize,\n\n default_data_type: Option<RasterDataType>,\n\n) -> Result<RasterResultDescriptor> {\n\n let rasterband = &dataset.rasterband(band)?;\n\n\n\n let spatial_ref: SpatialReference =\n\n dataset.spatial_ref()?.try_into().context(error::DataType)?;\n\n\n\n let data_type = match rasterband.band_type() {\n\n GDALDataType::GDT_Byte => RasterDataType::U8,\n\n GDALDataType::GDT_UInt16 => RasterDataType::U16,\n\n GDALDataType::GDT_Int16 => RasterDataType::I16,\n\n GDALDataType::GDT_UInt32 => RasterDataType::U32,\n\n GDALDataType::GDT_Int32 => RasterDataType::I32,\n\n GDALDataType::GDT_Float32 => RasterDataType::F32,\n\n GDALDataType::GDT_Float64 => RasterDataType::F64,\n\n GDALDataType::GDT_Unknown => default_data_type.unwrap_or(RasterDataType::F64),\n\n _ => return Err(Error::GdalRasterDataTypeNotSupported),\n\n };\n\n\n\n Ok(RasterResultDescriptor {\n\n data_type,\n\n spatial_reference: spatial_ref.into(),\n\n measurement: Measurement::Unitless,\n\n no_data_value: 
rasterband.no_data_value(),\n\n })\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 42, "score": 155049.27054021988 }, { "content": "/// reorders the given tuple of coordinates, resolutions, etc. using the axis ordering for `spatial_reference` to give (x, y)\n\npub fn tuple_from_ogc_params(\n\n a: f64,\n\n b: f64,\n\n spatial_reference: SpatialReference,\n\n) -> Result<(f64, f64)> {\n\n match spatial_reference_specification(&spatial_reference.proj_string()?)?\n\n .axis_order\n\n .ok_or(error::Error::AxisOrderingNotKnownForSrs {\n\n srs_string: spatial_reference.srs_string(),\n\n })? {\n\n AxisOrder::EastNorth => Ok((a, b)),\n\n AxisOrder::NorthEast => Ok((b, a)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use chrono::{TimeZone, Utc};\n\n use geoengine_datatypes::spatial_reference::SpatialReferenceAuthority;\n\n use serde::de::value::StringDeserializer;\n", "file_path": "services/src/ogc/util.rs", "rank": 43, "score": 155049.228929093 }, { "content": "/// Create `GdalDatasetParameters` from the infos in the given `dataset` and its `band`.\n\n/// `path` is the location of the actual data, `band_out` allows optionally specifying a different\n\n/// band in the resulting parameters, otherwise `band` is used.\n\npub fn gdal_parameters_from_dataset(\n\n dataset: &Dataset,\n\n band: usize,\n\n path: &Path,\n\n band_out: Option<usize>,\n\n open_options: Option<Vec<String>>,\n\n) -> Result<GdalDatasetParameters> {\n\n let rasterband = &dataset.rasterband(band as isize)?;\n\n\n\n Ok(GdalDatasetParameters {\n\n file_path: PathBuf::from(path),\n\n rasterband_channel: band_out.unwrap_or(band),\n\n geo_transform: dataset.geo_transform().context(error::Gdal)?.into(),\n\n file_not_found_handling: FileNotFoundHandling::Error,\n\n no_data_value: rasterband.no_data_value(),\n\n properties_mapping: None,\n\n width: rasterband.x_size(),\n\n height: rasterband.y_size(),\n\n gdal_open_options: open_options,\n\n gdal_config_options: None,\n\n })\n\n}\n", "file_path": 
"operators/src/util/gdal.rs", "rank": 44, "score": 155037.3744089245 }, { "content": "/// parse wcs 1.1.1, format is like `urn:ogc:def:crs:EPSG::4326`\n\npub fn parse_wcs_crs<'de, D>(deserializer: D) -> Result<SpatialReference, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if let Some(crs) = s.strip_prefix(\"urn:ogc:def:crs:\") {\n\n SpatialReference::from_str(&crs.replace(\"::\", \":\")).map_err(D::Error::custom)\n\n } else {\n\n Err(D::Error::custom(\"cannot parse crs\"))\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 45, "score": 146344.90143588872 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn last_tile_fold_fn<T>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n{\n\n let mut next_accu = tile;\n\n next_accu.time = acc.accu_tile.time;\n\n\n\n TemporalRasterAggregationTileAccu {\n\n accu_tile: next_accu,\n\n initial_state: false,\n\n }\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 46, "score": 145199.48236604483 }, { "content": "pub fn first_tile_fold_fn<T>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n{\n\n if acc.initial_state {\n\n let mut next_accu = tile;\n\n next_accu.time = acc.accu_tile.time;\n\n\n\n TemporalRasterAggregationTileAccu {\n\n accu_tile: next_accu,\n\n initial_state: false,\n\n }\n\n } else {\n\n acc\n\n }\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 47, "score": 145193.5122370174 }, { "content": "#[allow(dead_code)]\n\npub fn fold_by_blit_future<T>(\n\n accu: RasterTile2D<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<RasterTile2D<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n 
tokio::task::spawn_blocking(|| fold_by_blit_impl(accu, tile)).then(async move |x| match x {\n\n Ok(r) => r,\n\n Err(e) => Err(e.into()),\n\n })\n\n}\n\n\n", "file_path": "operators/src/adapters/raster_subquery_adapter.rs", "rank": 48, "score": 142375.83638625816 }, { "content": "pub fn fold_fn<T, C>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n C: AccFunction,\n\n{\n\n let mut accu_tile = acc.accu_tile;\n\n\n\n let grid = if acc.initial_state {\n\n tile.grid_array\n\n } else {\n\n match (accu_tile.grid_array, tile.grid_array) {\n\n (GridOrEmpty::Grid(mut a), GridOrEmpty::Grid(g)) => {\n\n a.data = a\n\n .inner_ref()\n\n .iter()\n\n .zip(g.inner_ref())\n\n .map(|(x, y)| C::acc(a.no_data_value, *x, *y))\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 49, "score": 142338.54646444926 }, { "content": "/// Serde deserializer for booleans with case insensitive strings\n\npub fn bool_option_case_insensitive<'de, D>(deserializer: D) -> Result<Option<bool>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n if s.is_empty() {\n\n Ok(None)\n\n } else {\n\n bool::from_str(&s.to_lowercase())\n\n .map(Some)\n\n .map_err(|_error| {\n\n D::Error::custom(format_args!(\"could not parse string as boolean: {}\", s))\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod mod_tests {\n\n use super::*;\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 50, "score": 141993.32098868466 }, { "content": "struct StringOrNumberDeserializer;\n\nimpl<'de> Visitor<'de> for StringOrNumberDeserializer {\n\n type Value = StringOrNumber;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"an integer, float or string\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: 
serde::de::Error,\n\n {\n\n Ok(StringOrNumber::Int(v))\n\n }\n\n\n\n fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n self.visit_i64(v as i64)\n", "file_path": "operators/src/util/input/string_or_number.rs", "rank": 51, "score": 140795.6514187395 }, { "content": "fn multi_point_collection_benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"MultiPointCollection\");\n\n\n\n group.bench_function(\"Builder Plain 100\", |b| {\n\n b.iter(|| {\n\n let mut builder = MultiPointCollection::builder().finish_header();\n\n for i in 0..100 {\n\n builder\n\n .push_geometry(Coordinate2D::new(i as f64, i as f64).into())\n\n .unwrap();\n\n builder\n\n .push_time_interval(TimeInterval::new_unchecked(i, i + 1))\n\n .unwrap();\n\n builder.finish_row();\n\n }\n\n black_box(builder.build())\n\n })\n\n });\n\n\n\n group.bench_function(\"Builder with Number 100\", |b| {\n", "file_path": "datatypes/benches/multi_point_collection.rs", "rank": 52, "score": 140508.25316487066 }, { "content": "#[allow(dead_code)]\n\n#[allow(clippy::type_complexity)]\n\n#[allow(clippy::needless_pass_by_value)]\n\npub fn fold_by_coordinate_lookup_impl<T>(\n\n accu: TileWithProjectionCoordinates<T>,\n\n tile: RasterTile2D<T>,\n\n) -> Result<TileWithProjectionCoordinates<T>>\n\nwhere\n\n T: Pixel,\n\n{\n\n let mut accu = accu;\n\n let t_union = accu.accu_tile.time.union(&tile.time)?;\n\n\n\n accu.tile_mut().time = t_union;\n\n\n\n if tile.grid_array.is_empty() {\n\n return Ok(accu);\n\n }\n\n\n\n let TileWithProjectionCoordinates { accu_tile, coords } = accu;\n\n\n\n let mut materialized_accu_tile = accu_tile.into_materialized_tile(); //in a fold chain the real materialization should only happen once. 
All other calls will be simple conversions.\n\n\n\n match insert_projected_pixels(&mut materialized_accu_tile, &tile, coords.iter()) {\n\n Ok(_) => Ok(TileWithProjectionCoordinates {\n\n accu_tile: materialized_accu_tile.into(),\n\n coords,\n\n }),\n\n Err(error) => Err(error),\n\n }\n\n}\n\n\n", "file_path": "operators/src/adapters/raster_subquery_adapter.rs", "rank": 53, "score": 140209.93208840946 }, { "content": "#[allow(dead_code)]\n\npub fn fold_by_coordinate_lookup_future<T>(\n\n accu: TileWithProjectionCoordinates<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl TryFuture<Ok = TileWithProjectionCoordinates<T>, Error = error::Error>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| fold_by_coordinate_lookup_impl(accu, tile)).then(\n\n async move |x| match x {\n\n Ok(r) => r,\n\n Err(e) => Err(e.into()),\n\n },\n\n )\n\n}\n\n\n", "file_path": "operators/src/adapters/raster_subquery_adapter.rs", "rank": 54, "score": 140198.78393961783 }, { "content": "pub trait TestDefault {\n\n /// Generate a default value used for testing. 
Use this instead of the `Default` trait\n\n /// if the default value only makes sense in tests and not in production code.\n\n fn test_default() -> Self;\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 55, "score": 139577.57682334312 }, { "content": "pub fn no_data_ignoring_fold_fn<T, C>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n C: NoDataIgnoringAccFunction,\n\n{\n\n let mut acc_tile = acc.into_tile();\n\n let grid = match (acc_tile.grid_array, tile.grid_array) {\n\n (GridOrEmpty::Grid(mut a), GridOrEmpty::Grid(g)) => {\n\n a.data = a\n\n .inner_ref()\n\n .iter()\n\n .zip(g.inner_ref())\n\n .map(|(x, y)| C::acc_ignore_no_data(a.no_data_value, *x, *y))\n\n .collect();\n\n GridOrEmpty::Grid(a)\n\n }\n\n // TODO: need to increase temporal validity?\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 56, "score": 139055.82287570235 }, { "content": "/// custom spatial references not known by proj or that shall be overriden\n\nfn custom_spatial_reference_specification(\n\n srs_string: &str,\n\n) -> Option<SpatialReferenceSpecification> {\n\n // TODO: provide a generic storage for custom spatial reference specifications\n\n match srs_string.to_uppercase().as_str() {\n\n \"SR-ORG:81\" => Some(SpatialReferenceSpecification {\n\n name: \"GEOS - GEOstationary Satellite\".to_owned(),\n\n spatial_reference: SpatialReference::new(SpatialReferenceAuthority::SrOrg, 81),\n\n proj_string: \"+proj=geos +lon_0=0 +h=-0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs\"\n\n .into(),\n\n extent: BoundingBox2D::new_unchecked(\n\n (-5_568_748.276, -5_568_748.276).into(),\n\n (5_568_748.276, 5_568_748.276).into(),\n\n ),\n\n axis_labels: None,\n\n axis_order: Some(AxisOrder::EastNorth),\n\n }),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "services/src/handlers/spatial_references.rs", "rank": 57, "score": 
138693.88895420698 }, { "content": "#[allow(clippy::missing_panics_doc)]\n\npub fn create_ndvi_meta_data() -> GdalMetaDataRegular {\n\n let no_data_value = Some(0.); // TODO: is it really 0?\n\n GdalMetaDataRegular {\n\n start: TimeInstance::from_millis(1_388_534_400_000).unwrap(),\n\n step: TimeStep {\n\n granularity: TimeGranularity::Months,\n\n step: 1,\n\n },\n\n time_placeholders: hashmap! {\n\n \"%_START_TIME_%\".to_string() => GdalSourceTimePlaceholder {\n\n format: \"%Y-%m-%d\".to_string(),\n\n reference: TimeReference::Start,\n\n },\n\n },\n\n params: GdalDatasetParameters {\n\n file_path: test_data!(\"raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF\").into(),\n\n rasterband_channel: 1,\n\n geo_transform: GdalDatasetGeoTransform {\n\n origin_coordinate: (-180., 90.).into(),\n\n x_pixel_size: 0.1,\n", "file_path": "operators/src/util/gdal.rs", "rank": 58, "score": 138129.95150314242 }, { "content": "/// create an axis aligned rectangle using the values \"a,b,c,d\" from OGC bbox-like parameters using the axis ordering for `spatial_reference`\n\npub fn rectangle_from_ogc_params<A: AxisAlignedRectangle>(\n\n values: [f64; 4],\n\n spatial_reference: SpatialReference,\n\n) -> Result<A> {\n\n let [a, b, c, d] = values;\n\n match spatial_reference_specification(&spatial_reference.proj_string()?)?\n\n .axis_order\n\n .ok_or(error::Error::AxisOrderingNotKnownForSrs {\n\n srs_string: spatial_reference.srs_string(),\n\n })? 
{\n\n AxisOrder::EastNorth => {\n\n A::from_min_max((a, b).into(), (c, d).into()).context(error::DataType)\n\n }\n\n AxisOrder::NorthEast => {\n\n A::from_min_max((b, a).into(), (d, c).into()).context(error::DataType)\n\n }\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 59, "score": 137865.4776059595 }, { "content": "pub trait SpatialPartitioned {\n\n fn spatial_partition(&self) -> SpatialPartition2D;\n\n}\n\n\n\nimpl AxisAlignedRectangle for SpatialPartition2D {\n\n fn from_min_max(min: Coordinate2D, max: Coordinate2D) -> Result<Self> {\n\n SpatialPartition2D::new((min.x, max.y).into(), (max.x, min.y).into())\n\n }\n\n\n\n fn upper_left(&self) -> Coordinate2D {\n\n self.upper_left_coordinate\n\n }\n\n\n\n fn lower_right(&self) -> Coordinate2D {\n\n self.lower_right_coordinate\n\n }\n\n\n\n fn upper_right(&self) -> Coordinate2D {\n\n Coordinate2D {\n\n x: self.lower_right_coordinate.x,\n", "file_path": "datatypes/src/primitives/spatial_partition.rs", "rank": 60, "score": 137124.61130292696 }, { "content": "pub fn grid_eq_with_no_data<D, T>(g1: &Grid<D, T>, g2: &Grid<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n if g1.data.len() != g2.data.len() || g1.shape.ne(&g2.shape) {\n\n return false;\n\n }\n\n\n\n if !match (g1.no_data_value, g2.no_data_value) {\n\n (None, None) => true,\n\n (Some(_), None) => false,\n\n (_, Some(y)) => g1.is_no_data(y),\n\n } {\n\n return false;\n\n }\n\n\n\n for (l, r) in g1.data.iter().zip(g2.data.iter()) {\n\n if g1.is_no_data(*l) && g1.is_no_data(*r) {\n\n continue;\n\n }\n\n if l != r {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 61, "score": 136604.65720721366 }, { "content": "fn suggest_main_file(upload: &Upload) -> Option<String> {\n\n let known_extensions = [\"csv\", \"shp\", \"json\", \"geojson\", \"gpkg\", \"sqlite\"]; // TODO: rasters\n\n\n\n if upload.files.len() == 1 {\n\n return Some(upload.files[0].name.clone());\n\n 
}\n\n\n\n let mut sorted_files = upload.files.clone();\n\n sorted_files.sort_by(|a, b| b.byte_size.cmp(&a.byte_size));\n\n\n\n for file in sorted_files {\n\n if known_extensions.iter().any(|ext| file.name.ends_with(ext)) {\n\n return Some(file.name);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "services/src/handlers/datasets.rs", "rank": 62, "score": 136405.91137286957 }, { "content": "struct StringOrNumberRangeDeserializer;\n\nimpl<'de> Visitor<'de> for StringOrNumberRangeDeserializer {\n\n type Value = StringOrNumberRange;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a 2-tuple of integers, floats or strings\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n\n {\n\n let mut elements: Vec<StringOrNumber> = Vec::with_capacity(seq.size_hint().unwrap_or(2));\n\n\n\n while let Some(element) = seq.next_element()? {\n\n elements.push(element);\n\n }\n\n\n\n if elements.len() != 2 {\n\n return Err(A::Error::invalid_length(elements.len(), &Self));\n", "file_path": "operators/src/util/input/string_or_number_range.rs", "rank": 63, "score": 136389.22358860172 }, { "content": "#[test]\n\nfn simple() {\n\n let mut primitive_array_builder = Int32Builder::new(5);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_value(2).unwrap();\n\n primitive_array_builder\n\n .append_slice(&(3..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 0);\n\n\n\n let mask = vec![true, false, true, false, true].into();\n\n\n\n let filtered_array = filter(&primitive_array, &mask).unwrap();\n\n\n\n assert_eq!(filtered_array.len(), 3);\n\n assert_eq!(filtered_array.null_count(), 0);\n\n\n\n assert!(primitive_array.data().null_bitmap().is_none());\n\n}\n\n\n", "file_path": 
"datatypes/tests/example-arrow.rs", "rank": 64, "score": 136220.04327264352 }, { "content": "#[test]\n\nfn multipoints() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let array = {\n\n let data = ArrayData::builder(DataType::List(Box::new(Field::new(\n\n \"\",\n\n DataType::FixedSizeList(Box::new(Field::new(\"\", DataType::Float64, false)), 2),\n\n false,\n\n ))))\n\n .len(2) // number of multipoints\n\n .add_buffer(Buffer::from(&[0_i32, 2, 5].to_byte_slice()))\n\n .add_child_data(\n\n ArrayData::builder(DataType::FixedSizeList(\n\n Box::new(Field::new(\"\", DataType::Float64, false)),\n\n 2,\n\n ))\n\n .len(5) // number of coordinates\n\n .add_child_data(\n\n ArrayData::builder(DataType::Float64)\n\n .len(10) // number of floats\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 65, "score": 136220.04327264352 }, { "content": "#[test]\n\nfn strings2() {\n\n let array = {\n\n let mut builder = StringBuilder::new(5);\n\n\n\n for string in &[\"hello\", \"from\", \"the\", \"other\", \"side\"] {\n\n builder.append_value(string).unwrap();\n\n }\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n assert_eq!(array.null_count(), 0);\n\n\n\n assert_eq!(array.value_offsets(), &[0, 5, 9, 12, 17, 21]);\n\n\n\n assert_eq!(array.value_length(0), 5);\n\n assert_eq!(array.value_length(1), \"from\".len() as i32);\n\n\n\n assert_eq!(array.value(0), \"hello\");\n\n assert_eq!(array.value(1), \"from\");\n\n assert_eq!(array.value(2), \"the\");\n\n assert_eq!(array.value(3), \"other\");\n\n assert_eq!(array.value(4), \"side\");\n\n\n\n assert_eq!(array.value_data().as_slice(), b\"hellofromtheotherside\");\n\n assert_eq!(array.value_offsets(), &[0, 5, 9, 12, 17, 21]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 66, "score": 136220.04327264352 }, { "content": "#[test]\n\nfn table() {\n\n let schema = vec![\n\n Field::new(\"feature_start\", DataType::UInt64, false),\n\n Field::new(\"time_start\", DataType::Date64, false),\n\n 
];\n\n\n\n let array = {\n\n let mut builder = StructBuilder::from_fields(schema, 5);\n\n\n\n for &(feature_start, time) in &[(0_u64, 0_i64), (1, 10), (2, 20), (3, 30), (4, 40)] {\n\n builder\n\n .field_builder(0)\n\n .and_then(|builder: &mut UInt64Builder| builder.append_value(feature_start).ok())\n\n .unwrap();\n\n builder\n\n .field_builder(1)\n\n .and_then(|builder: &mut Date64Builder| builder.append_value(time).ok())\n\n .unwrap();\n\n builder.append(true).unwrap();\n\n }\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 67, "score": 136220.04327264352 }, { "content": "#[test]\n\nfn serialize() {\n\n let array = {\n\n let mut builder = Int32Builder::new(5);\n\n builder\n\n .append_slice(&(1..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n\n\n // no serialization of arrays by now\n\n let json = serde_json::to_string(array.values()).unwrap();\n\n\n\n assert_eq!(json, \"[1,2,3,4,5]\");\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 68, "score": 136220.04327264352 }, { "content": "#[test]\n\nfn list() {\n\n let array = {\n\n let mut builder = ListBuilder::new(Int32Builder::new(0));\n\n\n\n builder.values().append_value(0).unwrap();\n\n builder.values().append_value(1).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(2).unwrap();\n\n builder.values().append_value(3).unwrap();\n\n builder.values().append_value(4).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 2);\n\n assert_eq!(array.value_offsets(), &[0, 2, 5]);\n\n assert_eq!(array.value_length(0), 2);\n\n assert_eq!(array.value_length(1), 3);\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 69, "score": 136220.04327264352 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn offset() {\n\n let array = {\n\n let mut array_builder = Float64Builder::new(5);\n\n array_builder\n\n .append_slice(&[2e10, 
4e40, 20., 9.4, 0.])\n\n .unwrap();\n\n array_builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n assert_eq!(array.offset(), 0);\n\n\n\n let subarray = array.slice(2, 2);\n\n let typed_subarray: &Float64Array = subarray.as_any().downcast_ref().unwrap();\n\n\n\n assert_eq!(subarray.len(), 2);\n\n assert_eq!(subarray.offset(), 2);\n\n assert_eq!(typed_subarray.values().len(), 2);\n\n\n\n assert_eq!(typed_subarray.values(), &[20., 9.4]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 70, "score": 136219.80225252148 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment)]\n\nfn ocl() {\n\n let array = {\n\n let mut builder = Int32Builder::new(5);\n\n builder\n\n .append_slice(&(1..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n\n\n let src = r#\"\n\n __kernel void add(__global int* buffer, int scalar) {\n\n buffer[get_global_id(0)] += scalar;\n\n }\n\n \"#;\n\n\n\n let pro_que = ProQue::builder()\n\n .src(src)\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 71, "score": 136219.74490578723 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment, clippy::identity_op)]\n\nfn binary() {\n\n let t1 = TimeInterval::new(0, 1).unwrap();\n\n let t2_bytes: [u8; 16] = unsafe { mem::transmute(t1) };\n\n let t2: TimeInterval = unsafe { mem::transmute(t2_bytes) };\n\n assert_eq!(t1, t2);\n\n\n\n let array = {\n\n let mut builder = FixedSizeBinaryBuilder::new(3, mem::size_of::<TimeInterval>() as i32);\n\n\n\n for &t in &[\n\n TimeInterval::new(0, 1).unwrap(),\n\n TimeInterval::new(1, 2).unwrap(),\n\n TimeInterval::new(2, 3).unwrap(),\n\n ] {\n\n let t_bytes: [u8; 16] = unsafe { mem::transmute(t) };\n\n builder.append_value(&t_bytes).unwrap();\n\n }\n\n\n\n builder.finish()\n\n };\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 72, "score": 136219.5793216336 }, { "content": "/// Tries to reproject all coordinates at once. 
If this fails, tries to reproject coordinate by coordinate.\n\n/// It returns all coordinates in input order.\n\n/// In case of success it returns `Some(Coordinate2D)` and `None` otherwise.\n\npub fn project_coordinates_fail_tolerant<P: CoordinateProjection>(\n\n i: &[Coordinate2D],\n\n p: &P,\n\n) -> Vec<Option<Coordinate2D>> {\n\n if let Ok(projected_all) = p.project_coordinates(&i) {\n\n return projected_all\n\n .into_iter()\n\n .map(Some)\n\n .collect::<Vec<Option<Coordinate2D>>>();\n\n }\n\n\n\n let individual_projected: Vec<Option<Coordinate2D>> = i\n\n .iter()\n\n .map(|&c| (c, c.reproject(p)))\n\n //.inspect(|(c, c_p)| {\n\n // dbg!(c, c_p);\n\n //})\n\n .map(|(_, c_p)| c_p.ok())\n\n .collect();\n\n // For debuging use this to find oput how many coordinates could be transformed.\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 73, "score": 135677.48691094102 }, { "content": "#[inline]\n\npub fn average_floor<I>(a: I, b: I) -> I\n\nwhere\n\n I: Copy\n\n + Add<I, Output = I>\n\n + Shr<usize, Output = I>\n\n + BitAnd<I, Output = I>\n\n + BitOr<I, Output = I>\n\n + BitXor<I, Output = I>,\n\n{\n\n (a & b) + ((a ^ b) >> 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn average_floor_checks() {\n\n assert_eq!(\n\n average_floor(631_152_000_000_i64, 946_684_800_001_i64),\n\n 788_918_400_000_i64\n\n );\n\n\n\n assert_eq!(average_floor(i64::MIN, i64::MAX), -1);\n\n }\n\n}\n", "file_path": "operators/src/util/math.rs", "rank": 74, "score": 135178.32650841074 }, { "content": "/// Determine the vector data type of the collection\n\npub trait VectorDataTyped {\n\n fn vector_data_type(&self) -> VectorDataType;\n\n}\n\n\n\n/// A feature collection, wrapped by type info\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum TypedFeatureCollection {\n\n Data(DataCollection),\n\n MultiPoint(MultiPointCollection),\n\n MultiLineString(MultiLineStringCollection),\n\n MultiPolygon(MultiPolygonCollection),\n\n}\n\n\n\n/// A feature 
collection reference, wrapped by type info\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[allow(dead_code)]\n\npub enum TypedFeatureCollectionRef<'c> {\n\n Data(&'c DataCollection),\n\n MultiPoint(&'c MultiPointCollection),\n\n MultiLineString(&'c MultiLineStringCollection),\n", "file_path": "datatypes/src/collections/data_types.rs", "rank": 75, "score": 134726.48025855326 }, { "content": "pub fn eq_with_no_data<D, T>(g1: &GridOrEmpty<D, T>, g2: &GridOrEmpty<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n match (g1, g2) {\n\n (GridOrEmpty::Grid(g1), GridOrEmpty::Grid(g2)) => grid_eq_with_no_data(g1, g2),\n\n (GridOrEmpty::Empty(g1), GridOrEmpty::Empty(g2)) => empty_grid_eq_with_no_data(g1, g2),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 76, "score": 134359.69332428896 }, { "content": "fn calculate_esd(timestamp: &DateTime<Utc>) -> f64 {\n\n let perihelion = f64::from(timestamp.ordinal()) - 3.0;\n\n let e = 0.0167;\n\n let theta = std::f64::consts::TAU * (perihelion / 365.0);\n\n 1.0 - e * theta.cos()\n\n}\n\n\n\n#[async_trait]\n\nimpl<Q> QueryProcessor for ReflectanceProcessor<Q>\n\nwhere\n\n Q: QueryProcessor<Output = RasterTile2D<PixelOut>, SpatialBounds = SpatialPartition2D>,\n\n{\n\n type Output = RasterTile2D<PixelOut>;\n\n type SpatialBounds = SpatialPartition2D;\n\n\n\n async fn query<'a>(\n\n &'a self,\n\n query: RasterQueryRectangle,\n\n ctx: &'a dyn QueryContext,\n\n ) -> Result<BoxStream<'a, Result<Self::Output>>> {\n", "file_path": "operators/src/processing/meteosat/reflectance.rs", "rank": 77, "score": 134317.71184629409 }, { "content": "pub fn mean_tile_fold_future<T>(\n\n accu: TemporalMeanTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalMeanTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| {\n\n let mut accu = accu;\n\n accu.add_tile(tile)?;\n\n Ok(accu)\n\n })\n\n .then(async move |x| match x {\n\n Ok(r) => r,\n\n Err(e) 
=> Err(e.into()),\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TemporalMeanTileAccu<T> {\n", "file_path": "operators/src/processing/temporal_raster_aggregation/mean_aggregation_subquery.rs", "rank": 78, "score": 134286.19784411055 }, { "content": "#[test]\n\nfn nested_lists() {\n\n let array = {\n\n let mut builder = ListBuilder::new(ListBuilder::new(Int32Builder::new(0)));\n\n\n\n // [[[10, 11, 12], [20, 21]], [[30]]\n\n builder\n\n .values()\n\n .values()\n\n .append_slice(&[10, 11, 12])\n\n .unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.values().values().append_slice(&[20, 21]).unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.values().values().append_slice(&[30]).unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 79, "score": 133167.20071081666 }, { "content": "#[test]\n\nfn null_bytes() {\n\n let mut primitive_array_builder = Int32Builder::new(2);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n primitive_array_builder.append_option(None).unwrap();\n\n primitive_array_builder.append_option(Some(4)).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 3);\n\n\n\n if let Some(null_bitmap) = primitive_array.data().null_bitmap() {\n\n assert_eq!(null_bitmap.len(), 8); // len returns number of bits\n\n\n\n assert_eq!(\n\n null_bitmap.clone().into_buffer().as_slice(), // must clone bitmap because there is no way to get a reference to the data\n\n &[0b0000_1001] // right most bit is first element, 1 = valid value, 0 = null or unset\n\n );\n\n }\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 80, "score": 
133167.20071081666 }, { "content": "#[test]\n\nfn filter_example() {\n\n let a = Int32Array::from(vec![Some(1), Some(2), Some(3)]);\n\n\n\n // dbg!(&a);\n\n\n\n let b = filter(\n\n &a,\n\n &BooleanArray::from(vec![Some(true), Some(false), Some(true)]),\n\n )\n\n .unwrap();\n\n\n\n // dbg!(&b);\n\n\n\n assert_eq!(\n\n b.as_any().downcast_ref::<Int32Array>().unwrap(),\n\n &Int32Array::from(vec![Some(1), Some(3)])\n\n );\n\n\n\n let c = Int32Array::from(vec![Some(1), Some(2), None]);\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 81, "score": 133167.20071081666 }, { "content": "#[test]\n\n#[allow(clippy::eq_op)]\n\nfn float_equality() {\n\n let mut floats = Float64Builder::new(3);\n\n floats.append_value(4.0).unwrap();\n\n floats.append_null().unwrap();\n\n floats.append_value(f64::NAN).unwrap();\n\n\n\n let floats = floats.finish();\n\n\n\n assert_eq!(floats, floats);\n\n\n\n let mut floats2 = Float64Builder::new(3);\n\n floats2.append_value(4.0).unwrap();\n\n floats2.append_null().unwrap();\n\n floats2.append_value(f64::NAN).unwrap();\n\n\n\n let floats2 = floats2.finish();\n\n\n\n assert_eq!(floats, floats2);\n\n\n\n let mut floats3 = Float64Builder::new(3);\n\n floats3.append_value(f64::NAN).unwrap();\n\n floats3.append_null().unwrap();\n\n floats3.append_value(4.0).unwrap();\n\n\n\n let floats3 = floats3.finish();\n\n\n\n assert_ne!(floats, floats3);\n\n assert_ne!(floats2, floats3);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 82, "score": 133166.9596906946 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn multipoint_builder() {\n\n let float_builder = arrow::array::Float64Builder::new(0);\n\n let coordinate_builder = arrow::array::FixedSizeListBuilder::new(float_builder, 2);\n\n let mut multi_point_builder = arrow::array::ListBuilder::new(coordinate_builder);\n\n\n\n multi_point_builder\n\n .values()\n\n .values()\n\n .append_slice(&[0.0, 0.1])\n\n .unwrap();\n\n 
multi_point_builder.values().append(true).unwrap();\n\n multi_point_builder\n\n .values()\n\n .values()\n\n .append_slice(&[1.0, 1.1])\n\n .unwrap();\n\n multi_point_builder.values().append(true).unwrap();\n\n\n\n multi_point_builder.append(true).unwrap(); // first multi point\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 83, "score": 133166.9596906946 }, { "content": "fn colorizer_from_style(styles: &str) -> Result<Option<Colorizer>> {\n\n match styles.strip_prefix(\"custom:\") {\n\n None => Ok(None),\n\n Some(suffix) => serde_json::from_str(suffix).map_err(error::Error::from),\n\n }\n\n}\n\n\n", "file_path": "services/src/handlers/wms.rs", "rank": 84, "score": 132998.98209487402 }, { "content": "pub trait DynamicRasterDataType {\n\n fn raster_data_type(&self) -> RasterDataType;\n\n}\n\n\n\nimpl<R> DynamicRasterDataType for R\n\nwhere\n\n R: StaticRasterDataType,\n\n{\n\n fn raster_data_type(&self) -> RasterDataType {\n\n R::TYPE\n\n }\n\n}\n\n\n\nimpl RasterDataType {\n\n pub fn ocl_type(self) -> &'static str {\n\n match self {\n\n RasterDataType::U8 => \"uchar\",\n\n RasterDataType::U16 => \"ushort\",\n\n RasterDataType::U32 => \"uint\",\n\n RasterDataType::U64 => \"ulong\",\n", "file_path": "datatypes/src/raster/data_type.rs", "rank": 85, "score": 132472.6705286985 }, { "content": "fn parse_time_from_str<'de, D>(s: &str) -> Result<TimeInterval, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let split: Vec<_> = s\n\n .split('/')\n\n // use `from_str` instead of `parse_from_rfc3339` to use a relaxed form of RFC3339 that supports dates BC\n\n .map(chrono::DateTime::<FixedOffset>::from_str)\n\n .collect();\n\n\n\n match *split.as_slice() {\n\n [Ok(time)] => TimeInterval::new(time.timestamp_millis(), time.timestamp_millis())\n\n .map_err(D::Error::custom),\n\n [Ok(start), Ok(end)] => TimeInterval::new(start.timestamp_millis(), end.timestamp_millis())\n\n .map_err(D::Error::custom),\n\n _ => Err(D::Error::custom(format!(\"Invalid time 
{}\", s))),\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 86, "score": 131183.73261954964 }, { "content": "pub fn first_tile_fold_future<T>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| first_tile_fold_fn(accu, tile)).then(async move |x| match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n })\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 87, "score": 130790.02920219688 }, { "content": "pub fn last_tile_fold_future<T>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| last_tile_fold_fn(accu, tile)).then(async move |x| match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TemporalRasterAggregationTileAccu<T> {\n\n accu_tile: RasterTile2D<T>,\n\n initial_state: bool,\n\n}\n\n\n\nimpl<T: Pixel> FoldTileAccu for TemporalRasterAggregationTileAccu<T> {\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 88, "score": 130790.02920219688 }, { "content": "pub fn get_config_element<'a, T>() -> Result<T>\n\nwhere\n\n T: ConfigElement + Deserialize<'a>,\n\n{\n\n get_config(T::KEY)\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 89, "score": 130655.51165503245 }, { "content": "/// A trait that allows a common access to lines of `MultiLineString`s and its references\n\npub trait MultiLineStringAccess {\n\n type L: AsRef<[Coordinate2D]>;\n\n fn lines(&self) -> &[Self::L];\n\n}\n\n\n\n/// A representation of a simple feature multi line string\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct 
MultiLineString {\n\n coordinates: Vec<Vec<Coordinate2D>>,\n\n}\n\n\n\nimpl MultiLineString {\n\n pub fn new(coordinates: Vec<Vec<Coordinate2D>>) -> Result<Self> {\n\n ensure!(\n\n !coordinates.is_empty() && coordinates.iter().all(|c| c.len() >= 2),\n\n error::UnallowedEmpty\n\n );\n\n\n\n Ok(Self::new_unchecked(coordinates))\n\n }\n", "file_path": "datatypes/src/primitives/multi_line_string.rs", "rank": 90, "score": 130356.29601452203 }, { "content": "#[test]\n\nfn fixed_size_list() {\n\n let array = {\n\n let mut builder = FixedSizeListBuilder::new(Int32Builder::new(0), 2);\n\n\n\n builder.values().append_value(0).unwrap();\n\n builder.values().append_value(1).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(2).unwrap();\n\n builder.values().append_value(3).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(4).unwrap();\n\n builder.values().append_value(5).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 3);\n\n assert_eq!(array.value_offset(0), 0);\n\n assert_eq!(array.value_offset(1), 2);\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 91, "score": 130292.71055973889 }, { "content": "#[test]\n\nfn gt_eq_example() {\n\n let a = Int32Array::from(vec![Some(1), Some(2), None]);\n\n\n\n // dbg!(&a);\n\n\n\n let b = gt_eq_scalar(&a, 2).unwrap();\n\n\n\n // dbg!(&b);\n\n\n\n assert_eq!(&b, &BooleanArray::from(vec![Some(false), Some(true), None]));\n\n}\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 92, "score": 130292.71055973889 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment)]\n\nfn multipoint_builder_bytes() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let coordinate_builder =\n\n arrow::array::FixedSizeBinaryBuilder::new(0, std::mem::size_of::<[f64; 2]>() as i32);\n\n let mut multi_point_builder = arrow::array::ListBuilder::new(coordinate_builder);\n\n\n\n multi_point_builder\n\n .values()\n\n 
.append_value(&[0.0, 0.1].to_byte_slice())\n\n .unwrap();\n\n multi_point_builder\n\n .values()\n\n .append_value(&[1.0, 1.1].to_byte_slice())\n\n .unwrap();\n\n\n\n multi_point_builder.append(true).unwrap(); // first multi point\n\n\n\n multi_point_builder\n\n .values()\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 93, "score": 130292.41219288259 }, { "content": "pub fn empty_grid_eq_with_no_data<D, T>(g1: &EmptyGrid<D, T>, g2: &EmptyGrid<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n g1.shape.eq(&g2.shape) && g1.is_no_data(g2.no_data_value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::raster::{EmptyGrid, Grid2D, GridShape2D};\n\n use crate::util::test::{empty_grid_eq_with_no_data, grid_eq_with_no_data};\n\n\n\n #[test]\n\n fn test_empty_grid_eq_with_no_data_integral_ok() {\n\n let d1: GridShape2D = [3, 2].into();\n\n let d2: GridShape2D = [3, 2].into();\n\n\n\n let ndv1 = 42;\n\n let ndv2 = 42;\n", "file_path": "datatypes/src/util/test.rs", "rank": 94, "score": 130143.35355291577 }, { "content": "/// A version of `suggest_pixel_size_from_diag_cross` that takes a `partition` and a projected counterpart as input\n\npub fn suggest_pixel_size_from_diag_cross_projected<B: AxisAlignedRectangle>(\n\n bbox: B,\n\n bbox_projected: B,\n\n spatial_resolution: SpatialResolution,\n\n) -> Result<SpatialResolution> {\n\n let diag_pixels = euclidian_pixel_distance(bbox, spatial_resolution)?;\n\n\n\n let proj_ul_lr_distance =\n\n diag_distance(bbox_projected.upper_left(), bbox_projected.lower_right());\n\n\n\n let proj_ll_ur_distance =\n\n diag_distance(bbox_projected.lower_left(), bbox_projected.upper_right());\n\n\n\n let min_dist_r = proj_ul_lr_distance.min(proj_ll_ur_distance);\n\n\n\n Ok(SpatialResolution::new_unchecked(\n\n min_dist_r / diag_pixels,\n\n min_dist_r / diag_pixels,\n\n ))\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 95, "score": 129758.32623548197 }, { "content": "pub trait 
Plot {\n\n /// Creates a Vega string for embedding it into a Html page\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method fails on internal errors of the plot.\n\n ///\n\n fn to_vega_embeddable(&self, allow_interactions: bool) -> Result<PlotData>;\n\n\n\n // TODO: create some PNG output, cf. https://github.com/procyon-rs/vega_lite_3.rs/issues/18\n\n // fn to_png(&self, width_px: u16, height_px: u16) -> Vec<u8>;\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PlotData {\n\n pub vega_string: String,\n\n pub metadata: PlotMetaData,\n\n}\n\n\n", "file_path": "datatypes/src/plots/mod.rs", "rank": 96, "score": 129116.85439915958 }, { "content": "/// Opens a Gdal Dataset with the given `path`.\n\n/// Other crates should use this method for Gdal Dataset access as a workaround to avoid strange errors.\n\npub fn gdal_open_dataset(path: &Path) -> Result<Dataset> {\n\n gdal_open_dataset_ex(path, DatasetOptions::default())\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 97, "score": 128483.95821828327 }, { "content": "pub fn fold_future<T, C>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n C: AccFunction,\n\n{\n\n tokio::task::spawn_blocking(|| fold_fn::<T, C>(accu, tile)).then(async move |x| match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n })\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 98, "score": 127969.8086578719 }, { "content": "/// Types that are suitable to act as filters\n\npub trait FilterArray: Into<BooleanArray> {\n\n fn len(&self) -> usize;\n\n}\n\n\n\nimpl FilterArray for Vec<bool> {\n\n fn len(&self) -> usize {\n\n Vec::<_>::len(self)\n\n }\n\n}\n\n\n\nimpl FilterArray for BooleanArray {\n\n fn len(&self) -> usize {\n\n <Self as 
arrow::array::Array>::len(self)\n\n }\n\n}\n\n\n", "file_path": "datatypes/src/collections/feature_collection.rs", "rank": 99, "score": 127717.10103923232 } ]
Rust
capstone-rs/examples/cstool.rs
froydnj/capstone-rs
5044ace8022b1651d1b7c93d41ad56993e0225f5
extern crate capstone; extern crate clap; #[macro_use] extern crate log; extern crate stderrlog; use capstone::prelude::*; use capstone::{Arch, Endian, EnumList, ExtraMode, Mode}; use clap::{App, Arg, ArgGroup}; use std::fmt::Display; use std::fs::File; use std::io::prelude::*; use std::io; use std::process::exit; use std::str::FromStr; const DEFAULT_CAPACITY: usize = 1024; trait ExpectExit<T> { fn expect_exit(self) -> T; } impl<T, E> ExpectExit<T> for Result<T, E> where E: Display, { fn expect_exit(self) -> T { match self { Ok(t) => t, Err(e) => { eprintln!("error: {}", e); exit(1); } } } } fn reg_names<T, I>(cs: &Capstone, regs: T) -> String where T: Iterator<Item = I>, I: Into<RegId>, { let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect(); names.join(", ") } fn group_names<T, I>(cs: &Capstone, regs: T) -> String where T: Iterator<Item = I>, I: Into<InsnGroupId>, { let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect(); names.join(", ") } fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> { let mut output: Vec<u8> = Vec::new(); let mut curr_byte_str = String::with_capacity(2); for b_u8 in input { let b = char::from(b_u8); if ('0' <= b && b <= '9') || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F') { curr_byte_str.push(b); } if curr_byte_str.len() == 2 { debug!(" curr_byte_str={:?}", curr_byte_str); let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpect hex parse error"); output.push(byte); curr_byte_str.clear(); } } if log::max_level() >= log::LevelFilter::Info { let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect(); info!("unhexed_output = {:?}", output_hex); } output } fn disasm<T: Iterator<Item = ExtraMode>>( arch: Arch, mode: Mode, extra_mode: T, endian: Option<Endian>, code: &[u8], addr: u64, show_detail: bool, ) { info!("Got {} bytes", code.len()); let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit(); if show_detail { 
cs.set_detail(true).expect("Failed to set detail"); } let stdout = io::stdout(); let mut handle = stdout.lock(); for i in cs.disasm_all(code, addr).expect_exit().iter() { let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect(); let bytes = bytes.join(" "); writeln!( &mut handle, "{:-10x}: {:35} {:7} {}", i.address(), bytes, i.mnemonic().unwrap(), i.op_str().unwrap_or("") ).is_ok(); if show_detail { let detail = cs.insn_detail(&i).expect("Failed to get insn detail"); let output: &[(&str, String)] = &[ ("insn id:", format!("{:?}", i.id().0)), ("read regs:", reg_names(&cs, detail.regs_read())), ("write regs:", reg_names(&cs, detail.regs_write())), ("insn groups:", group_names(&cs, detail.groups())), ]; for &(ref name, ref message) in output.iter() { writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok(); } } } } fn main() { let _arches: Vec<String> = Arch::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect(); let _modes: Vec<String> = Mode::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect(); let _extra_modes: Vec<String> = ExtraMode::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect(); let matches = App::new("capstone-rs disassembler tool") .about("Disassembles binary file") .arg( Arg::with_name("file") .short("f") .long("file") .help("input file with binary instructions") .takes_value(true), ) .arg( Arg::with_name("stdin") .short("s") .long("stdin") .help("read binary instructions from stdin") .takes_value(false), ) .arg( Arg::with_name("code") .short("c") .long("code") .help("instruction bytes (implies --hex)") .takes_value(true), ) .arg( Arg::with_name("address") .short("r") .long("addr") .help("address of code") .takes_value(true), ) .arg( Arg::with_name("v") 
.short("v") .multiple(true) .help("Sets the level of verbosity"), ) .arg( Arg::with_name("hex") .short("x") .long("hex") .help("Treat input has hex; only select characters that are [a-fA-F0-9]") .takes_value(false), ) .arg( Arg::with_name("DETAIL") .short("d") .long("detail") .help("Print details about instructions") .takes_value(false), ) .arg( Arg::with_name("ARCH") .short("a") .long("arch") .help("Architecture") .takes_value(true) .required(true) .possible_values(arches.as_slice()) .case_insensitive(true), ) .arg( Arg::with_name("MODE") .short("m") .long("mode") .help("Mode") .takes_value(true) .required(true) .possible_values(modes.as_slice()) .case_insensitive(true), ) .arg( Arg::with_name("EXTRA_MODE") .short("e") .long("extra") .help("Extra Mode") .takes_value(true) .required(false) .possible_values(extra_modes.as_slice()) .case_insensitive(true) .multiple(true), ) .arg( Arg::with_name("ENDIAN") .short("n") .long("endian") .help("Endianness") .takes_value(true) .required(false) .possible_values(&["little", "big"]) .case_insensitive(true), ) .group( ArgGroup::with_name("INPUT") .arg("file") .arg("stdin") .arg("code") .required(true), ) .get_matches(); let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of("file") { let mut file = File::open(file_path).expect_exit(); let capacity = match file.metadata() { Err(_) => DEFAULT_CAPACITY, Ok(metadata) => metadata.len() as usize, }; let mut buf = Vec::with_capacity(capacity as usize); file.read_to_end(&mut buf).expect_exit(); buf } else if let Some(code) = matches.value_of("code") { code.as_bytes().iter().map(|x| *x).collect() } else { let mut buf = Vec::with_capacity(DEFAULT_CAPACITY); let stdin = std::io::stdin(); stdin.lock().read_to_end(&mut buf).expect_exit(); buf }; stderrlog::new() .verbosity(matches.occurrences_of("v") as usize) .init() .unwrap(); let is_hex = matches.is_present("hex") || matches.is_present("code"); info!("is_hex = {:?}", is_hex); let show_detail = 
matches.is_present("DETAIL"); info!("show_detail = {:?}", show_detail); let arch: Arch = Arch::from_str(matches.value_of("ARCH").unwrap()) .unwrap() .into(); info!("Arch = {:?}", arch); let mode: Mode = Mode::from_str(matches.value_of("MODE").unwrap()) .unwrap() .into(); info!("Mode = {:?}", mode); let extra_mode: Vec<_> = match matches.values_of("EXTRA_MODE") { None => Vec::with_capacity(0), Some(x) => x .map(|x| ExtraMode::from(ExtraMode::from_str(x).unwrap())) .collect(), }; info!("ExtraMode = {:?}", extra_mode); let endian: Option<Endian> = matches .value_of("ENDIAN") .map(|x| Endian::from_str(x).expect_exit()); info!("Endian = {:?}", endian); let address = u64::from_str_radix(matches.value_of("address").unwrap_or("1000"), 16).expect_exit(); info!("Address = 0x{:x}", address); let input_bytes = if is_hex { unhexed_bytes(direct_input_bytes) } else { direct_input_bytes }; disasm( arch, mode, extra_mode.iter().map(|x| *x), endian, input_bytes.as_slice(), address, show_detail, ); }
extern crate capstone; extern crate clap; #[macro_use] extern crate log; extern crate stderrlog; use capstone::prelude::*; use capstone::{Arch, Endian, EnumList, ExtraMode, Mode}; use clap::{App, Arg, ArgGroup}; use std::fmt::Display; use std::fs::File; use std::io::prelude::*; use std::io; use std::process::exit; use std::str::FromStr; const DEFAULT_CAPACITY: usize = 1024; trait ExpectExit<T> { fn expect_exit(self) -> T; } impl<T, E> ExpectExit<T> for Result<T, E> where E: Display, { fn expect_exit(self) -> T { match self { Ok(t) => t, Err(e) => { eprintln!("error: {}", e); exit(1); } } } } fn reg_names<T, I>(cs: &Capstone, regs: T) -> String where T: Iterator<Item = I>, I: Into<RegId>, { let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect(); names.join(", ") } fn group_names<T, I>(cs: &Capstone, regs: T) -> String where T: Iterator<Item = I>, I: Into<InsnGroupId>, { let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect(); names.join(", ") } fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> { let mut output: Vec<u8> = Vec::new(); let mut curr_byte_str = String::with_capacity(2); for b_u8 in input { let b = char::from(b_u8); if ('0' <= b && b <= '9') || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F') { curr_byte_str.push(b); } if curr_byte_str.len() == 2 { debug!(" curr_byte_str={:?}", curr_byte_str); let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpect hex parse error"); output.push(byte); curr_byte_str.clear(); } } if log::max_level() >= log::LevelFilter::Info { let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect(); info!("unhexed_output = {:?}", output_hex); } output } fn disasm<T: Iterator<Item = ExtraMode>>( arch: Arch, mode: Mode, extra_mode: T, endian: Option<Endian>, code: &[u8], addr: u64, show_detail: bool, ) { info!("Got {} bytes", code.len()); let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit(); if show_detail { 
cs.set_detail(true).expect("Failed to set detail"); } let stdout = io::stdout(); let mut handle = stdout.lock(); for i in cs.disasm_all(code, addr).expect_exit().iter() { let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect(); let bytes = bytes.join(" "); writeln!( &mut handle, "{:-10x}: {:35} {:7} {}", i.address(), bytes, i.mnemonic().unwrap(), i.op_str().
("code") .required(true), ) .get_matches(); let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of("file") { let mut file = File::open(file_path).expect_exit(); let capacity = match file.metadata() { Err(_) => DEFAULT_CAPACITY, Ok(metadata) => metadata.len() as usize, }; let mut buf = Vec::with_capacity(capacity as usize); file.read_to_end(&mut buf).expect_exit(); buf } else if let Some(code) = matches.value_of("code") { code.as_bytes().iter().map(|x| *x).collect() } else { let mut buf = Vec::with_capacity(DEFAULT_CAPACITY); let stdin = std::io::stdin(); stdin.lock().read_to_end(&mut buf).expect_exit(); buf }; stderrlog::new() .verbosity(matches.occurrences_of("v") as usize) .init() .unwrap(); let is_hex = matches.is_present("hex") || matches.is_present("code"); info!("is_hex = {:?}", is_hex); let show_detail = matches.is_present("DETAIL"); info!("show_detail = {:?}", show_detail); let arch: Arch = Arch::from_str(matches.value_of("ARCH").unwrap()) .unwrap() .into(); info!("Arch = {:?}", arch); let mode: Mode = Mode::from_str(matches.value_of("MODE").unwrap()) .unwrap() .into(); info!("Mode = {:?}", mode); let extra_mode: Vec<_> = match matches.values_of("EXTRA_MODE") { None => Vec::with_capacity(0), Some(x) => x .map(|x| ExtraMode::from(ExtraMode::from_str(x).unwrap())) .collect(), }; info!("ExtraMode = {:?}", extra_mode); let endian: Option<Endian> = matches .value_of("ENDIAN") .map(|x| Endian::from_str(x).expect_exit()); info!("Endian = {:?}", endian); let address = u64::from_str_radix(matches.value_of("address").unwrap_or("1000"), 16).expect_exit(); info!("Address = 0x{:x}", address); let input_bytes = if is_hex { unhexed_bytes(direct_input_bytes) } else { direct_input_bytes }; disasm( arch, mode, extra_mode.iter().map(|x| *x), endian, input_bytes.as_slice(), address, show_detail, ); }
unwrap_or("") ).is_ok(); if show_detail { let detail = cs.insn_detail(&i).expect("Failed to get insn detail"); let output: &[(&str, String)] = &[ ("insn id:", format!("{:?}", i.id().0)), ("read regs:", reg_names(&cs, detail.regs_read())), ("write regs:", reg_names(&cs, detail.regs_write())), ("insn groups:", group_names(&cs, detail.groups())), ]; for &(ref name, ref message) in output.iter() { writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok(); } } } } fn main() { let _arches: Vec<String> = Arch::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect(); let _modes: Vec<String> = Mode::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect(); let _extra_modes: Vec<String> = ExtraMode::variants() .iter() .map(|x| format!("{}", x).to_lowercase()) .collect(); let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect(); let matches = App::new("capstone-rs disassembler tool") .about("Disassembles binary file") .arg( Arg::with_name("file") .short("f") .long("file") .help("input file with binary instructions") .takes_value(true), ) .arg( Arg::with_name("stdin") .short("s") .long("stdin") .help("read binary instructions from stdin") .takes_value(false), ) .arg( Arg::with_name("code") .short("c") .long("code") .help("instruction bytes (implies --hex)") .takes_value(true), ) .arg( Arg::with_name("address") .short("r") .long("addr") .help("address of code") .takes_value(true), ) .arg( Arg::with_name("v") .short("v") .multiple(true) .help("Sets the level of verbosity"), ) .arg( Arg::with_name("hex") .short("x") .long("hex") .help("Treat input has hex; only select characters that are [a-fA-F0-9]") .takes_value(false), ) .arg( Arg::with_name("DETAIL") .short("d") .long("detail") .help("Print details about instructions") .takes_value(false), ) .arg( Arg::with_name("ARCH") 
.short("a") .long("arch") .help("Architecture") .takes_value(true) .required(true) .possible_values(arches.as_slice()) .case_insensitive(true), ) .arg( Arg::with_name("MODE") .short("m") .long("mode") .help("Mode") .takes_value(true) .required(true) .possible_values(modes.as_slice()) .case_insensitive(true), ) .arg( Arg::with_name("EXTRA_MODE") .short("e") .long("extra") .help("Extra Mode") .takes_value(true) .required(false) .possible_values(extra_modes.as_slice()) .case_insensitive(true) .multiple(true), ) .arg( Arg::with_name("ENDIAN") .short("n") .long("endian") .help("Endianness") .takes_value(true) .required(false) .possible_values(&["little", "big"]) .case_insensitive(true), ) .group( ArgGroup::with_name("INPUT") .arg("file") .arg("stdin") .arg
random
[ { "content": "/// Disassemble code and print information\n\nfn arch_example(cs: &mut Capstone, code: &[u8]) -> CsResult<()> {\n\n let insns = cs.disasm_all(code, 0x1000)?;\n\n println!(\"Found {} instructions\", insns.len());\n\n for i in insns.iter() {\n\n println!();\n\n println!(\"{}\", i);\n\n\n\n let detail: InsnDetail = cs.insn_detail(&i)?;\n\n let arch_detail: ArchDetail = detail.arch_detail();\n\n let ops = arch_detail.operands();\n\n\n\n let output: &[(&str, String)] = &[\n\n (\"insn id:\", format!(\"{:?}\", i.id().0)),\n\n (\"bytes:\", format!(\"{:?}\", i.bytes())),\n\n (\"read regs:\", reg_names(&cs, detail.regs_read())),\n\n (\"write regs:\", reg_names(&cs, detail.regs_write())),\n\n (\"insn groups:\", group_names(&cs, detail.groups())),\n\n ];\n\n\n\n for &(ref name, ref message) in output.iter() {\n", "file_path": "capstone-rs/examples/demo.rs", "rank": 0, "score": 455255.8621551578 }, { "content": "fn test_insns_match(cs: &mut Capstone, insns: &[(&str, &[u8])]) {\n\n for &(mnemonic, bytes) in insns.iter() {\n\n let insns = cs.disasm_all(bytes, 0x1000).unwrap();\n\n assert_eq!(insns.len(), 1);\n\n let insn = insns.iter().next().unwrap();\n\n assert_eq!(insn.mnemonic(), Some(mnemonic));\n\n }\n\n}\n\n\n", "file_path": "capstone-rs/src/test.rs", "rank": 1, "score": 368338.6473680629 }, { "content": "/// Print register names\n\nfn reg_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<RegId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n", "file_path": "capstone-rs/examples/demo.rs", "rank": 3, "score": 368139.31459637143 }, { "content": "/// Print instruction group names\n\nfn group_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<InsnGroupId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n", "file_path": 
"capstone-rs/examples/demo.rs", "rank": 5, "score": 355318.59866536176 }, { "content": "fn test_arch_mode_endian_insns_detail<T>(\n\n cs: &mut Capstone,\n\n arch: Arch,\n\n mode: Mode,\n\n endian: Option<Endian>,\n\n extra_mode: &[ExtraMode],\n\n insns: &[DetailedInsnInfo<T>],\n\n) where\n\n T: Into<ArchOperand> + Clone,\n\n{\n\n let extra_mode = extra_mode.iter().map(|x| *x);\n\n let mut cs_raw = Capstone::new_raw(arch, mode, extra_mode, endian).unwrap();\n\n\n\n instructions_match_detail(&mut cs_raw, insns, true);\n\n instructions_match_detail(cs, insns, true);\n\n}\n\n\n", "file_path": "capstone-rs/src/test.rs", "rank": 6, "score": 350375.98655119166 }, { "content": "/// Provides architecture-specific details about an instruction\n\npub trait DetailsArchInsn: PartialEq + Debug {\n\n type Operand: Into<ArchOperand> + Default + Clone + Debug + PartialEq;\n\n type OperandIterator: Iterator<Item = Self::Operand>;\n\n\n\n fn operands(&self) -> Self::OperandIterator;\n\n}\n\n\n\n/// Define PartialEq for a type given representation getter methods\n\nmacro_rules! 
impl_PartialEq_repr_fields {\n\n // With generic parameters\n\n (\n\n $name:ty [ $( $lifetime:tt ),* ];\n\n $( $field:ident),*\n\n ) => {\n\n impl<$( $lifetime ),*> ::std::cmp::PartialEq for $name {\n\n fn eq(&self, other: &Self) -> bool {\n\n $(\n\n if self.$field() != other.$field() {\n\n return false;\n\n }\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 8, "score": 305992.599135774 }, { "content": "/// Implies that a `CapstoneBuilder` architecture has a configurable endianness\n\npub trait BuildsCapstoneEndian<ArchMode>: BuildsCapstone<ArchMode> {\n\n /// Set architecture endianness\n\n fn endian(self, endian: Endian) -> Self;\n\n}\n\n\n\n/// Contains builder-pattern implementations\n\npub(crate) mod arch_builder {\n\n use super::*;\n\n\n\n arch_info_base!(define_arch_builder);\n\n}\n\n\n\n/// Builds `Capstone` object\n\n#[derive(Debug)]\n\npub struct CapstoneBuilder(\n\n /// Hidden field to prevent users from instantiating `CapstoneBuilder`\n\n PhantomData<()>,\n\n);\n\n\n\nimpl CapstoneBuilder {\n\n /// Create a `CapstoneBuilder`\n\n pub(crate) fn new() -> Self {\n\n CapstoneBuilder(PhantomData)\n\n }\n\n}\n\n\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 9, "score": 299872.7094081907 }, { "content": "fn test_arch_mode_endian_insns(\n\n cs: &mut Capstone,\n\n arch: Arch,\n\n mode: Mode,\n\n endian: Option<Endian>,\n\n extra_mode: &[ExtraMode],\n\n insns: &[(&str, &[u8])],\n\n) {\n\n let expected_insns: Vec<(&str, &[u8])> = insns\n\n .iter()\n\n .map(|&(mnemonic, bytes)| (mnemonic, bytes))\n\n .collect();\n\n\n\n let mut cs_raw =\n\n Capstone::new_raw(arch, mode, extra_mode.iter().map(|x| *x), endian).unwrap();\n\n let mut cs_raw_endian_set =\n\n Capstone::new_raw(arch, mode, extra_mode.iter().map(|x| *x), None).unwrap();\n\n if let Some(some_endian) = endian {\n\n cs_raw_endian_set\n\n .set_endian(some_endian)\n\n .expect(\"Failed to set endianness\");\n\n }\n\n\n\n instructions_match(cs, expected_insns.as_slice(), true);\n\n 
instructions_match(&mut cs_raw, expected_insns.as_slice(), true);\n\n instructions_match(&mut cs_raw_endian_set, expected_insns.as_slice(), true);\n\n}\n\n\n", "file_path": "capstone-rs/src/test.rs", "rank": 10, "score": 298398.9558034723 }, { "content": "inline static const char *ARMII_AddrModeToString(ARMII_AddrMode addrmode)\n\n{\n\n\tswitch (addrmode) {\n\n\t\tcase ARMII_AddrModeNone: return \"AddrModeNone\";\n\n\t\tcase ARMII_AddrMode1: return \"AddrMode1\";\n\n\t\tcase ARMII_AddrMode2: return \"AddrMode2\";\n\n\t\tcase ARMII_AddrMode3: return \"AddrMode3\";\n\n\t\tcase ARMII_AddrMode4: return \"AddrMode4\";\n\n\t\tcase ARMII_AddrMode5: return \"AddrMode5\";\n\n\t\tcase ARMII_AddrMode6: return \"AddrMode6\";\n\n\t\tcase ARMII_AddrModeT1_1: return \"AddrModeT1_1\";\n\n\t\tcase ARMII_AddrModeT1_2: return \"AddrModeT1_2\";\n\n\t\tcase ARMII_AddrModeT1_4: return \"AddrModeT1_4\";\n\n\t\tcase ARMII_AddrModeT1_s: return \"AddrModeT1_s\";\n\n\t\tcase ARMII_AddrModeT2_i12: return \"AddrModeT2_i12\";\n\n\t\tcase ARMII_AddrModeT2_i8: return \"AddrModeT2_i8\";\n\n\t\tcase ARMII_AddrModeT2_so: return \"AddrModeT2_so\";\n\n\t\tcase ARMII_AddrModeT2_pc: return \"AddrModeT2_pc\";\n\n\t\tcase ARMII_AddrModeT2_i8s4: return \"AddrModeT2_i8s4\";\n\n\t\tcase ARMII_AddrMode_i12: return \"AddrMode_i12\";\n\n\t}\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMBaseInfo.h", "rank": 11, "score": 252229.91028230145 }, { "content": "static DecodeStatus DecodeT2AddrModeSOReg(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 12, "score": 251484.91421990897 }, { "content": "static void printT2AddrModeSoRegOperand(MCInst *MI, unsigned OpNum, SStream *O);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.c", "rank": 13, "score": 246765.39401916947 }, { "content": "/// Implies that a `CapstoneBuilder` architecture has extra modes\n\npub trait BuildsCapstoneExtraMode<ArchMode, ArchExtraMode>: BuildsCapstone<ArchMode> {\n\n 
/// Set architecture endianness\n\n fn extra_mode<T: Iterator<Item = ArchExtraMode>>(self, extra_mode: T) -> Self;\n\n}\n\n\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 14, "score": 244035.07822944102 }, { "content": "/// Implies that a `CapstoneBuilder` has different syntax options\n\npub trait BuildsCapstoneSyntax<ArchMode, ArchSyntax>: BuildsCapstone<ArchMode> {\n\n /// Set the disassembly syntax\n\n fn syntax(self, syntax: ArchSyntax) -> Self;\n\n}\n\n\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 15, "score": 236781.3191201981 }, { "content": " public UnionArch arch;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 16, "score": 234865.4424829083 }, { "content": "#[test]\n\nfn test_arch_ppc_detail() {\n\n use arch::ppc::PpcOperand::*;\n\n use arch::ppc::PpcReg::*;\n\n use arch::ppc::*;\n\n use capstone_sys::ppc_op_mem;\n\n\n\n test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .ppc()\n\n .mode(ppc::ArchMode::Mode64)\n\n .endian(Endian::Big)\n\n .build()\n\n .unwrap(),\n\n Arch::PPC,\n\n Mode::Mode64,\n\n Some(Endian::Big),\n\n &[],\n\n &[\n\n // lwz r1, 0(0)\n\n DII::new(\n", "file_path": "capstone-rs/src/test.rs", "rank": 17, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_sparc_detail() {\n\n use arch::sparc::SparcOperand::*;\n\n use arch::sparc::SparcReg::*;\n\n use arch::sparc::*;\n\n use capstone_sys::sparc_op_mem;\n\n\n\n test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .sparc()\n\n .mode(sparc::ArchMode::Default)\n\n .build()\n\n .unwrap(),\n\n Arch::SPARC,\n\n Mode::Default,\n\n None,\n\n &[],\n\n &[\n\n // cmp %g1, %g2\n\n DII::new(\n\n \"cmp\",\n", "file_path": "capstone-rs/src/test.rs", "rank": 18, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_x86_detail() {\n\n use arch::x86::X86OperandType::*;\n\n use arch::x86::X86Reg::*;\n\n use arch::x86::*;\n\n use capstone_sys::*;\n\n\n\n // X86 16bit (Intel syntax)\n\n 
test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .x86()\n\n .mode(x86::ArchMode::Mode16)\n\n .build()\n\n .unwrap(),\n\n Arch::X86,\n\n Mode::Mode16,\n\n None,\n\n &[],\n\n &[\n\n // lea cx, word ptr [si + 0x32]\n\n DII::new(\n", "file_path": "capstone-rs/src/test.rs", "rank": 19, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_arm_detail() {\n\n use arch::arm::ArmOperandType::*;\n\n use arch::arm::*;\n\n use capstone_sys::arm_op_mem;\n\n\n\n let r0_op = ArmOperand {\n\n op_type: Reg(RegId(ArmReg::ARM_REG_R0 as RegIdInt)),\n\n ..Default::default()\n\n };\n\n\n\n test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .arm()\n\n .mode(arm::ArchMode::Arm)\n\n .build()\n\n .unwrap(),\n\n Arch::ARM,\n\n Mode::Arm,\n\n Some(Endian::Little),\n\n &[],\n", "file_path": "capstone-rs/src/test.rs", "rank": 20, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_xcore_detail() {\n\n use arch::xcore::XcoreOperand::*;\n\n use arch::xcore::XcoreReg::*;\n\n use arch::xcore::*;\n\n use capstone_sys::xcore_op_mem;\n\n\n\n test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .xcore()\n\n .mode(xcore::ArchMode::Default)\n\n .build()\n\n .unwrap(),\n\n Arch::XCORE,\n\n Mode::Default,\n\n None,\n\n &[],\n\n &[\n\n // get r11, ed\n\n DII::new(\n\n \"get\",\n", "file_path": "capstone-rs/src/test.rs", "rank": 21, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_arm64_detail() {\n\n use arch::arm64::Arm64OperandType::*;\n\n use arch::arm64::Arm64Pstate::*;\n\n use arch::arm64::Arm64Reg::*;\n\n use arch::arm64::Arm64Sysreg::*;\n\n use arch::arm64::Arm64Vas::*;\n\n use arch::arm64::Arm64Vess::*;\n\n use arch::arm64::*;\n\n use capstone_sys::arm64_op_mem;\n\n\n\n let s0 = Arm64Operand {\n\n op_type: Reg(RegId(ARM64_REG_S0 as RegIdInt)),\n\n ..Default::default()\n\n };\n\n let x0 = Arm64Operand {\n\n op_type: Reg(RegId(ARM64_REG_X0 as RegIdInt)),\n\n ..Default::default()\n\n };\n\n let x1 = Arm64Operand 
{\n\n op_type: Reg(RegId(ARM64_REG_X1 as RegIdInt)),\n", "file_path": "capstone-rs/src/test.rs", "rank": 22, "score": 234783.1503565685 }, { "content": "#[test]\n\nfn test_arch_mips_detail() {\n\n use arch::mips::MipsOperand::*;\n\n use arch::mips::*;\n\n use capstone_sys::mips_op_mem;\n\n\n\n test_arch_mode_endian_insns_detail(\n\n &mut Capstone::new()\n\n .mips()\n\n .mode(mips::ArchMode::Mips32R6)\n\n .build()\n\n .unwrap(),\n\n Arch::MIPS,\n\n Mode::Mips32R6,\n\n Some(Endian::Little),\n\n &[],\n\n &[\n\n DII::new(\n\n \"ori\",\n\n b\"\\x56\\x34\\x21\\x34\",\n\n &[Reg(RegId(3)), Reg(RegId(3)), Imm(13398)],\n", "file_path": "capstone-rs/src/test.rs", "rank": 23, "score": 234783.1503565685 }, { "content": "/// Builds a `Capstone` struct\n\npub trait BuildsCapstone<ArchMode> {\n\n /// Set the disassembly mode\n\n fn mode(self, mode: ArchMode) -> Self;\n\n\n\n /// Enable detailed output\n\n fn detail(self, enable_detail: bool) -> Self;\n\n\n\n /// Get final `Capstone`\n\n fn build<'a>(self) -> CsResult<Capstone>;\n\n}\n\n\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 24, "score": 232420.51732572902 }, { "content": "fn instructions_match_detail<T>(\n\n cs: &mut Capstone,\n\n info: &[DetailedInsnInfo<T>],\n\n has_default_syntax: bool,\n\n) where\n\n T: Into<ArchOperand> + Clone,\n\n{\n\n let insns_buf: Vec<u8> = info\n\n .iter()\n\n .flat_map(|ref info| info.bytes)\n\n .map(|x| *x)\n\n .collect();\n\n\n\n // Details required to get groups information\n\n cs.set_detail(true).unwrap();\n\n\n\n // todo(tmfink) eliminate check\n\n if info.len() == 0 {\n\n // Input was empty, which will cause disasm_all() to fail\n\n return;\n", "file_path": "capstone-rs/src/test.rs", "rank": 25, "score": 228995.14571231543 }, { "content": "CAPSTONE_EXPORT\n\nconst char * CAPSTONE_API cs_reg_name(csh ud, unsigned int reg)\n\n{\n\n\tstruct cs_struct *handle = (struct cs_struct *)(uintptr_t)ud;\n\n\n\n\tif (!handle || handle->reg_name == NULL) {\n\n\t\treturn 
NULL;\n\n\t}\n\n\n\n\treturn handle->reg_name(ud, reg);\n", "file_path": "capstone-sys/capstone/cs.c", "rank": 26, "score": 212214.97039419363 }, { "content": "/// Disassemble code and print information\n\nfn arch_bench<T: Iterator<Item = ExtraMode>>(\n\n code: &[u8],\n\n arch: Arch,\n\n mode: Mode,\n\n extra_mode: T,\n\n endian: Option<Endian>,\n\n detail: bool,\n\n) {\n\n let mut cs =\n\n Capstone::new_raw(arch, mode, extra_mode, endian).expect(\"failed to make capstone\");\n\n cs.set_detail(detail).expect(\"failed to set detail\");\n\n\n\n let insns = cs.disasm_all(code, 0x1000).expect(\"failed to disassemble\");\n\n for i in insns.iter() {\n\n black_box(i);\n\n }\n\n}\n\n\n", "file_path": "capstone-rs/benches/my_benchmark.rs", "rank": 27, "score": 211673.5611455434 }, { "content": "static cs_mode cs_arch_disallowed_mode_mask[MAX_ARCH] = {\n\n#ifdef CAPSTONE_HAS_ARM\n\n\t~(CS_MODE_LITTLE_ENDIAN | CS_MODE_ARM | CS_MODE_V8 | CS_MODE_MCLASS\n\n\t | CS_MODE_THUMB | CS_MODE_BIG_ENDIAN),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_ARM64\n\n\t~(CS_MODE_LITTLE_ENDIAN | CS_MODE_ARM | CS_MODE_BIG_ENDIAN),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_MIPS\n\n\t~(CS_MODE_LITTLE_ENDIAN | CS_MODE_32 | CS_MODE_64 | CS_MODE_MICRO\n\n\t | CS_MODE_MIPS32R6 | CS_MODE_BIG_ENDIAN | CS_MODE_MIPS2 | CS_MODE_MIPS3),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_X86\n\n\t~(CS_MODE_LITTLE_ENDIAN | CS_MODE_32 | CS_MODE_64 | CS_MODE_16),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_POWERPC\n\n\t~(CS_MODE_LITTLE_ENDIAN | CS_MODE_32 | CS_MODE_64 | CS_MODE_BIG_ENDIAN\n\n\t | CS_MODE_QPX),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_SPARC\n\n\t~(CS_MODE_BIG_ENDIAN | CS_MODE_V9),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_SYSZ\n\n\t~(CS_MODE_BIG_ENDIAN),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_XCORE\n\n\t~(CS_MODE_BIG_ENDIAN),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_M68K\n\n\t~(CS_MODE_BIG_ENDIAN | CS_MODE_M68K_000 | CS_MODE_M68K_010 | 
CS_MODE_M68K_020\n\n\t | CS_MODE_M68K_030 | CS_MODE_M68K_040 | CS_MODE_M68K_060),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_TMS320C64X\n\n\t~(CS_MODE_BIG_ENDIAN),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_M680X\n\n\t~(CS_MODE_M680X_6301 | CS_MODE_M680X_6309 | CS_MODE_M680X_6800\n\n\t | CS_MODE_M680X_6801 | CS_MODE_M680X_6805 | CS_MODE_M680X_6808\n\n\t | CS_MODE_M680X_6809 | CS_MODE_M680X_6811 | CS_MODE_M680X_CPU12\n\n\t | CS_MODE_M680X_HCS08),\n\n#else\n\n\t0,\n\n#endif\n\n#ifdef CAPSTONE_HAS_EVM\n\n\t0,\n\n#else\n\n\t0,\n\n#endif\n", "file_path": "capstone-sys/capstone/cs.c", "rank": 28, "score": 208233.192198557 }, { "content": " public String cs_reg_name(NativeLong csh, int id);\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 29, "score": 202405.48288167748 }, { "content": "\tGetName_t reg_name;\n", "file_path": "capstone-sys/capstone/cs_priv.h", "rank": 30, "score": 199707.0321337809 }, { "content": "static void print_string_hex(unsigned char *str, size_t len)\n\n{\n\n\tunsigned char *c;\n\n\n\n\tprintf(\"Code: \");\n\n\tfor (c = str; c < str + len; c++) {\n\n\t\tprintf(\"0x%02x \", *c & 0xff);\n\n\t}\n\n\tprintf(\"\\n\");\n", "file_path": "capstone-sys/capstone/tests/test_detail.c", "rank": 31, "score": 197144.2983145792 }, { "content": "inline static const char *SPARCCondCodeToString(sparc_cc CC)\n\n{\n\n\tswitch (CC) {\n\n\t\tdefault:\treturn NULL;\t// unreachable\n\n\t\tcase SPARC_CC_ICC_A: return \"a\";\n\n\t\tcase SPARC_CC_ICC_N: return \"n\";\n\n\t\tcase SPARC_CC_ICC_NE: return \"ne\";\n\n\t\tcase SPARC_CC_ICC_E: return \"e\";\n\n\t\tcase SPARC_CC_ICC_G: return \"g\";\n\n\t\tcase SPARC_CC_ICC_LE: return \"le\";\n\n\t\tcase SPARC_CC_ICC_GE: return \"ge\";\n\n\t\tcase SPARC_CC_ICC_L: return \"l\";\n\n\t\tcase SPARC_CC_ICC_GU: return \"gu\";\n\n\t\tcase SPARC_CC_ICC_LEU: return \"leu\";\n\n\t\tcase SPARC_CC_ICC_CC: return \"cc\";\n\n\t\tcase SPARC_CC_ICC_CS: return \"cs\";\n\n\t\tcase SPARC_CC_ICC_POS: 
return \"pos\";\n\n\t\tcase SPARC_CC_ICC_NEG: return \"neg\";\n\n\t\tcase SPARC_CC_ICC_VC: return \"vc\";\n\n\t\tcase SPARC_CC_ICC_VS: return \"vs\";\n\n\n\n\t\tcase SPARC_CC_FCC_A: return \"a\";\n\n\t\tcase SPARC_CC_FCC_N: return \"n\";\n\n\t\tcase SPARC_CC_FCC_U: return \"u\";\n\n\t\tcase SPARC_CC_FCC_G: return \"g\";\n\n\t\tcase SPARC_CC_FCC_UG: return \"ug\";\n\n\t\tcase SPARC_CC_FCC_L: return \"l\";\n\n\t\tcase SPARC_CC_FCC_UL: return \"ul\";\n\n\t\tcase SPARC_CC_FCC_LG: return \"lg\";\n\n\t\tcase SPARC_CC_FCC_NE: return \"ne\";\n\n\t\tcase SPARC_CC_FCC_E: return \"e\";\n\n\t\tcase SPARC_CC_FCC_UE: return \"ue\";\n\n\t\tcase SPARC_CC_FCC_GE: return \"ge\";\n\n\t\tcase SPARC_CC_FCC_UGE: return \"uge\";\n\n\t\tcase SPARC_CC_FCC_LE: return \"le\";\n\n\t\tcase SPARC_CC_FCC_ULE: return \"ule\";\n\n\t\tcase SPARC_CC_FCC_O: return \"o\";\n\n\t}\n", "file_path": "capstone-sys/capstone/arch/Sparc/Sparc.h", "rank": 32, "score": 194103.7267852154 }, { "content": "\tpublic static final int X86_REG_CS = 11;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 33, "score": 193881.45844517354 }, { "content": "static const unsigned FRegs[] = {\n\n\tPPC_F0, PPC_F1, PPC_F2, PPC_F3,\n\n\tPPC_F4, PPC_F5, PPC_F6, PPC_F7,\n\n\tPPC_F8, PPC_F9, PPC_F10, PPC_F11,\n\n\tPPC_F12, PPC_F13, PPC_F14, PPC_F15,\n\n\tPPC_F16, PPC_F17, PPC_F18, PPC_F19,\n\n\tPPC_F20, PPC_F21, PPC_F22, PPC_F23,\n\n\tPPC_F24, PPC_F25, PPC_F26, PPC_F27,\n\n\tPPC_F28, PPC_F29, PPC_F30, PPC_F31\n", "file_path": "capstone-sys/capstone/arch/PowerPC/PPCDisassembler.c", "rank": 34, "score": 193365.44604246167 }, { "content": "const char *AArch64_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/AArch64/AArch64Mapping.h", "rank": 35, "score": 193322.5355056284 }, { "content": "const char *Sparc_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/Sparc/SparcMapping.h", "rank": 36, "score": 193322.5355056284 }, { 
"content": "const char *TMS320C64x_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/TMS320C64x/TMS320C64xMapping.h", "rank": 37, "score": 193322.5355056284 }, { "content": "const char *Mips_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/Mips/MipsMapping.h", "rank": 38, "score": 193322.5355056284 }, { "content": "const char *ARM_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMMapping.h", "rank": 39, "score": 193322.5355056284 }, { "content": "const char *X86_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/X86/X86Mapping.h", "rank": 40, "score": 193322.5355056284 }, { "content": "const char *TMS320C64x_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/TMS320C64x/TMS320C64xMapping.c", "rank": 41, "score": 193310.86219783287 }, { "content": "const char *X86_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tcs_struct *ud = (cs_struct *)handle;\n\n\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\tif (reg == X86_REG_EFLAGS) {\n\n\t\tif (ud->mode & CS_MODE_32)\n\n\t\t\treturn \"eflags\";\n\n\t\tif (ud->mode & CS_MODE_64)\n\n\t\t\treturn \"rflags\";\n\n\t}\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/X86/X86Mapping.c", "rank": 42, "score": 193310.86219783287 }, { "content": "const char *Sparc_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/Sparc/SparcMapping.c", "rank": 43, "score": 193310.86219783287 
}, { "content": "const char *ARM_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMMapping.c", "rank": 44, "score": 193310.86219783287 }, { "content": "const char *AArch64_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/AArch64/AArch64Mapping.c", "rank": 45, "score": 193310.86219783287 }, { "content": "static name_map reg_name_maps[] = {\n\n\t{ TMS320C64X_REG_INVALID, NULL },\n\n\n\n\t{ TMS320C64X_REG_AMR, \"amr\" },\n\n\t{ TMS320C64X_REG_CSR, \"csr\" },\n\n\t{ TMS320C64X_REG_DIER, \"dier\" },\n\n\t{ TMS320C64X_REG_DNUM, \"dnum\" },\n\n\t{ TMS320C64X_REG_ECR, \"ecr\" },\n\n\t{ TMS320C64X_REG_GFPGFR, \"gfpgfr\" },\n\n\t{ TMS320C64X_REG_GPLYA, \"gplya\" },\n\n\t{ TMS320C64X_REG_GPLYB, \"gplyb\" },\n\n\t{ TMS320C64X_REG_ICR, \"icr\" },\n\n\t{ TMS320C64X_REG_IER, \"ier\" },\n\n\t{ TMS320C64X_REG_IERR, \"ierr\" },\n\n\t{ TMS320C64X_REG_ILC, \"ilc\" },\n\n\t{ TMS320C64X_REG_IRP, \"irp\" },\n\n\t{ TMS320C64X_REG_ISR, \"isr\" },\n\n\t{ TMS320C64X_REG_ISTP, \"istp\" },\n\n\t{ TMS320C64X_REG_ITSR, \"itsr\" },\n\n\t{ TMS320C64X_REG_NRP, \"nrp\" },\n\n\t{ TMS320C64X_REG_NTSR, \"ntsr\" },\n\n\t{ TMS320C64X_REG_REP, \"rep\" },\n\n\t{ TMS320C64X_REG_RILC, \"rilc\" },\n\n\t{ TMS320C64X_REG_SSR, \"ssr\" },\n\n\t{ TMS320C64X_REG_TSCH, \"tsch\" },\n\n\t{ TMS320C64X_REG_TSCL, \"tscl\" },\n\n\t{ TMS320C64X_REG_TSR, \"tsr\" },\n\n\t{ TMS320C64X_REG_A0, \"a0\" },\n\n\t{ TMS320C64X_REG_A1, \"a1\" },\n\n\t{ TMS320C64X_REG_A2, \"a2\" },\n\n\t{ TMS320C64X_REG_A3, \"a3\" },\n\n\t{ TMS320C64X_REG_A4, \"a4\" },\n\n\t{ TMS320C64X_REG_A5, \"a5\" },\n\n\t{ TMS320C64X_REG_A6, \"a6\" 
},\n\n\t{ TMS320C64X_REG_A7, \"a7\" },\n\n\t{ TMS320C64X_REG_A8, \"a8\" },\n\n\t{ TMS320C64X_REG_A9, \"a9\" },\n\n\t{ TMS320C64X_REG_A10, \"a10\" },\n\n\t{ TMS320C64X_REG_A11, \"a11\" },\n\n\t{ TMS320C64X_REG_A12, \"a12\" },\n\n\t{ TMS320C64X_REG_A13, \"a13\" },\n\n\t{ TMS320C64X_REG_A14, \"a14\" },\n\n\t{ TMS320C64X_REG_A15, \"a15\" },\n\n\t{ TMS320C64X_REG_A16, \"a16\" },\n\n\t{ TMS320C64X_REG_A17, \"a17\" },\n\n\t{ TMS320C64X_REG_A18, \"a18\" },\n\n\t{ TMS320C64X_REG_A19, \"a19\" },\n\n\t{ TMS320C64X_REG_A20, \"a20\" },\n\n\t{ TMS320C64X_REG_A21, \"a21\" },\n\n\t{ TMS320C64X_REG_A22, \"a22\" },\n\n\t{ TMS320C64X_REG_A23, \"a23\" },\n\n\t{ TMS320C64X_REG_A24, \"a24\" },\n\n\t{ TMS320C64X_REG_A25, \"a25\" },\n\n\t{ TMS320C64X_REG_A26, \"a26\" },\n\n\t{ TMS320C64X_REG_A27, \"a27\" },\n\n\t{ TMS320C64X_REG_A28, \"a28\" },\n\n\t{ TMS320C64X_REG_A29, \"a29\" },\n\n\t{ TMS320C64X_REG_A30, \"a30\" },\n\n\t{ TMS320C64X_REG_A31, \"a31\" },\n\n\t{ TMS320C64X_REG_B0, \"b0\" },\n\n\t{ TMS320C64X_REG_B1, \"b1\" },\n\n\t{ TMS320C64X_REG_B2, \"b2\" },\n\n\t{ TMS320C64X_REG_B3, \"b3\" },\n\n\t{ TMS320C64X_REG_B4, \"b4\" },\n\n\t{ TMS320C64X_REG_B5, \"b5\" },\n\n\t{ TMS320C64X_REG_B6, \"b6\" },\n\n\t{ TMS320C64X_REG_B7, \"b7\" },\n\n\t{ TMS320C64X_REG_B8, \"b8\" },\n\n\t{ TMS320C64X_REG_B9, \"b9\" },\n\n\t{ TMS320C64X_REG_B10, \"b10\" },\n\n\t{ TMS320C64X_REG_B11, \"b11\" },\n\n\t{ TMS320C64X_REG_B12, \"b12\" },\n\n\t{ TMS320C64X_REG_B13, \"b13\" },\n\n\t{ TMS320C64X_REG_B14, \"b14\" },\n\n\t{ TMS320C64X_REG_B15, \"b15\" },\n\n\t{ TMS320C64X_REG_B16, \"b16\" },\n\n\t{ TMS320C64X_REG_B17, \"b17\" },\n\n\t{ TMS320C64X_REG_B18, \"b18\" },\n\n\t{ TMS320C64X_REG_B19, \"b19\" },\n\n\t{ TMS320C64X_REG_B20, \"b20\" },\n\n\t{ TMS320C64X_REG_B21, \"b21\" },\n\n\t{ TMS320C64X_REG_B22, \"b22\" },\n\n\t{ TMS320C64X_REG_B23, \"b23\" },\n\n\t{ TMS320C64X_REG_B24, \"b24\" },\n\n\t{ TMS320C64X_REG_B25, \"b25\" },\n\n\t{ TMS320C64X_REG_B26, \"b26\" },\n\n\t{ TMS320C64X_REG_B27, \"b27\" },\n\n\t{ 
TMS320C64X_REG_B28, \"b28\" },\n\n\t{ TMS320C64X_REG_B29, \"b29\" },\n\n\t{ TMS320C64X_REG_B30, \"b30\" },\n\n\t{ TMS320C64X_REG_B31, \"b31\" },\n\n\t{ TMS320C64X_REG_PCE1, \"pce1\" },\n", "file_path": "capstone-sys/capstone/arch/TMS320C64x/TMS320C64xMapping.c", "rank": 46, "score": 193310.86219783287 }, { "content": "const char *Mips_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/Mips/MipsMapping.c", "rank": 47, "score": 193310.86219783287 }, { "content": "\tpublic static final int M680X_REG_E = 3;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/M680x_const.java", "rank": 48, "score": 192906.91837224455 }, { "content": "\tpublic static final int M680X_REG_F = 4;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/M680x_const.java", "rank": 49, "score": 192902.0065966938 }, { "content": "\tpublic static final int X86_REG_R11B = 221;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 50, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R12B = 222;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 51, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R9B = 219;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 52, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R8B = 218;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 53, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R14B = 224;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 54, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R10B = 220;\n", 
"file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 55, "score": 192898.74017428918 }, { "content": "\tpublic static final int M680X_REG_B = 2;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/M680x_const.java", "rank": 56, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R15B = 225;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 57, "score": 192898.74017428918 }, { "content": "\tpublic static final int X86_REG_R13B = 223;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/X86_const.java", "rank": 58, "score": 192898.74017428918 }, { "content": "static DecodeStatus DecodeThumbAddrModeIS(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 59, "score": 192162.64954016166 }, { "content": " public static final int CS_ERR_HANDLE = 3;\t// Invalid handle\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 60, "score": 191505.58929773624 }, { "content": " public static final int CS_ERR_DETAIL = 7; // Invalid/unsupported option: cs_option()\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 61, "score": 191469.61999894906 }, { "content": "const char *PPC_reg_name(csh handle, unsigned int reg);\n", "file_path": "capstone-sys/capstone/arch/PowerPC/PPCMapping.h", "rank": 62, "score": 191463.454879703 }, { "content": "static void printRegName(SStream *OS, unsigned RegNo)\n\n{\n\n\tSStream_concat0(OS, \"%\");\n\n\tSStream_concat0(OS, getRegisterName(RegNo));\n", "file_path": "capstone-sys/capstone/arch/Sparc/SparcInstPrinter.c", "rank": 63, "score": 191451.7815719075 }, { "content": "static void printRegName(cs_struct *h, SStream *OS, unsigned RegNo);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.c", "rank": 64, "score": 191451.7815719075 }, { "content": "static void printRegName(SStream *OS, unsigned 
RegNo)\n\n{\n\n\tSStream_concat(OS, \"$%s\", getRegisterName(RegNo));\n", "file_path": "capstone-sys/capstone/arch/Mips/MipsInstPrinter.c", "rank": 65, "score": 191451.7815719075 }, { "content": "const char *PPC_reg_name(csh handle, unsigned int reg)\n\n{\n\n#ifndef CAPSTONE_DIET\n\n\tif (reg >= ARR_SIZE(reg_name_maps))\n\n\t\treturn NULL;\n\n\n\n\treturn reg_name_maps[reg].name;\n\n#else\n\n\treturn NULL;\n\n#endif\n", "file_path": "capstone-sys/capstone/arch/PowerPC/PPCMapping.c", "rank": 66, "score": 191451.7815719075 }, { "content": "static inline unsigned getSORegOpc(ARM_AM_ShiftOpc ShOp, unsigned Imm)\n\n{\n\n\treturn ShOp | (Imm << 3);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMAddressingModes.h", "rank": 67, "score": 191427.3799944314 }, { "content": "static inline unsigned getSORegOffset(unsigned Op)\n\n{\n\n\treturn Op >> 3;\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMAddressingModes.h", "rank": 68, "score": 191427.3799944314 }, { "content": " public static final int CS_ERR_MODE = 5;\t // Invalid/unsupported mode\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 69, "score": 191426.21682089922 }, { "content": "\tpublic static final int M68K_AM_REG_DIRECT_ADDR = 2;\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/M68k_const.java", "rank": 70, "score": 191346.80062469235 }, { "content": "inline static const char *getCondCodeName(A64CC_CondCode CC)\n\n{\n\n\tswitch (CC) {\n\n\t\tdefault: return NULL;\t// never reach\n\n\t\tcase A64CC_EQ: return \"eq\";\n\n\t\tcase A64CC_NE: return \"ne\";\n\n\t\tcase A64CC_HS: return \"hs\";\n\n\t\tcase A64CC_LO: return \"lo\";\n\n\t\tcase A64CC_MI: return \"mi\";\n\n\t\tcase A64CC_PL: return \"pl\";\n\n\t\tcase A64CC_VS: return \"vs\";\n\n\t\tcase A64CC_VC: return \"vc\";\n\n\t\tcase A64CC_HI: return \"hi\";\n\n\t\tcase A64CC_LS: return \"ls\";\n\n\t\tcase A64CC_GE: return \"ge\";\n\n\t\tcase A64CC_LT: return \"lt\";\n\n\t\tcase A64CC_GT: return 
\"gt\";\n\n\t\tcase A64CC_LE: return \"le\";\n\n\t\tcase A64CC_AL: return \"al\";\n\n\t\tcase A64CC_NV: return \"nv\";\n\n\t}\n", "file_path": "capstone-sys/capstone/arch/AArch64/AArch64BaseInfo.h", "rank": 71, "score": 190368.82522400035 }, { "content": " public static final int CS_ERR_ARCH = 2;\t // Unsupported architecture\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 72, "score": 190364.5822619911 }, { "content": "static void printAddrModeTBB(MCInst *MI, unsigned OpNum, SStream *O);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.c", "rank": 73, "score": 190332.4382766421 }, { "content": "static DecodeStatus DecodeT2AddrModeImm8(MCInst *Inst, unsigned Val,\n\n\t\tuint64_t Address, const void *Decoder)\n\n{\n\n\tDecodeStatus S = MCDisassembler_Success;\n\n\n\n\tunsigned Rn = fieldFromInstruction_4(Val, 9, 4);\n\n\tunsigned imm = fieldFromInstruction_4(Val, 0, 9);\n\n\n\n\t// Thumb stores cannot use PC as dest register.\n\n\tswitch (MCInst_getOpcode(Inst)) {\n\n\t\tcase ARM_t2STRT:\n\n\t\tcase ARM_t2STRBT:\n\n\t\tcase ARM_t2STRHT:\n\n\t\tcase ARM_t2STRi8:\n\n\t\tcase ARM_t2STRHi8:\n\n\t\tcase ARM_t2STRBi8:\n\n\t\t\tif (Rn == 15)\n\n\t\t\t\treturn MCDisassembler_Fail;\n\n\t\t\tbreak;\n\n\t\tdefault:\n\n\t\t\tbreak;\n\n\t}\n\n\n\n\t// Some instructions always use an additive offset.\n\n\tswitch (MCInst_getOpcode(Inst)) {\n\n\t\tcase ARM_t2LDRT:\n\n\t\tcase ARM_t2LDRBT:\n\n\t\tcase ARM_t2LDRHT:\n\n\t\tcase ARM_t2LDRSBT:\n\n\t\tcase ARM_t2LDRSHT:\n\n\t\tcase ARM_t2STRT:\n\n\t\tcase ARM_t2STRBT:\n\n\t\tcase ARM_t2STRHT:\n\n\t\t\timm |= 0x100;\n\n\t\t\tbreak;\n\n\t\tdefault:\n\n\t\t\tbreak;\n\n\t}\n\n\n\n\tif (!Check(&S, DecodeGPRRegisterClass(Inst, Rn, Address, Decoder)))\n\n\t\treturn MCDisassembler_Fail;\n\n\tif (!Check(&S, DecodeT2Imm8(Inst, imm, Address, Decoder)))\n\n\t\treturn MCDisassembler_Fail;\n\n\n\n\treturn S;\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 74, "score": 
190332.43827664212 }, { "content": "static DecodeStatus DecodeAddrModeImm12Operand(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 75, "score": 190332.43827664212 }, { "content": "static DecodeStatus DecodeT2AddrModeImm8s4(MCInst *Inst, unsigned Val,\n\n\t\tuint64_t Address, const void *Decoder)\n\n{\n\n\tDecodeStatus S = MCDisassembler_Success;\n\n\n\n\tunsigned Rn = fieldFromInstruction_4(Val, 9, 4);\n\n\tunsigned imm = fieldFromInstruction_4(Val, 0, 9);\n\n\n\n\tif (!Check(&S, DecodeGPRRegisterClass(Inst, Rn, Address, Decoder)))\n\n\t\treturn MCDisassembler_Fail;\n\n\tif (!Check(&S, DecodeT2Imm8S4(Inst, imm, Address, Decoder)))\n\n\t\treturn MCDisassembler_Fail;\n\n\n\n\treturn S;\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 76, "score": 190332.43827664212 }, { "content": "static DecodeStatus DecodeThumbAddrModeRR(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 77, "score": 190332.43827664212 }, { "content": "static DecodeStatus DecodeThumbAddrModeSP(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 78, "score": 190332.43827664212 }, { "content": "static DecodeStatus DecodeT2AddrModeImm12(MCInst *Inst, unsigned Val,\n\n\t\tuint64_t Address, const void *Decoder)\n\n{\n\n\tDecodeStatus S = MCDisassembler_Success;\n\n\n\n\tunsigned Rn = fieldFromInstruction_4(Val, 13, 4);\n\n\tunsigned imm = fieldFromInstruction_4(Val, 0, 12);\n\n\n\n\t// Thumb stores cannot use PC as dest register.\n\n\tswitch (MCInst_getOpcode(Inst)) {\n\n\t\tcase ARM_t2STRi12:\n\n\t\tcase ARM_t2STRBi12:\n\n\t\tcase ARM_t2STRHi12:\n\n\t\t\tif (Rn == 15)\n\n\t\t\t\treturn MCDisassembler_Fail;\n\n\t\tdefault:\n\n\t\t\tbreak;\n\n\t}\n\n\n\n\tif (!Check(&S, DecodeGPRRegisterClass(Inst, Rn, Address, Decoder)))\n\n\t\treturn MCDisassembler_Fail;\n\n\tMCOperand_CreateImm0(Inst, imm);\n\n\n\n\treturn S;\n", 
"file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 79, "score": 190332.43827664212 }, { "content": "static DecodeStatus DecodeThumbAddrModePC(MCInst *Inst, unsigned Val,\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMDisassembler.c", "rank": 80, "score": 190332.43827664212 }, { "content": "static void printAddrModeTBH(MCInst *MI, unsigned OpNum, SStream *O);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.c", "rank": 81, "score": 190332.4382766421 }, { "content": " public static final int CS_MODE_LITTLE_ENDIAN = 0; // little-endian mode (default mode)\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 82, "score": 189647.09381894142 }, { "content": " public static final int CS_MODE_BIG_ENDIAN = 1 << 31; // big-endian mode\n", "file_path": "capstone-sys/capstone/bindings/java/capstone/Capstone.java", "rank": 83, "score": 189641.11196232584 }, { "content": "void ARM_getRegName(cs_struct *handle, int value);\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.h", "rank": 84, "score": 189634.36398697228 }, { "content": "void ARM_getRegName(cs_struct *handle, int value)\n\n{\n\n\tif (value == CS_OPT_SYNTAX_NOREGNAME) {\n\n\t\thandle->get_regname = getRegisterName2;\n\n\t\thandle->reg_name = ARM_reg_name2;;\n\n\t} else {\n\n\t\thandle->get_regname = getRegisterName;\n\n\t\thandle->reg_name = ARM_reg_name;;\n\n\t}\n", "file_path": "capstone-sys/capstone/arch/ARM/ARMInstPrinter.c", "rank": 85, "score": 189628.4591984602 }, { "content": "static void printRegName(SStream *OS, unsigned RegNo)\n\n{\n\n\tSStream_concat0(OS, getRegisterName(RegNo));\n", "file_path": "capstone-sys/capstone/arch/X86/X86IntelInstPrinter.c", "rank": 86, "score": 189628.4591984602 }, { "content": "# Example\n\n\n\n```rust\n\nextern crate capstone;\n\n\n\nuse capstone::prelude::*;\n\n\n\nconst X86_CODE: &'static [u8] =\n\n 
b\"\\x55\\x48\\x8b\\x05\\xb8\\x13\\x00\\x00\\xe9\\x14\\x9e\\x08\\x00\\x45\\x31\\xe4\";\n\n\n\n/// Print register names\n\nfn reg_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<RegId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n\n/// Print instruction group names\n\nfn group_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<InsnGroupId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n\nfn example() -> CsResult<()> {\n\n let cs = Capstone::new()\n\n .x86()\n\n .mode(arch::x86::ArchMode::Mode64)\n\n .syntax(arch::x86::ArchSyntax::Att)\n\n .detail(true)\n\n .build()?;\n\n\n\n let insns = cs.disasm_all(X86_CODE, 0x1000)?;\n\n println!(\"Found {} instructions\", insns.len());\n\n for i in insns.iter() {\n\n println!(\"\");\n\n println!(\"{}\", i);\n\n\n\n let detail: InsnDetail = cs.insn_detail(&i)?;\n\n let output: &[(&str, String)] =\n\n &[\n\n (\"read regs:\", reg_names(&cs, detail.regs_read())),\n\n (\"write regs:\", reg_names(&cs, detail.regs_write())),\n\n (\"insn groups:\", group_names(&cs, detail.groups())),\n\n ];\n\n\n\n for &(ref name, ref message) in output.iter() {\n\n println!(\" {:12} {}\", name, message);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nfn main() {\n\n if let Err(err) = example() {\n\n println!(\"Error: {}\", err);\n\n }\n\n}\n\n```\n\n\n\nProduces:\n\n\n\n```\n\nFound 4 instructions\n\n\n\n0x1000: pushq %rbp\n\n read regs: rsp\n\n write regs: rsp\n\n insn groups: mode64\n\n\n\n0x1001: movq 0x13b8(%rip), %rax\n\n read regs:\n\n write regs:\n\n insn groups:\n\n\n\n0x1008: jmp 0x8ae21\n\n read regs:\n\n write regs:\n\n insn groups: jump\n\n\n\n0x100d: xorl %r12d, %r12d\n\n read regs:\n\n write regs: rflags\n\n insn groups:\n\n```\n\n\n\nTo see more demos, see the [`examples/`](examples) directory.\n\nMore 
complex demos welcome!\n\n\n", "file_path": "capstone-rs/README.md", "rank": 87, "score": 65.19158113196117 }, { "content": "# Example\n\n\n\n```rust\n\nextern crate capstone;\n\n\n\nuse capstone::prelude::*;\n\n\n\nconst X86_CODE: &'static [u8] =\n\n b\"\\x55\\x48\\x8b\\x05\\xb8\\x13\\x00\\x00\\xe9\\x14\\x9e\\x08\\x00\\x45\\x31\\xe4\";\n\n\n\n/// Print register names\n\nfn reg_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<RegId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.reg_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n\n/// Print instruction group names\n\nfn group_names<T, I>(cs: &Capstone, regs: T) -> String\n\nwhere\n\n T: Iterator<Item = I>,\n\n I: Into<InsnGroupId>,\n\n{\n\n let names: Vec<String> = regs.map(|x| cs.group_name(x.into()).unwrap()).collect();\n\n names.join(\", \")\n\n}\n\n\n\nfn example() -> CsResult<()> {\n\n let cs = Capstone::new()\n\n .x86()\n\n .mode(arch::x86::ArchMode::Mode64)\n\n .syntax(arch::x86::ArchSyntax::Att)\n\n .detail(true)\n\n .build()?;\n\n\n\n let insns = cs.disasm_all(X86_CODE, 0x1000)?;\n\n println!(\"Found {} instructions\", insns.len());\n\n for i in insns.iter() {\n\n println!(\"\");\n\n println!(\"{}\", i);\n\n\n\n let detail: InsnDetail = cs.insn_detail(&i)?;\n\n let output: &[(&str, String)] =\n\n &[\n\n (\"read regs:\", reg_names(&cs, detail.regs_read())),\n\n (\"write regs:\", reg_names(&cs, detail.regs_write())),\n\n (\"insn groups:\", group_names(&cs, detail.groups())),\n\n ];\n\n\n\n for &(ref name, ref message) in output.iter() {\n\n println!(\" {:12} {}\", name, message);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nfn main() {\n\n if let Err(err) = example() {\n\n println!(\"Error: {}\", err);\n\n }\n\n}\n\n```\n\n\n\nProduces:\n\n\n\n```\n\nFound 4 instructions\n\n\n\n0x1000: pushq %rbp\n\n read regs: rsp\n\n write regs: rsp\n\n insn groups: mode64\n\n\n\n0x1001: movq 0x13b8(%rip), %rax\n\n read regs:\n\n write regs:\n\n insn 
groups:\n\n\n\n0x1008: jmp 0x8ae21\n\n read regs:\n\n write regs:\n\n insn groups: jump\n\n\n\n0x100d: xorl %r12d, %r12d\n\n read regs:\n\n write regs: rflags\n\n insn groups:\n\n```\n\n\n\nTo see more demos, see the [`examples/`](examples) directory.\n\nMore complex demos welcome!\n\n\n", "file_path": "README.md", "rank": 88, "score": 65.19158113196117 }, { "content": "extern crate capstone;\n\n#[macro_use]\n\nextern crate criterion;\n\n\n\nuse capstone::prelude::*;\n\nuse capstone::{Arch, Endian, ExtraMode, Mode, NO_EXTRA_MODE};\n\nuse criterion::{black_box, Criterion};\n\n\n\nconst X86_CODE: &[u8] = include_bytes!(\"../test-inputs/x86_64.bin_ls.bin\");\n\n\n\n/// Disassemble code and print information\n", "file_path": "capstone-rs/benches/my_benchmark.rs", "rank": 90, "score": 55.451704738999375 }, { "content": " self\n\n }\n\n\n\n fn detail(mut self, enable_detail: bool) -> Self {\n\n self.is_detail = enable_detail;\n\n self\n\n }\n\n\n\n fn build(self) -> CsResult<Capstone> {\n\n let mode = match self.mode {\n\n Some(mode) => mode,\n\n None => {\n\n let msg: &'static str = concat!(\n\n \"Must specify mode for \",\n\n stringify!($arch),\n\n \"::ArchCapstoneBuilder with `mode()` method\",\n\n );\n\n return Err(Error::CustomError(msg));\n\n }\n\n };\n", "file_path": "capstone-rs/src/arch/mod.rs", "rank": 91, "score": 54.87038566033444 }, { "content": "#![no_main]\n\n#[macro_use] extern crate libfuzzer_sys;\n\nextern crate capstone;\n\n\n\nuse capstone::prelude::*;\n\n\n\nfuzz_target!(|data: &[u8]| {\n\n let mut cs = Capstone::new()\n\n .x86()\n\n .mode(arch::x86::ArchMode::Mode64)\n\n .detail(true)\n\n .build()\n\n .unwrap();\n\n for i in cs.disasm_all(data, 0x1000).unwrap().iter() {\n\n let detail: InsnDetail = cs.insn_detail(&i).unwrap();\n\n let arch_detail: ArchDetail = detail.arch_detail();\n\n arch_detail.operands().iter().for_each(drop);\n\n detail.regs_read().for_each(drop);\n\n detail.regs_write().for_each(drop);\n\n detail.groups().for_each(drop);\n\n 
}\n\n});\n", "file_path": "capstone-rs/fuzz/fuzz_targets/fuzz_target_disasm_x86_64.rs", "rank": 92, "score": 54.48514665092297 }, { "content": " #[doc = \"@return CS_ERR_OK on success, or other value on failure (refer to cs_err enum\"]\n\n #[doc = \"for detailed error).\"]\n\n pub fn cs_open(arch: cs_arch, mode: cs_mode, handle: *mut csh) -> cs_err::Type;\n\n}\n\nextern \"C\" {\n\n #[doc = \"Close CS handle: MUST do to release the handle when it is not used anymore.\"]\n\n #[doc = \"NOTE: this must be only called when there is no longer usage of Capstone,\"]\n\n #[doc = \"not even access to cs_insn array. The reason is the this API releases some\"]\n\n #[doc = \"cached memory, thus access to any Capstone API after cs_close() might crash\"]\n\n #[doc = \"your application.\"]\n\n #[doc = \"\"]\n\n #[doc = \"In fact,this API invalidate @handle by ZERO out its value (i.e *handle = 0).\"]\n\n #[doc = \"\"]\n\n #[doc = \"@handle: pointer to a handle returned by cs_open()\"]\n\n #[doc = \"\"]\n\n #[doc = \"@return CS_ERR_OK on success, or other value on failure (refer to cs_err enum\"]\n\n #[doc = \"for detailed error).\"]\n\n pub fn cs_close(handle: *mut csh) -> cs_err::Type;\n\n}\n\nextern \"C\" {\n", "file_path": "capstone-sys/pre_generated/capstone.rs", "rank": 94, "score": 51.297806818855136 }, { "content": "\t\tprintf \"\\tRegisters modified:\";\n\n\t\tArray.iter (print_reg handle) insn.regs_write;\n\n\t\tprintf \"\\n\";\n\n\tend;\n\n\tif (Array.length insn.groups) > 0 then\n\n\t\tprintf \"\\tgroups_count: %d\\n\" (Array.length insn.groups);\n\n\tprintf \"\\n\"\n\n\n\nlet print_arch x =\n\n\tlet (arch, mode, code, comment) = x in\n\n\t\tlet handle = cs_open arch mode in\n\n\t\tlet err = cs_option handle CS_OPT_DETAIL _CS_OPT_ON in\n\n\t\tmatch err with\n\n\t\t| _ -> ();\n\n\t\tlet insns = cs_disasm handle code 0x1000L 0L in\n\n\t\t\tprintf \"********************\\n\";\n\n\t\t\tprintf \"Platform: %s\\n\" comment;\n\n\t\t\tprint_string_hex \"Code: \" 
code;\n\n\t\t\tprintf \"Disasm:\\n\";\n\n\t\t\tList.iter (print_insn handle) insns;\n\n\t\tmatch cs_close handle with\n\n\t\t| 0 -> ();\n\n\t\t| _ -> printf \"Failed to close handle\";\n\n\t\t;;\n\n\n\nList.iter print_arch all_tests;;\n\n\n", "file_path": "capstone-sys/capstone/bindings/ocaml/test_m680x.ml", "rank": 95, "score": 50.27175029835254 }, { "content": " #[doc = \"@return: the number of successfully disassembled instructions,\"]\n\n #[doc = \"or 0 if this function failed to disassemble the given code\"]\n\n #[doc = \"\"]\n\n #[doc = \"On failure, call cs_errno() for error code.\"]\n\n pub fn cs_disasm(\n\n handle: csh,\n\n code: *const u8,\n\n code_size: usize,\n\n address: u64,\n\n count: usize,\n\n insn: *mut *mut cs_insn,\n\n ) -> usize;\n\n}\n\nextern \"C\" {\n\n #[doc = \"Deprecated function - to be retired in the next version!\"]\n\n #[doc = \"Use cs_disasm() instead of cs_disasm_ex()\"]\n\n pub fn cs_disasm_ex(\n\n handle: csh,\n\n code: *const u8,\n\n code_size: usize,\n", "file_path": "capstone-sys/pre_generated/capstone.rs", "rank": 96, "score": 47.971885236163274 }, { "content": "use arch::CapstoneBuilder;\n\nuse capstone_sys::cs_opt_value::*;\n\nuse capstone_sys::*;\n\nuse constants::{Arch, Endian, ExtraMode, Mode, OptValue, Syntax};\n\nuse error::*;\n\nuse instruction::{Insn, InsnDetail, InsnGroupId, InsnId, Instructions, RegId};\n\nuse std::convert::From;\n\nuse std::ffi::CStr;\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\nuse std::os::raw::{c_int, c_uint, c_void};\n\n\n\n/// An instance of the capstone disassembler\n\n#[derive(Debug)]\n\npub struct Capstone {\n\n /// Opaque handle to cs_engine\n\n /// Stored as a pointer to ensure `Capstone` is `!Send`/`!Sync`\n\n csh: *mut c_void,\n\n\n\n /// Internal mode bitfield\n", "file_path": "capstone-rs/src/capstone.rs", "rank": 97, "score": 47.83377413735897 }, { "content": " /// Pass `count = 0` to disassemble all instructions in the buffer.\n\n fn disasm<'a>(&'a self, code: &[u8], addr: 
u64, count: usize) -> CsResult<Instructions<'a>> {\n\n let mut ptr: *mut cs_insn = unsafe { mem::zeroed() };\n\n let insn_count = unsafe {\n\n cs_disasm(\n\n self.csh(),\n\n code.as_ptr(),\n\n code.len() as usize,\n\n addr,\n\n count as usize,\n\n &mut ptr,\n\n )\n\n };\n\n if insn_count == 0 {\n\n match self.error_result() {\n\n Ok(_) => Ok(Instructions::new_empty()),\n\n Err(err) => Err(err),\n\n }\n\n } else {\n\n Ok(unsafe { Instructions::from_raw_parts(ptr, insn_count) })\n", "file_path": "capstone-rs/src/capstone.rs", "rank": 98, "score": 47.26642281210775 }, { "content": "//! .detail(true)\n\n//! .build()\n\n//! .expect(\"Failed to create Capstone object\");\n\n//!\n\n//! let insns = cs.disasm_all(X86_CODE, 0x1000)\n\n//! .expect(\"Failed to disassemble\");\n\n//! println!(\"Found {} instructions\", insns.len());\n\n//! for i in insns.iter() {\n\n//! println!();\n\n//! println!(\"{}\", i);\n\n//!\n\n//! let detail: InsnDetail = cs.insn_detail(&i).expect(\"Failed to get insn detail\");\n\n//! let arch_detail: ArchDetail = detail.arch_detail();\n\n//! let ops = arch_detail.operands();\n\n//!\n\n//! let output: &[(&str, String)] = &[\n\n//! (\"insn id:\", format!(\"{:?}\", i.id().0)),\n\n//! (\"bytes:\", format!(\"{:?}\", i.bytes())),\n\n//! (\"read regs:\", reg_names(&cs, detail.regs_read())),\n\n//! (\"write regs:\", reg_names(&cs, detail.regs_write())),\n", "file_path": "capstone-rs/src/lib.rs", "rank": 99, "score": 47.05618932412247 } ]
Rust
compiler/context.rs
mwatts/arret
3b3bae27ca7283376d420fa7d69fe5073ecf9ef0
use crate::hir::PackagePaths; use crate::rfi; use crate::source::SourceLoader; use std::collections::{HashMap, HashSet}; use std::sync::Arc; use std::{hash, path}; use codespan_reporting::diagnostic::Diagnostic; use arret_syntax::datum::Datum; use arret_syntax::span::{FileId, Span}; use crate::hir; use crate::hir::exports::Exports; use crate::hir::import; use crate::hir::loader::{LoadedModule, ModuleName}; use crate::hir::lowering::LoweredModule; use crate::promise::PromiseMap; use crate::reporting::diagnostic_for_syntax_error; use crate::reporting::errors_to_diagnostics; use crate::source::SourceFile; use crate::ty; use crate::typeck::infer; new_global_id_type!( ModuleId, u32, std::sync::atomic::AtomicU32, std::num::NonZeroU32 ); pub(crate) type ModuleImports = HashMap<ModuleName, Arc<Module>>; pub struct LinkedLibrary { _loaded: libloading::Library, target_path: Box<path::Path>, } impl LinkedLibrary { pub fn target_path(&self) -> &path::Path { &self.target_path } } pub(crate) struct Module { pub module_id: ModuleId, pub imports: ModuleImports, pub defs: Vec<hir::Def<hir::Inferred>>, pub inferred_locals: Arc<HashMap<hir::LocalId, ty::Ref<ty::Poly>>>, pub exports: Exports, pub main_local_id: Option<hir::LocalId>, pub linked_library: Option<Arc<LinkedLibrary>>, } impl PartialEq for Module { fn eq(&self, other: &Self) -> bool { self.module_id == other.module_id } } impl Eq for Module {} impl hash::Hash for Module { fn hash<H: hash::Hasher>(&self, state: &mut H) { state.write_u32(self.module_id.get()); } } type CachedModule = Result<Arc<Module>, Vec<Diagnostic<FileId>>>; type UncachedModule = Result<Module, Vec<Diagnostic<FileId>>>; fn transitive_deps(imports: &ModuleImports) -> HashSet<Arc<Module>> { let mut all_deps: HashSet<Arc<Module>> = imports.values().cloned().collect(); for import in imports.values() { all_deps.extend(transitive_deps(&import.imports).into_iter()); } all_deps } pub(crate) fn prims_to_module(exports: Exports) -> Module { Module { module_id: 
ModuleId::alloc(), imports: HashMap::new(), defs: vec![], inferred_locals: Arc::new(HashMap::new()), exports, main_local_id: None, linked_library: None, } } fn rfi_library_to_module(span: Span, rfi_library: rfi::Library) -> Module { use crate::hir::var_id::LocalIdAlloc; use crate::ty::Ty; use arret_syntax::datum::DataStr; let rfi::Library { loaded, target_path, exported_funs, } = rfi_library; let mut lia = LocalIdAlloc::new(); let mut exports = HashMap::with_capacity(exported_funs.len()); let mut defs = Vec::with_capacity(exported_funs.len()); let mut inferred_locals = HashMap::with_capacity(exported_funs.len()); for (fun_name, rust_fun) in exported_funs.into_vec().into_iter() { let local_id = lia.alloc_mut(); let arret_type: ty::Ref<ty::Poly> = Ty::Fun(Box::new(rust_fun.arret_fun_type().clone())).into(); let fun_name_data_str: DataStr = fun_name.into(); let def = hir::Def::<hir::Inferred> { span, macro_invocation_span: None, destruc: hir::destruc::Destruc::Scalar( span, hir::destruc::Scalar::new( Some(local_id), fun_name_data_str.clone(), arret_type.clone(), ), ), value_expr: hir::Expr { result_ty: arret_type.clone(), kind: hir::ExprKind::RustFun(rust_fun), }, }; defs.push(def); inferred_locals.insert(local_id, arret_type); exports.insert(fun_name_data_str, hir::scope::Binding::Var(None, local_id)); } Module { module_id: ModuleId::alloc(), imports: HashMap::new(), defs, inferred_locals: Arc::new(inferred_locals), exports, main_local_id: None, linked_library: Some(Arc::new(LinkedLibrary { _loaded: loaded, target_path, })), } } pub struct CompileCtx { package_paths: PackagePaths, enable_optimisations: bool, source_loader: SourceLoader, rfi_loader: rfi::Loader, modules_by_name: PromiseMap<ModuleName, CachedModule>, } impl CompileCtx { pub fn new(package_paths: PackagePaths, enable_optimisations: bool) -> Self { use crate::hir::exports; use std::iter; let initial_modules = iter::once(("primitives", exports::prims_exports())) .chain(iter::once(("types", 
exports::tys_exports()))) .map(|(terminal_name, exports)| { let prims_module = prims_to_module(exports); ( ModuleName::new( "arret".into(), vec!["internal".into()], (*terminal_name).into(), ), Ok(Arc::new(prims_module)), ) }); Self { package_paths, enable_optimisations, source_loader: SourceLoader::new(), rfi_loader: rfi::Loader::new(), modules_by_name: PromiseMap::new(initial_modules), } } pub fn package_paths(&self) -> &PackagePaths { &self.package_paths } pub fn enable_optimisations(&self) -> bool { self.enable_optimisations } pub fn source_loader(&self) -> &SourceLoader { &self.source_loader } pub(crate) fn rfi_loader(&self) -> &rfi::Loader { &self.rfi_loader } fn get_module_by_name(&self, span: Span, module_name: ModuleName) -> CachedModule { self.modules_by_name .get_or_insert_with( module_name.clone(), move || match hir::loader::load_module_by_name(self, span, &module_name) { Ok(LoadedModule::Source(source_file)) => { self.source_file_to_module(&source_file).map(Arc::new) } Ok(LoadedModule::Rust(rfi_library)) => { Ok(Arc::new(rfi_library_to_module(span, rfi_library))) } Err(err) => Err(vec![err.into()]), }, ) } pub(crate) fn source_file_to_module(&self, source_file: &SourceFile) -> UncachedModule { let data = source_file .parsed() .map_err(|err| vec![diagnostic_for_syntax_error(&err)])?; self.data_to_module(data) } pub(crate) fn imports_for_data<'a>( &self, data: impl Iterator<Item = &'a Datum>, ) -> Result<ModuleImports, Vec<Diagnostic<FileId>>> { let imported_module_names = import::collect_imported_module_names(data).map_err(errors_to_diagnostics)?; let import_count = imported_module_names.len(); let loaded_module_results: Vec<(ModuleName, CachedModule)> = imported_module_names .into_iter() .map(|(module_name, span)| { let module = self.get_module_by_name(span, module_name.clone()); (module_name, module) }) .collect(); let mut diagnostics = Vec::<Diagnostic<FileId>>::new(); let mut imports = HashMap::<ModuleName, Arc<Module>>::with_capacity(import_count); 
for (module_name, loaded_module_result) in loaded_module_results { match loaded_module_result { Ok(module) => { imports.insert(module_name, module); } Err(mut new_diagnostics) => diagnostics.append(&mut new_diagnostics), } } if !diagnostics.is_empty() { return Err(diagnostics); } Ok(imports) } fn data_to_module(&self, data: &[Datum]) -> UncachedModule { let imports = self.imports_for_data(data.iter())?; let lowered_module = hir::lowering::lower_data(&imports, data).map_err(errors_to_diagnostics)?; let LoweredModule { defs: lowered_defs, exports, main_local_id, } = lowered_module; let imported_inferred_vars = transitive_deps(&imports) .into_iter() .map(|module| (module.module_id, module.inferred_locals.clone())) .collect(); let inferred_module = infer::infer_module(&imported_inferred_vars, lowered_defs) .map_err(errors_to_diagnostics)?; let infer::InferredModule { defs: inferred_defs, inferred_locals, } = inferred_module; Ok(Module { module_id: ModuleId::alloc(), imports, defs: inferred_defs, inferred_locals: Arc::new(inferred_locals), exports, main_local_id, linked_library: None, }) } }
use crate::hir::PackagePaths; use crate::rfi; use crate::source::SourceLoader; use std::collections::{HashMap, HashSet}; use std::sync::Arc; use std::{hash, path}; use codespan_reporting::diagnostic::Diagnostic; use arret_syntax::datum::Datum; use arret_syntax::span::{FileId, Span}; use crate::hir; use crate::hir::exports::Exports; use crate::hir::import; use crate::hir::loader::{LoadedModule, ModuleName}; use crate::hir::lowering::LoweredModule; use crate::promise::PromiseMap; use crate::reporting::diagnostic_for_syntax_error; use crate::reporting::errors_to_diagnostics; use crate::source::SourceFile; use crate::ty; use crate::typeck::infer; new_global_id_type!( ModuleId, u32, std::sync::atomic::AtomicU32, std::num::NonZeroU32 ); pub(crate) type ModuleImports = HashMap<ModuleName, Arc<Module>>; pub struct LinkedLibrary { _loaded: libloading::Library, target_path: Box<path::Path>, } impl LinkedLibrary { pub fn target_path(&self) -> &path::Path { &self.target_path } } pub(crate) struct Module { pub module_id: ModuleId, pub imports: ModuleImports, pub defs: Vec<hir::Def<hir::Inferred>>, pub inferred_locals: Arc<HashMap<hir::LocalId, ty::Ref<ty::Poly>>>, pub exports: Exports, pub main_local_id: Option<hir::LocalId>, pub linked_library: Option<Arc<LinkedLibrary>>, } impl PartialEq for Module { fn eq(&self, other: &Self) -> bool { self.module_id == other.module_id } } impl Eq for Module {} impl hash::Hash for Module { fn hash<H: hash::Hasher>(&self, state: &mut H) { state.write_u32(self.module_id.get()); } } type CachedModule = Result<Arc<Module>, Vec<Diagnostic<FileId>>>; type UncachedModule = Result<Module, Vec<Diagnostic<FileId>>>; fn transitive_deps(imports: &ModuleImports) -> HashSet<Arc<Module>> { let mut all_deps: HashSet<Arc<Module>> = imports.values().cloned().collect(); for import in imports.values() { all_deps.extend(transitive_deps(&import.imports).into_iter()); } all_deps } pub(crate) fn prims_to_module(exports: Exports) -> Module { Module { module_id: 
ModuleId::alloc(), imports: HashMap::new(), defs: vec![], inferred_locals: Arc::new(HashMap::new()), exports, main_local_id: None, linked_library: None, } } fn rfi_library_to_module(span: Span, rfi_library: rfi::Library) -> Module { use crate::hir::var_id::LocalIdAlloc; use crate::ty::Ty; use arret_syntax::datum::DataStr; let rfi::Library { loaded, target_path, exported_funs, } = rfi_library; let mut lia = LocalIdAlloc::new(); let mut exports = HashMap::with_capacity(exported_funs.len()); let mut defs = Vec::with_capacity(exported_funs.len()); let mut inferred_locals = HashMap::with_capacity(exported_funs.len()); for (fun_name, rust_fun) in exported_funs.into_vec().into_iter() { let local_id = lia.alloc_mut(); let arret_type: ty::Ref<ty::Poly> = Ty::Fun(Box::new(rust_fun.arret_fun_type().clone())).into(); let fun_name_data_str: DataStr = fun_name.into();
defs.push(def); inferred_locals.insert(local_id, arret_type); exports.insert(fun_name_data_str, hir::scope::Binding::Var(None, local_id)); } Module { module_id: ModuleId::alloc(), imports: HashMap::new(), defs, inferred_locals: Arc::new(inferred_locals), exports, main_local_id: None, linked_library: Some(Arc::new(LinkedLibrary { _loaded: loaded, target_path, })), } } pub struct CompileCtx { package_paths: PackagePaths, enable_optimisations: bool, source_loader: SourceLoader, rfi_loader: rfi::Loader, modules_by_name: PromiseMap<ModuleName, CachedModule>, } impl CompileCtx { pub fn new(package_paths: PackagePaths, enable_optimisations: bool) -> Self { use crate::hir::exports; use std::iter; let initial_modules = iter::once(("primitives", exports::prims_exports())) .chain(iter::once(("types", exports::tys_exports()))) .map(|(terminal_name, exports)| { let prims_module = prims_to_module(exports); ( ModuleName::new( "arret".into(), vec!["internal".into()], (*terminal_name).into(), ), Ok(Arc::new(prims_module)), ) }); Self { package_paths, enable_optimisations, source_loader: SourceLoader::new(), rfi_loader: rfi::Loader::new(), modules_by_name: PromiseMap::new(initial_modules), } } pub fn package_paths(&self) -> &PackagePaths { &self.package_paths } pub fn enable_optimisations(&self) -> bool { self.enable_optimisations } pub fn source_loader(&self) -> &SourceLoader { &self.source_loader } pub(crate) fn rfi_loader(&self) -> &rfi::Loader { &self.rfi_loader } fn get_module_by_name(&self, span: Span, module_name: ModuleName) -> CachedModule { self.modules_by_name .get_or_insert_with( module_name.clone(), move || match hir::loader::load_module_by_name(self, span, &module_name) { Ok(LoadedModule::Source(source_file)) => { self.source_file_to_module(&source_file).map(Arc::new) } Ok(LoadedModule::Rust(rfi_library)) => { Ok(Arc::new(rfi_library_to_module(span, rfi_library))) } Err(err) => Err(vec![err.into()]), }, ) } pub(crate) fn source_file_to_module(&self, source_file: 
&SourceFile) -> UncachedModule { let data = source_file .parsed() .map_err(|err| vec![diagnostic_for_syntax_error(&err)])?; self.data_to_module(data) } pub(crate) fn imports_for_data<'a>( &self, data: impl Iterator<Item = &'a Datum>, ) -> Result<ModuleImports, Vec<Diagnostic<FileId>>> { let imported_module_names = import::collect_imported_module_names(data).map_err(errors_to_diagnostics)?; let import_count = imported_module_names.len(); let loaded_module_results: Vec<(ModuleName, CachedModule)> = imported_module_names .into_iter() .map(|(module_name, span)| { let module = self.get_module_by_name(span, module_name.clone()); (module_name, module) }) .collect(); let mut diagnostics = Vec::<Diagnostic<FileId>>::new(); let mut imports = HashMap::<ModuleName, Arc<Module>>::with_capacity(import_count); for (module_name, loaded_module_result) in loaded_module_results { match loaded_module_result { Ok(module) => { imports.insert(module_name, module); } Err(mut new_diagnostics) => diagnostics.append(&mut new_diagnostics), } } if !diagnostics.is_empty() { return Err(diagnostics); } Ok(imports) } fn data_to_module(&self, data: &[Datum]) -> UncachedModule { let imports = self.imports_for_data(data.iter())?; let lowered_module = hir::lowering::lower_data(&imports, data).map_err(errors_to_diagnostics)?; let LoweredModule { defs: lowered_defs, exports, main_local_id, } = lowered_module; let imported_inferred_vars = transitive_deps(&imports) .into_iter() .map(|module| (module.module_id, module.inferred_locals.clone())) .collect(); let inferred_module = infer::infer_module(&imported_inferred_vars, lowered_defs) .map_err(errors_to_diagnostics)?; let infer::InferredModule { defs: inferred_defs, inferred_locals, } = inferred_module; Ok(Module { module_id: ModuleId::alloc(), imports, defs: inferred_defs, inferred_locals: Arc::new(inferred_locals), exports, main_local_id, linked_library: None, }) } }
let def = hir::Def::<hir::Inferred> { span, macro_invocation_span: None, destruc: hir::destruc::Destruc::Scalar( span, hir::destruc::Scalar::new( Some(local_id), fun_name_data_str.clone(), arret_type.clone(), ), ), value_expr: hir::Expr { result_ty: arret_type.clone(), kind: hir::ExprKind::RustFun(rust_fun), }, };
assignment_statement
[ { "content": "/// Adds internal member fields common to all inline and external records\n\npub fn append_common_internal_members(tcx: &mut TargetCtx, members: &mut Vec<LLVMTypeRef>) {\n\n unsafe {\n\n members.extend_from_slice(&[\n\n // is_inline\n\n LLVMInt8TypeInContext(tcx.llx),\n\n // may_contain_gc_refs\n\n LLVMInt8TypeInContext(tcx.llx),\n\n // record_class_id\n\n tcx.record_class_id_llvm_type(),\n\n ]);\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TargetRecordStruct {\n\n pub data_layout: Option<alloc::Layout>,\n\n pub record_storage: boxed::RecordStorage,\n\n pub llvm_data_type: LLVMTypeRef,\n\n pub classmap_class: class_map::BoxedClass,\n\n}\n", "file_path": "compiler/codegen/record_struct.rs", "rank": 0, "score": 293168.9660732904 }, { "content": "/// Hashes the passed value, poorly\n\n///\n\n/// This can only distinguish constants; regs hash to the same value. It's possible for constants\n\n/// would compare as equal to receive different hashes depending on their representation.\n\nfn hash_value<H: Hasher>(heap: &Heap, value: &Value, state: &mut H) {\n\n match value {\n\n Value::List(fixed, rest) => {\n\n state.write_u8(0);\n\n\n\n state.write_usize(fixed.len());\n\n for member in fixed.iter() {\n\n hash_value(heap, member, state);\n\n }\n\n\n\n state.write_u8(rest.is_some() as u8);\n\n if let Some(rest_value) = rest {\n\n hash_value(heap, rest_value, state);\n\n }\n\n }\n\n Value::Record(record_cons, fields) => {\n\n state.write_u8(1);\n\n record_cons.hash(state);\n\n for field in fields.iter() {\n\n hash_value(heap, field, state);\n", "file_path": "compiler/mir/inliner.rs", "rank": 1, "score": 250337.38295808254 }, { "content": "pub fn new_primary_label(span: Span, message: impl Into<String>) -> Label<FileId> {\n\n Label::primary(span.file_id().unwrap(), span.byte_range()).with_message(message)\n\n}\n\n\n", "file_path": "compiler/reporting.rs", "rank": 2, "score": 248075.9041017931 }, { "content": "pub fn new_secondary_label(span: Span, message: 
impl Into<String>) -> Label<FileId> {\n\n Label::secondary(span.file_id().unwrap(), span.byte_range()).with_message(message)\n\n}\n\n\n", "file_path": "compiler/reporting.rs", "rank": 3, "score": 248075.9041017931 }, { "content": "pub fn handle_non_lifecycle_request(_state: &mut State, request: Request) -> Response {\n\n // We only support lifecycle requests at the moment\n\n Response::new_err(request.id, ErrorCode::MethodNotFound, \"Method not found\")\n\n}\n", "file_path": "lsp-server/handler/mod.rs", "rank": 4, "score": 242437.79936467286 }, { "content": "pub fn expect_one_arg(span: Span, mut iter: NsDataIter) -> Result<NsDatum> {\n\n expect_arg_count(span, 1, iter.len())?;\n\n Ok(iter.next().unwrap())\n\n}\n\n\n", "file_path": "compiler/hir/util.rs", "rank": 5, "score": 241955.75296378136 }, { "content": "fn is_arret_root(path: &path::Path) -> bool {\n\n path.join(\"./.arret-root\").is_file()\n\n}\n\n\n\npub struct InvalidOptionError {\n\n invalid_path: path::PathBuf,\n\n}\n\n\n\nimpl InvalidOptionError {\n\n /// Path to the invalid Arret root\n\n pub fn invalid_path(&self) -> &path::Path {\n\n &self.invalid_path\n\n }\n\n}\n\n\n\npub struct InvalidEnvVarError {\n\n invalid_path: path::PathBuf,\n\n}\n\n\n\nimpl InvalidEnvVarError {\n", "file_path": "compiler/arret_root.rs", "rank": 6, "score": 241142.77746259974 }, { "content": "/// Applies the parsed import to the passed exports\n\n///\n\n/// If there are no filters to apply then `exports` will be directly returned.\n\npub fn filter_imported_exports(\n\n parsed_import_set: ParsedImportSet,\n\n exports: &Exports,\n\n) -> Result<Exports> {\n\n match parsed_import_set {\n\n ParsedImportSet::Module(_, _) => Ok(exports.clone()),\n\n ParsedImportSet::Filter(filter, inner_parsed_import) => {\n\n let inner_exports = filter_imported_exports(*inner_parsed_import, exports)?;\n\n Ok(apply_filter(filter, &inner_exports)?)\n\n }\n\n }\n\n}\n", "file_path": "compiler/hir/import/filter.rs", "rank": 7, "score": 
238449.80471589393 }, { "content": "fn run_single_test(ccx: &CompileCtx, input_path: &path::Path, test_type: TestType) -> bool {\n\n let source = fs::read_to_string(input_path).unwrap();\n\n\n\n let source_file = ccx.source_loader().load_string(\n\n input_path.as_os_str().to_owned(),\n\n SourceText::Shared(source.into()),\n\n );\n\n\n\n if test_type == TestType::CompileError {\n\n run_single_compile_fail_test(ccx, &source_file)\n\n } else {\n\n run_single_pass_test(ccx, &source_file, test_type)\n\n }\n\n}\n\n\n", "file_path": "compiler/tests/integration.rs", "rank": 8, "score": 235806.0379040599 }, { "content": "pub fn load_module_by_name(\n\n ccx: &CompileCtx,\n\n span: Span,\n\n module_name: &ModuleName,\n\n) -> Result<LoadedModule> {\n\n let package_path = if let Some(package_path) = ccx\n\n .package_paths()\n\n .paths\n\n .get(module_name.package_name.as_ref())\n\n {\n\n package_path\n\n } else {\n\n return Err(Error::new(span, ErrorKind::PackageNotFound));\n\n };\n\n\n\n if module_name.is_rfi() {\n\n ccx.rfi_loader()\n\n .load(\n\n span,\n\n ccx.source_loader(),\n", "file_path": "compiler/hir/loader.rs", "rank": 9, "score": 231014.6133015584 }, { "content": "/// Returns all unique imported module names for the passed module data\n\n///\n\n/// The value of the `HashMap` will be the first span where that module name occurs. 
This is\n\n/// intended to provide a stable location for error reporting.\n\npub fn collect_imported_module_names<'a>(\n\n data: impl Iterator<Item = &'a Datum>,\n\n) -> Result<HashMap<ModuleName, Span>, Vec<Error>> {\n\n let mut imported_module_names = HashMap::new();\n\n let mut errors = vec![];\n\n\n\n for datum in data {\n\n if let Some(arg_data) = try_extract_import_set(datum) {\n\n for arg_datum in arg_data {\n\n match parse_import_set(arg_datum) {\n\n Ok(parsed_import) => {\n\n let (span, module_name) = parsed_import.into_spanned_module_name();\n\n imported_module_names.entry(module_name).or_insert(span);\n\n }\n\n Err(error) => {\n\n errors.push(error);\n\n }\n\n }\n\n }\n\n }\n", "file_path": "compiler/hir/import/mod.rs", "rank": 10, "score": 228774.83418394934 }, { "content": "// This isn't #[cfg(test)] because it's used in other crates\n\npub fn t2s(v: &str) -> Span {\n\n let (start, end) = if v.is_empty() {\n\n // Used for empty files\n\n (0, 0)\n\n } else if let Some(zero_size_off) = v.find('>') {\n\n let byte_pos = (zero_size_off + 1) as ByteIndex;\n\n (byte_pos, byte_pos)\n\n } else {\n\n let start = v.find('^').expect(\"Positioning character not found\") as ByteIndex;\n\n let end = v.rfind('^').map(|i| i + 1).unwrap() as ByteIndex;\n\n\n\n (start, end)\n\n };\n\n\n\n Span::new(None, start, end)\n\n}\n", "file_path": "syntax/span.rs", "rank": 11, "score": 226909.9915002847 }, { "content": "fn parse_module_name(span: Span, name: &[Datum]) -> Result<ModuleName> {\n\n if name.len() < 2 {\n\n return Err(Error::new(span, ErrorKind::ShortModuleName));\n\n }\n\n\n\n let mut name_idents = name\n\n .iter()\n\n .map(|datum| Ok(expect_ident(datum, \"module name component\")?.clone()));\n\n\n\n let package_name = name_idents.next().unwrap()?;\n\n let terminal_name = name_idents.next_back().unwrap()?;\n\n let name_components = name_idents.collect::<result::Result<Vec<_>, Error>>()?;\n\n\n\n Ok(ModuleName::new(\n\n package_name,\n\n name_components,\n\n 
terminal_name,\n\n ))\n\n}\n\n\n", "file_path": "compiler/hir/import/parse.rs", "rank": 12, "score": 221314.7516950695 }, { "content": "/// Converts body data from a `#()` reader macro in to an anonymous function\n\npub fn convert_anon_fun(outer_span: Span, body_data: impl Iterator<Item = Datum>) -> Result<Datum> {\n\n use std::iter;\n\n\n\n let mut found_arity = FoundArity {\n\n fixed_args: 0,\n\n has_rest: false,\n\n };\n\n\n\n let replaced_body = body_data\n\n .map(|body_datum| visit_arg_literals(&mut found_arity, body_datum))\n\n .collect::<Result<Vec<Datum>>>()?;\n\n\n\n let mut param_list: Vec<Datum> = (0..found_arity.fixed_args)\n\n .map(|param_index| {\n\n let param_ordinal = param_index + 1;\n\n Datum::Sym(outer_span, format!(\"%{}\", param_ordinal).into())\n\n })\n\n .collect();\n\n\n\n if found_arity.has_rest {\n", "file_path": "syntax/anon_fun.rs", "rank": 13, "score": 219457.06121679462 }, { "content": "/// Gets the full path to where our REPL history should be stored\n\n///\n\n/// This does very little error handling as history is a \"nice to have\" feature\n\npub fn repl_history_path() -> Option<path::PathBuf> {\n\n let project_dirs = directories_next::ProjectDirs::from(\"org.arret-lang\", \"\", \"arret\")?;\n\n let data_dir = project_dirs.data_dir();\n\n\n\n fs::create_dir_all(data_dir).ok()?;\n\n Some(data_dir.join(\"repl-history\"))\n\n}\n", "file_path": "driver/subcommand/repl/history.rs", "rank": 14, "score": 212227.4842138518 }, { "content": "pub fn callee_call_conv(mcx: &mut ModCtx<'_, '_, '_>, callee: &ops::Callee) -> u32 {\n\n match callee {\n\n ops::Callee::BoxedFunThunk(_) | ops::Callee::StaticSymbol(_) => {\n\n LLVMCallConv::LLVMCCallConv as u32\n\n }\n\n ops::Callee::PrivateFun(private_fun_id) => unsafe {\n\n LLVMGetFunctionCallConv(mcx.llvm_private_fun(*private_fun_id))\n\n },\n\n }\n\n}\n", "file_path": "compiler/codegen/callee.rs", "rank": 15, "score": 211775.3871685197 }, { "content": "pub fn is_identifier_char(c: char) -> bool {\n\n 
matches!(c,\n\n 'A'..='Z' | 'a'..='z' | '0'..='9' |\n\n // Punctuation allowed at beginning of an identifier\n\n '.' | '*' | '+' | '!' | '-' | '_' | '?' | '$' | '%' | '&' | '=' | '<' | '>' | ':' |\n\n // Punctuation allowed anywhere\n\n '#' |\n\n // We don't support namespacing so we treat this as a normal char\n\n '/'\n\n )\n\n}\n\n\n\npub struct Parser<'input> {\n\n file_id: Option<FileId>,\n\n input: &'input str,\n\n consumed_bytes: ByteIndex,\n\n}\n\n\n\nimpl<'input> Parser<'input> {\n\n fn from_str(file_id: Option<FileId>, input: &'input str, span_offset: ByteIndex) -> Self {\n", "file_path": "syntax/parser.rs", "rank": 16, "score": 209731.57229111507 }, { "content": "fn read_or_empty_vec(filename: &path::Path) -> Result<Vec<u8>, io::Error> {\n\n match fs::read(filename) {\n\n Ok(data) => Ok(data),\n\n Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(vec![]),\n\n Err(err) => Err(err),\n\n }\n\n}\n\n\n", "file_path": "compiler/tests/integration.rs", "rank": 17, "score": 209504.9799859852 }, { "content": "/// Places a syntax datum on a box heap\n\npub fn box_syntax_datum(heap: &mut impl boxed::AsHeap, datum: &Datum) -> Gc<boxed::Any> {\n\n match datum {\n\n Datum::Bool(_, value) => boxed::Bool::singleton_ref(*value).as_any_ref(),\n\n Datum::Int(_, val) => boxed::Int::new(heap, *val).as_any_ref(),\n\n Datum::Float(_, val) => boxed::Float::new(heap, *val).as_any_ref(),\n\n Datum::Char(_, val) => boxed::Char::new(heap, *val).as_any_ref(),\n\n Datum::Str(_, val) => boxed::Str::new(heap, val.as_ref()).as_any_ref(),\n\n Datum::Sym(_, val) => boxed::Sym::new(heap, val.as_ref()).as_any_ref(),\n\n Datum::List(_, vs) => {\n\n boxed::List::from_values(heap, vs.iter(), box_syntax_datum).as_any_ref()\n\n }\n\n Datum::Vector(_, vs) => {\n\n boxed::Vector::from_values(heap, vs.iter(), box_syntax_datum).as_any_ref()\n\n }\n\n Datum::Set(_, vs) => {\n\n boxed::Set::from_values(heap, vs.iter(), box_syntax_datum).as_any_ref()\n\n }\n\n Datum::Map(_, vs) => 
boxed::Map::from_values(heap, vs.iter(), |heap, (key, value)| {\n\n (box_syntax_datum(heap, key), box_syntax_datum(heap, value))\n\n })\n\n .as_any_ref(),\n\n }\n\n}\n\n\n\n// This is indirectly tested by `writer`\n", "file_path": "runtime-syntax/reader.rs", "rank": 18, "score": 208336.29938524292 }, { "content": "fn lower_deftype(scope: &mut Scope<'_>, span: Span, mut arg_iter: NsDataIter) -> Result<()> {\n\n if arg_iter.len() != 2 {\n\n return Err(Error::new(span, ErrorKind::WrongDefLikeArgCount(\"deftype\")));\n\n }\n\n\n\n let self_datum = arg_iter.next().unwrap();\n\n let ty_datum = arg_iter.next().unwrap();\n\n\n\n lower_type(scope, self_datum, ty_datum)\n\n}\n\n\n", "file_path": "compiler/hir/lowering.rs", "rank": 19, "score": 205707.90444060994 }, { "content": "fn lower_defrecord(scope: &mut Scope<'_>, span: Span, mut arg_iter: NsDataIter) -> Result<()> {\n\n if arg_iter.len() != 2 {\n\n return Err(Error::new(span, ErrorKind::WrongDefRecordArgCount));\n\n }\n\n\n\n let ty_cons_datum = arg_iter.next().unwrap();\n\n let value_cons_datum = arg_iter.next().unwrap();\n\n\n\n lower_record(scope, ty_cons_datum, value_cons_datum)\n\n}\n\n\n", "file_path": "compiler/hir/lowering.rs", "rank": 20, "score": 205707.90444060994 }, { "content": "fn lower_defmacro(scope: &mut Scope<'_>, span: Span, mut arg_iter: NsDataIter) -> Result<()> {\n\n if arg_iter.len() != 2 {\n\n return Err(Error::new(\n\n span,\n\n ErrorKind::WrongDefLikeArgCount(\"defmacro\"),\n\n ));\n\n }\n\n\n\n let self_datum = arg_iter.next().unwrap();\n\n let transformer_spec = arg_iter.next().unwrap();\n\n\n\n lower_macro(scope, self_datum, transformer_spec)\n\n}\n\n\n", "file_path": "compiler/hir/lowering.rs", "rank": 21, "score": 205707.90444060994 }, { "content": "pub fn prims_exports() -> Exports {\n\n PRIM_EXPORTS\n\n .iter()\n\n .map(|(name, binding)| ((*name).into(), binding.clone()))\n\n .collect()\n\n}\n\n\n", "file_path": "compiler/hir/exports.rs", "rank": 22, "score": 204276.42697892897 }, { 
"content": "pub fn tys_exports() -> Exports {\n\n TY_EXPORTS\n\n .iter()\n\n .map(|(name, binding)| ((*name).into(), binding.clone()))\n\n .collect()\n\n}\n", "file_path": "compiler/hir/exports.rs", "rank": 23, "score": 204276.42697892897 }, { "content": "/// Initialises LLVM\n\n///\n\n/// This must be called before anything else in this module. It can only be called from a single\n\n/// thread at once.\n\npub fn initialise_llvm(support_cross_compilation: bool) {\n\n use llvm_sys::target::*;\n\n\n\n unsafe {\n\n if support_cross_compilation {\n\n LLVM_InitializeAllTargetInfos();\n\n LLVM_InitializeAllTargets();\n\n LLVM_InitializeAllTargetMCs();\n\n LLVM_InitializeAllAsmPrinters();\n\n } else {\n\n LLVM_InitializeNativeTarget();\n\n LLVM_InitializeNativeAsmPrinter();\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test {\n\n use super::*;\n\n use std::sync::Once;\n\n\n\n static INITIALISE_TEST_LLVM: Once = Once::new();\n\n\n\n pub fn initialise_test_llvm() {\n\n INITIALISE_TEST_LLVM.call_once(|| {\n\n initialise_llvm(false);\n\n });\n\n }\n\n}\n", "file_path": "compiler/codegen/mod.rs", "rank": 24, "score": 203700.55700823216 }, { "content": "/// Writes a representation of the passed box to the writer\n\npub fn write_boxed(w: &mut dyn Write, heap: &impl AsHeap, any_ref: Gc<boxed::Any>) -> Result<()> {\n\n use arret_runtime::boxed::AnySubtype;\n\n\n\n match any_ref.as_subtype() {\n\n AnySubtype::True(_) => write!(w, \"true\"),\n\n AnySubtype::False(_) => write!(w, \"false\"),\n\n AnySubtype::Nil(_) => write!(w, \"()\"),\n\n AnySubtype::Int(int_ref) => write!(w, \"{}\", int_ref.value()),\n\n AnySubtype::Sym(sym) => write_interned_sym(w, heap, sym.interned()),\n\n AnySubtype::Float(float_ref) => write_float(w, float_ref.value()),\n\n AnySubtype::Pair(list) => {\n\n write!(w, \"(\")?;\n\n write_boxed_seq(w, heap, list.as_list_ref().iter())?;\n\n write!(w, \")\")\n\n }\n\n AnySubtype::Vector(vec) => {\n\n write!(w, \"[\")?;\n\n write_boxed_seq(w, heap, 
vec.iter())?;\n\n write!(w, \"]\")\n\n }\n", "file_path": "runtime-syntax/writer.rs", "rank": 25, "score": 201749.75274171593 }, { "content": "/// Writes a pretty-printed representation of the passed box to the writer\n\npub fn pretty_print_boxed(write: &mut dyn Write, heap: &impl AsHeap, any_ref: Gc<boxed::Any>) {\n\n match any_ref.as_subtype() {\n\n boxed::AnySubtype::Str(string) => {\n\n write.write_all(string.as_str().as_bytes()).unwrap();\n\n }\n\n boxed::AnySubtype::Char(c) => {\n\n let mut buffer = [0; 4];\n\n write\n\n .write_all(c.value().encode_utf8(&mut buffer).as_bytes())\n\n .unwrap();\n\n }\n\n boxed::AnySubtype::Sym(sym) => {\n\n write\n\n .write_all(sym.name(heap.as_heap()).as_bytes())\n\n .unwrap();\n\n }\n\n _ => {\n\n write_boxed(write, heap.as_heap(), any_ref).unwrap();\n\n }\n\n }\n", "file_path": "runtime-syntax/writer.rs", "rank": 26, "score": 201687.15172805544 }, { "content": "type TaskEntry = extern \"C\" fn(&mut Task);\n\n\n\n#[export_name = \"arret_runtime_launch_task\"]\n\npub unsafe extern \"C\" fn launch_task(\n\n global_names: *const RawGlobalNames,\n\n classmap_classes: *const ClassRef<'static>,\n\n entry: TaskEntry,\n\n) {\n\n let interner = Interner::with_global_names(global_names);\n\n let class_map = ClassMap::with_const_classes(classmap_classes);\n\n\n\n let type_info = TypeInfo::new(interner, class_map);\n\n let mut task = Task::with_type_info(type_info);\n\n\n\n if let Err(err) = panic::catch_unwind(panic::AssertUnwindSafe(|| entry(&mut task))) {\n\n if let Some(message) = err.downcast_ref::<String>() {\n\n eprintln!(\"{}\", message);\n\n } else {\n\n eprintln!(\"Unexpected panic type\");\n\n };\n", "file_path": "runtime/compiler_support.rs", "rank": 27, "score": 195099.65376902936 }, { "content": "pub fn infer_module(\n\n imported_inferred_vars: &InferredModuleVars,\n\n defs: Vec<hir::Def<hir::Lowered>>,\n\n) -> result::Result<InferredModule, Vec<Error>> {\n\n RecursiveDefsCtx::new(imported_inferred_vars, 
defs).into_inferred_module()\n\n}\n\n\n", "file_path": "compiler/typeck/infer.rs", "rank": 28, "score": 194252.78750347093 }, { "content": "#[proc_macro_attribute]\n\npub fn rust_fun(\n\n attrs: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let attrs: proc_macro2::TokenStream = attrs.into();\n\n\n\n let mut attrs_iter = attrs.into_iter();\n\n let arret_type = attrs_iter.next().expect(\"Arret type expected\");\n\n\n\n if attrs_iter.next().is_some() {\n\n panic!(\"unexpected tokens after Arret type\");\n\n }\n\n\n\n // Parse the input tokens into a syntax tree\n\n let mut input_fn = parse_macro_input!(input as ItemFn);\n\n let mut input_sig = &mut input_fn.sig;\n\n let vis = input_fn.vis.clone();\n\n\n\n // Rename the function so the descriptor can take its original name\n\n let entry_point_name = format!(\"arret_{}_entry_point\", input_sig.ident);\n", "file_path": "rfi-derive/lib.rs", "rank": 30, "score": 191049.45919985446 }, { "content": "pub fn num_eq(\n\n ehx: &mut EvalHirCtx,\n\n b: &mut Builder,\n\n span: Span,\n\n arg_list_value: &Value,\n\n) -> Result<BuildOutcome> {\n\n Ok(compare_operand_list(\n\n ehx,\n\n b,\n\n span,\n\n arg_list_value,\n\n Comparison::Eq,\n\n ))\n\n}\n\n\n", "file_path": "compiler/mir/intrinsic/number.rs", "rank": 31, "score": 189819.27087227552 }, { "content": "pub fn data_from_str_with_span_offset(\n\n file_id: Option<FileId>,\n\n s: &str,\n\n span_offset: ByteIndex,\n\n) -> Result<Vec<Datum>> {\n\n Parser::from_str(file_id, s, span_offset).parse_data()\n\n}\n\n\n", "file_path": "syntax/parser.rs", "rank": 32, "score": 189784.09061517124 }, { "content": "pub fn datum_from_str_with_span_offset(\n\n file_id: Option<FileId>,\n\n s: &str,\n\n span_offset: ByteIndex,\n\n) -> Result<Datum> {\n\n Parser::from_str(file_id, s, span_offset).parse_datum()\n\n}\n\n\n", "file_path": "syntax/parser.rs", "rank": 33, "score": 189784.09061517124 }, { "content": "pub fn callee_takes_task(callee: 
&ops::Callee) -> bool {\n\n match callee {\n\n ops::Callee::BoxedFunThunk(_) => true,\n\n ops::Callee::PrivateFun(_) => true,\n\n ops::Callee::StaticSymbol(ops::StaticSymbol { abi, .. }) => abi.takes_task,\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/callee.rs", "rank": 34, "score": 189722.12122398085 }, { "content": "/// Returns the required type for a destruc\n\npub fn type_for_decl_destruc(\n\n destruc: &destruc::Destruc<hir::Lowered>,\n\n guide_type: Option<&ty::Ref<ty::Poly>>,\n\n) -> ty::Ref<ty::Poly> {\n\n match destruc {\n\n destruc::Destruc::Scalar(_, scalar) => match scalar.ty() {\n\n hir::DeclTy::Known(poly) => poly.clone(),\n\n hir::DeclTy::Free => guide_type.cloned().unwrap_or_else(|| Ty::Any.into()),\n\n },\n\n\n\n destruc::Destruc::List(_, list) => {\n\n let guide_type_iter =\n\n guide_type.and_then(|guide_type| ListIterator::try_new_from_ty_ref(guide_type));\n\n\n\n type_for_decl_list_destruc(list, guide_type_iter).into()\n\n }\n\n }\n\n}\n\n\n", "file_path": "compiler/typeck/destruc.rs", "rank": 35, "score": 189099.9248634771 }, { "content": "pub fn ensure_main_type(\n\n fallback_span: Span,\n\n complete_defs: &[hir::Def<hir::Inferred>],\n\n main_local_id: hir::LocalId,\n\n inferred_main_type: &ty::Ref<ty::Poly>,\n\n) -> Result<()> {\n\n let expected_main_type = ty::Fun::new_for_main().into();\n\n\n\n if !ty::is_a::ty_ref_is_a(inferred_main_type, &expected_main_type) {\n\n use crate::reporting::LocTrace;\n\n\n\n // Try to find where `(main!)` was defined\n\n let main_loc_trace = complete_defs\n\n .iter()\n\n .find_map(|def| {\n\n if let destruc::Destruc::Scalar(_, ref scalar) = def.destruc {\n\n if scalar.local_id() == &Some(main_local_id) {\n\n return Some(LocTrace::new(def.span, def.macro_invocation_span));\n\n }\n\n }\n", "file_path": "compiler/typeck/infer.rs", "rank": 36, "score": 189094.65923585324 }, { "content": "fn build_rfi_lib_path(base: &path::Path, package_name: &str, lib_type: LibType) -> path::PathBuf {\n\n let mut path_buf = 
path::PathBuf::new();\n\n path_buf.push(base);\n\n\n\n #[cfg(debug_assertions)]\n\n path_buf.push(\"debug\");\n\n\n\n #[cfg(not(debug_assertions))]\n\n path_buf.push(\"release\");\n\n\n\n match lib_type {\n\n LibType::Dynamic => {\n\n #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\n path_buf.push(format!(\"lib{}.dylib\", package_name));\n\n\n\n #[cfg(all(not(target_os = \"macos\"), not(target_os = \"ios\"),))]\n\n path_buf.push(format!(\"lib{}.so\", package_name));\n\n }\n\n LibType::Static => {\n\n path_buf.push(format!(\"lib{}.a\", package_name));\n", "file_path": "compiler/rfi/mod.rs", "rank": 38, "score": 188629.68135381985 }, { "content": "pub fn parse_command(mut line: String) -> ParsedCommand {\n\n match line.as_ref() {\n\n _ if line.starts_with(TYPE_ONLY_PREFIX) => {\n\n line.drain(0..TYPE_ONLY_PREFIX.len());\n\n ParsedCommand::EvalType(line)\n\n }\n\n HELP_COMMAND => {\n\n println!(\"Available REPL commands:\");\n\n println!();\n\n println!(\"/help Prints this summary\");\n\n println!(\"/type <expression> Evaluates the type of the given expression\");\n\n println!(\"/quit Exits the REPL\");\n\n ParsedCommand::Other\n\n }\n\n QUIT_COMMAND => ParsedCommand::Quit,\n\n _ => ParsedCommand::EvalValue(line),\n\n }\n\n}\n", "file_path": "driver/subcommand/repl/command.rs", "rank": 39, "score": 186279.8703285891 }, { "content": "/// Returns a specific ABI type to compactly encode the given set of possible values\n\npub fn specific_abi_type_for_values<'v>(\n\n possible_values: impl Iterator<Item = &'v Value>,\n\n) -> abitype::AbiType {\n\n specific_type_for_values(possible_values, specific_abi_type_for_type_tags)\n\n}\n\n\n", "file_path": "compiler/mir/specific_abi_type.rs", "rank": 40, "score": 185928.62715602317 }, { "content": "/// Loads environment values from an env parameter\n\npub fn load_from_env_param(\n\n b: &mut Builder,\n\n span: Span,\n\n local_values: &mut HashMap<hir::LocalId, Value>,\n\n env_values: &mut EnvValues,\n\n captures_reg: 
Option<BuiltReg>,\n\n) {\n\n use crate::mir::value;\n\n use arret_runtime::abitype;\n\n\n\n // Include the const values directly\n\n local_values.extend(\n\n env_values\n\n .const_values\n\n .iter()\n\n .map(|(local_id, value)| (*local_id, value.clone())),\n\n );\n\n\n\n match &env_values.captures_repr {\n\n CapturesRepr::Empty => {}\n", "file_path": "compiler/mir/env_values.rs", "rank": 41, "score": 185672.9498653041 }, { "content": "/// Loads env values assuming all captured variables are still inside the local function\n\npub fn load_from_current_fun(\n\n local_values: &mut HashMap<hir::LocalId, Value>,\n\n env_values: &EnvValues,\n\n) {\n\n local_values.extend(\n\n env_values\n\n .const_values\n\n .iter()\n\n .chain(env_values.free_values.iter())\n\n .map(|(local_id, value)| (*local_id, value.clone())),\n\n );\n\n}\n\n\n", "file_path": "compiler/mir/env_values.rs", "rank": 42, "score": 185672.60122617875 }, { "content": "/// Loads a vector member from a vector of known length\n\n///\n\n/// [`vector_length`] must be less than [`MAX_DIRECT_ACCESS_LENGTH`]\n\npub fn load_vector_member(\n\n ehx: &mut EvalHirCtx,\n\n b: &mut Builder,\n\n span: Span,\n\n vector_len: usize,\n\n vector_value: &Value,\n\n member_index: usize,\n\n) -> Value {\n\n use crate::mir::ops::*;\n\n use crate::mir::tagset::TypeTagSet;\n\n use crate::mir::value::build_reg::value_to_reg;\n\n use crate::mir::value::types::TypeHint;\n\n use crate::mir::value::RegValue;\n\n\n\n let member_possible_type_tags: TypeTagSet = vector_member_type(vector_value).into();\n\n\n\n let vector_reg = value_to_reg(\n\n ehx,\n\n b,\n\n span,\n", "file_path": "compiler/mir/vector_member.rs", "rank": 43, "score": 185672.35282533523 }, { "content": "pub fn load_record_field(\n\n ehx: &mut EvalHirCtx,\n\n b: &mut impl TryToBuilder,\n\n span: Span,\n\n record_cons: &record::ConsId,\n\n record_value: &Value,\n\n field_index: usize,\n\n) -> Value {\n\n match record_value {\n\n Value::Record(_, fields) => 
fields[field_index].clone(),\n\n Value::Const(boxed_any) => {\n\n use boxed::FieldValue;\n\n\n\n let boxed_record = boxed_any\n\n .downcast_ref::<boxed::Record>()\n\n .expect(\"unexpected type when accessing record field\");\n\n\n\n match boxed_record\n\n .field_values(ehx.as_heap())\n\n .nth(field_index)\n", "file_path": "compiler/mir/record_field.rs", "rank": 44, "score": 185667.4920252065 }, { "content": "pub fn gen_classmap_classes(\n\n tcx: &mut TargetCtx,\n\n llvm_module: LLVMModuleRef,\n\n record_structs: &[ops::RecordStructId],\n\n) -> LLVMValueRef {\n\n if record_structs.is_empty() {\n\n return unsafe { LLVMConstPointerNull(LLVMPointerType(tcx.classmap_class_llvm_type(), 0)) };\n\n }\n\n\n\n let llvm_classmap_field_type = tcx.classmap_field_llvm_type();\n\n let llvm_i8 = unsafe { LLVMInt8TypeInContext(tcx.llx) };\n\n let llvm_i32 = unsafe { LLVMInt32TypeInContext(tcx.llx) };\n\n\n\n let llvm_first_element_gep_indices =\n\n unsafe { &mut [LLVMConstInt(llvm_i32, 0, 0), LLVMConstInt(llvm_i32, 0, 0)] };\n\n\n\n let mut llvm_classmap_classes: Vec<LLVMValueRef> = record_structs\n\n .iter()\n\n .map(|record_struct| {\n\n let classmap_class = tcx\n", "file_path": "compiler/codegen/record_struct.rs", "rank": 45, "score": 185562.90050251305 }, { "content": "pub fn expect_spanned_ident<'a>(\n\n datum: &'a Datum,\n\n usage: &'static str,\n\n) -> Result<(Span, &'a DataStr)> {\n\n if let Datum::Sym(span, name) = datum {\n\n if !name.starts_with(':') {\n\n return Ok((*span, name));\n\n }\n\n }\n\n\n\n Err(Error::new(\n\n datum.span(),\n\n ErrorKind::ExpectedSym(\n\n ExpectedSym {\n\n found: datum.description(),\n\n usage,\n\n }\n\n .into(),\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "compiler/hir/util.rs", "rank": 47, "score": 185158.68745706347 }, { "content": "/// Lowers a list of polymorphic variables defined in `outer_scope` and places them in `inner_scope`\n\n///\n\n/// This is used for record types\n\npub fn lower_polymorphic_var_list(\n\n outer_scope: &Scope<'_>,\n\n 
inner_scope: &mut Scope<'_>,\n\n param_data: NsDataIter,\n\n) -> Result<Box<[PolymorphicVar]>> {\n\n let lowered_poly_vars = param_data\n\n .map(|var_datum| lower_polymorphic_var(outer_scope, var_datum))\n\n .collect::<Result<Vec<LoweredPolymorphicVar>>>()?;\n\n\n\n let poly_vars = lowered_poly_vars\n\n .iter()\n\n .map(|lpv| lpv.polymorphic_var.clone())\n\n .collect();\n\n\n\n bind_polymorphic_vars(inner_scope, lowered_poly_vars)?;\n\n Ok(poly_vars)\n\n}\n\n\n", "file_path": "compiler/hir/types.rs", "rank": 48, "score": 184982.98750885233 }, { "content": "/// Lowers a set of polymorphic variables defined in `outer_scope` and places them in `inner_scope`\n\n///\n\n/// This is used for functions and function types\n\npub fn lower_polymorphic_var_set(\n\n outer_scope: &Scope<'_>,\n\n inner_scope: &mut Scope<'_>,\n\n polymorphic_var_data: NsDataIter,\n\n) -> Result<(purity::PVars, ty::TVars)> {\n\n let mut pvars = purity::PVars::new();\n\n let mut tvars = ty::TVars::new();\n\n\n\n let lowered_poly_vars = polymorphic_var_data\n\n .map(|var_datum| lower_polymorphic_var(outer_scope, var_datum))\n\n .collect::<Result<Vec<LoweredPolymorphicVar>>>()?;\n\n\n\n for lowered_poly_var in lowered_poly_vars.iter() {\n\n match &lowered_poly_var.polymorphic_var {\n\n PolymorphicVar::PVar(pvar) => {\n\n pvars.push(pvar.clone());\n\n }\n\n PolymorphicVar::TVar(tvar) => {\n\n tvars.push(tvar.clone());\n\n }\n\n PolymorphicVar::Pure(_) | PolymorphicVar::TFixed(_, _) => {}\n\n }\n\n }\n\n\n\n bind_polymorphic_vars(inner_scope, lowered_poly_vars)?;\n\n Ok((pvars, tvars))\n\n}\n\n\n", "file_path": "compiler/hir/types.rs", "rank": 49, "score": 184982.87267807033 }, { "content": "pub fn gen_alloc_float(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n llvm_float_value: LLVMValueRef,\n\n) -> LLVMValueRef {\n\n unsafe {\n\n let alloced_float = gen_alloced_box::<boxed::Float>(\n\n tcx,\n\n builder,\n\n active_alloc,\n\n 
box_source,\n\n b\"alloced_float\\0\",\n\n );\n\n\n\n let value_ptr = LLVMBuildStructGEP(builder, alloced_float, 1, libcstr!(\"value_ptr\"));\n\n LLVMBuildStore(builder, llvm_float_value, value_ptr);\n\n\n\n alloced_float\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 50, "score": 184973.53997985783 }, { "content": "pub fn gen_alloc_char(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n llvm_char_value: LLVMValueRef,\n\n) -> LLVMValueRef {\n\n unsafe {\n\n let alloced_char = gen_alloced_box::<boxed::Char>(\n\n tcx,\n\n builder,\n\n active_alloc,\n\n box_source,\n\n b\"alloced_char\\0\",\n\n );\n\n\n\n let value_ptr = LLVMBuildStructGEP(builder, alloced_char, 1, libcstr!(\"value_ptr\"));\n\n LLVMBuildStore(builder, llvm_char_value, value_ptr);\n\n\n\n alloced_char\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 51, "score": 184973.53997985783 }, { "content": "pub fn gen_alloc_sym(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n llvm_interned_sym: LLVMValueRef,\n\n) -> LLVMValueRef {\n\n unsafe {\n\n let alloced_sym =\n\n gen_alloced_box::<boxed::Sym>(tcx, builder, active_alloc, box_source, b\"alloced_sym\\0\");\n\n\n\n let interned_sym_ptr =\n\n LLVMBuildStructGEP(builder, alloced_sym, 1, libcstr!(\"interned_sym_ptr\"));\n\n LLVMBuildStore(builder, llvm_interned_sym, interned_sym_ptr);\n\n\n\n alloced_sym\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 52, "score": 184973.53997985783 }, { "content": "pub fn gen_alloc_int(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n llvm_int_value: LLVMValueRef,\n\n) -> LLVMValueRef {\n\n unsafe {\n\n let alloced_int =\n\n gen_alloced_box::<boxed::Int>(tcx, builder, active_alloc, box_source, b\"alloced_int\\0\");\n\n\n\n 
let value_ptr = LLVMBuildStructGEP(builder, alloced_int, 1, libcstr!(\"value_ptr\"));\n\n LLVMBuildStore(builder, llvm_int_value, value_ptr);\n\n\n\n alloced_int\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 53, "score": 184973.53997985783 }, { "content": "pub fn type_for_decl_list_destruc(\n\n list: &destruc::List<hir::Lowered>,\n\n mut guide_type_iter: Option<ListIterator<'_, ty::Poly>>,\n\n) -> ty::List<ty::Poly> {\n\n let fixed_polys = list\n\n .fixed()\n\n .iter()\n\n .map(|fixed_destruc| {\n\n let guide_type = if let Some(guide_type_iter) = guide_type_iter.as_mut() {\n\n guide_type_iter.next()\n\n } else {\n\n None\n\n };\n\n\n\n type_for_decl_destruc(fixed_destruc, guide_type)\n\n })\n\n .collect();\n\n\n\n let rest_poly = match list.rest() {\n\n Some(rest) => match rest.ty() {\n", "file_path": "compiler/typeck/destruc.rs", "rank": 54, "score": 184973.53997985783 }, { "content": "/// Returns a specific boxed ABI type to encode the given set of possible values\n\npub fn specific_boxed_abi_type_for_values<'v>(\n\n possible_values: impl Iterator<Item = &'v Value>,\n\n) -> abitype::BoxedAbiType {\n\n specific_type_for_values(possible_values, specific_boxed_abi_type_for_type_tags).clone()\n\n}\n\n\n", "file_path": "compiler/mir/specific_abi_type.rs", "rank": 55, "score": 182627.68715125648 }, { "content": "pub fn interactive_loop(ccx: Arc<CompileCtx>, include_path: Option<path::PathBuf>) {\n\n use arret_compiler::repl::{EvalKind, EvaledExprValue, EvaledLine};\n\n use rustyline::error::ReadlineError;\n\n\n\n // Setup our REPL backend\n\n let repl_ctx = arret_compiler::repl::ReplCtx::new(ccx.clone());\n\n\n\n // Setup Rustyline\n\n let mut rl = rustyline::Editor::<ArretHelper>::new();\n\n\n\n // Import [stdlib base] so we have most useful things defined\n\n let initial_import = \"(import [stdlib base])\".to_owned();\n\n repl_ctx.send_line(initial_import, EvalKind::Value).unwrap();\n\n let mut sent_prelude_lines = 1;\n\n\n\n if let 
Some(include_path) = include_path {\n\n let include_file = fs::File::open(include_path).unwrap();\n\n\n\n // Import the include file line-by-line\n\n for line in BufReader::new(include_file).lines() {\n", "file_path": "driver/subcommand/repl/mod.rs", "rank": 56, "score": 182189.34415903618 }, { "content": "pub fn gen_record_field_ptr(\n\n tcx: &TargetCtx,\n\n builder: LLVMBuilderRef,\n\n record_storage: boxed::RecordStorage,\n\n llvm_boxed_record: LLVMValueRef,\n\n field_index: usize,\n\n pointer_name: &[u8],\n\n) -> LLVMValueRef {\n\n unsafe {\n\n let llvm_i32 = LLVMInt32TypeInContext(tcx.llx);\n\n\n\n match record_storage {\n\n boxed::RecordStorage::Inline(_) => {\n\n let field_gep_indices = &mut [\n\n LLVMConstInt(llvm_i32, 0, 0),\n\n LLVMConstInt(llvm_i32, u64::from(DATA_INDEX), 0),\n\n LLVMConstInt(llvm_i32, field_index as u64, 0),\n\n ];\n\n\n\n LLVMBuildInBoundsGEP(\n", "file_path": "compiler/codegen/record_struct.rs", "rank": 57, "score": 181693.62708562278 }, { "content": "pub fn gen_alloc_boxed_record(\n\n tcx: &mut TargetCtx,\n\n mcx: &mut ModCtx<'_, '_, '_>,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n input: &RecordInput<'_>,\n\n) -> LLVMValueRef {\n\n let RecordInput {\n\n record_struct,\n\n llvm_fields,\n\n } = input;\n\n\n\n let record_class_id = mcx.record_class_id_for_struct(record_struct);\n\n\n\n unsafe {\n\n let llvm_i8 = LLVMInt8TypeInContext(tcx.llx);\n\n let llvm_i32 = LLVMInt32TypeInContext(tcx.llx);\n\n\n\n let record_struct::TargetRecordStruct {\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 58, "score": 181123.66161873227 }, { "content": "pub fn gen_alloc_boxed_pair(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n box_source: BoxSource,\n\n input: &PairInput,\n\n) -> LLVMValueRef {\n\n let PairInput {\n\n llvm_head,\n\n llvm_rest,\n\n llvm_list_len,\n\n } = input;\n\n\n\n unsafe {\n\n let alloced_pair = 
gen_alloced_box::<boxed::Pair>(\n\n tcx,\n\n builder,\n\n active_alloc,\n\n box_source,\n\n b\"alloced_pair\\0\",\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 59, "score": 181123.66161873227 }, { "content": "pub fn visit_value_root(strong_pass: &mut boxed::collect::StrongPass, value: &mut Value) {\n\n match value {\n\n Value::Const(ref mut any_ref) => strong_pass.visit_box(any_ref),\n\n Value::List(ref mut fixed, ref mut rest) => {\n\n for any_ref in fixed.iter_mut() {\n\n visit_value_root(strong_pass, any_ref);\n\n }\n\n for any_ref in rest {\n\n visit_value_root(strong_pass, any_ref);\n\n }\n\n }\n\n Value::ArretFun(ref mut arret_fun) => {\n\n for (_, value) in arret_fun.env_values_mut().const_values.iter_mut() {\n\n visit_value_root(strong_pass, value);\n\n }\n\n for (_, value) in arret_fun.env_values_mut().free_values.iter_mut() {\n\n visit_value_root(strong_pass, value);\n\n }\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "compiler/mir/value/mod.rs", "rank": 60, "score": 179678.47734830691 }, { "content": "/// Builds the regs and ops for loading the argument list of a function\n\n///\n\n/// This results in an argument list value which contains all arguments passed to the function.\n\npub fn build_load_arg_list_value(\n\n ehx: &mut EvalHirCtx,\n\n b: &mut Builder,\n\n polymorph_abi: &PolymorphAbi,\n\n param_list_poly: &ty::List<ty::Poly>,\n\n) -> LoadedArgList {\n\n use crate::mir::value::from_reg::reg_to_value;\n\n use crate::ty::list_iter::ListIterator;\n\n\n\n let captures_reg: Option<BuiltReg> = if polymorph_abi.has_captures {\n\n Some(b.alloc_local())\n\n } else {\n\n None\n\n };\n\n\n\n let mut param_list_poly_iter = ListIterator::new(param_list_poly);\n\n\n\n let fixed_reg_values: Vec<(ops::RegId, Value)> = polymorph_abi\n\n .fixed_params\n\n .iter()\n", "file_path": "compiler/mir/arg_list.rs", "rank": 62, "score": 178173.63442473105 }, { "content": "fn new_eq_pred_arret_fun(span: Span) -> value::ArretFun {\n\n let expr_params = [\n\n 
ExprParam {\n\n source_name: \"left\".into(),\n\n poly_type: Ty::Any.into(),\n\n },\n\n ExprParam {\n\n source_name: \"right\".into(),\n\n poly_type: Ty::Any.into(),\n\n },\n\n ];\n\n\n\n let wrapped_expr = hir::Expr {\n\n result_ty: Ty::EqPred.into(),\n\n kind: hir::ExprKind::EqPred(span),\n\n };\n\n\n\n wrap_mono_expr_in_arret_fun(\n\n span,\n\n \"=\".into(),\n\n &expr_params,\n\n Ty::Bool.into(),\n\n wrapped_expr,\n\n )\n\n}\n\n\n", "file_path": "compiler/mir/value/synthetic_fun.rs", "rank": 63, "score": 178135.31620324776 }, { "content": "/// Pushes the arguments for a list constructor on to the passed `Vec`\n\n///\n\n/// This is used to share code between list and function types\n\nfn push_list_parts<M: ty::Pm>(list_parts: &mut Vec<String>, list_ref: &ty::List<M>) {\n\n for fixed in list_ref.fixed() {\n\n list_parts.push(str_for_ty_ref(fixed));\n\n }\n\n\n\n let rest = list_ref.rest();\n\n if !rest.is_never() {\n\n list_parts.push(\"&\".to_owned());\n\n list_parts.push(str_for_ty_ref(rest));\n\n }\n\n}\n\n\n", "file_path": "compiler/hir/types.rs", "rank": 64, "score": 178027.7058708351 }, { "content": "/// Updates `ops` in-place to replace allocs of the same native value with `OpKind::Alias`\n\npub fn remove_redundant_alloc_ops(ops: &mut [ops::Op]) {\n\n let mut native_to_boxed = HashMap::new();\n\n remove_branch_redundant_alloc_ops(ops, &mut native_to_boxed)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use crate::source::EMPTY_SPAN;\n\n\n\n #[test]\n\n fn test_box_different_native_regs() {\n\n let native_reg1 = ops::RegId::alloc();\n\n let boxed_reg1 = ops::RegId::alloc();\n\n\n\n let native_reg2 = ops::RegId::alloc();\n\n let boxed_reg2 = ops::RegId::alloc();\n\n\n\n let ops = &mut [\n", "file_path": "compiler/mir/optimise/duplicate_alloc_ops.rs", "rank": 65, "score": 177747.92200365954 }, { "content": "pub fn gen_alloc_boxed_fun_thunk(\n\n tcx: &mut TargetCtx,\n\n builder: LLVMBuilderRef,\n\n active_alloc: &mut ActiveAlloc<'_>,\n\n 
box_source: BoxSource,\n\n input: &FunThunkInput,\n\n) -> LLVMValueRef {\n\n let FunThunkInput {\n\n llvm_captures,\n\n llvm_entry_point,\n\n } = input;\n\n\n\n unsafe {\n\n let alloced_fun_thunk = gen_alloced_box::<boxed::FunThunk>(\n\n tcx,\n\n builder,\n\n active_alloc,\n\n box_source,\n\n b\"alloced_fun_thunk\\0\",\n\n );\n", "file_path": "compiler/codegen/alloc/types.rs", "rank": 66, "score": 177519.09881001647 }, { "content": "#[arret_rfi_derive::rust_fun(\"(Num & Num -> Bool)\")]\n\npub fn stdlib_num_eq(initial: Gc<boxed::Num>, rest: Gc<boxed::List<boxed::Num>>) -> bool {\n\n compare_nums(initial, rest, i64::eq, f64::eq)\n\n}\n\n\n", "file_path": "stdlib/rust/number.rs", "rank": 67, "score": 176943.08894830238 }, { "content": "/// Annotates an existing value with Arret type information\n\n///\n\n/// For the majority of values this is a no-op. For this reason this function takes a builder for\n\n/// the Arret type that is only invoked if the type information can be used.\n\npub fn value_with_arret_ty<F>(\n\n heap: &mut impl boxed::AsHeap,\n\n value: Value,\n\n build_arret_ty: F,\n\n) -> Value\n\nwhere\n\n F: FnOnce() -> ty::Ref<ty::Mono>,\n\n{\n\n if let Value::Reg(reg_value) = value {\n\n use crate::mir::value::from_reg::refine_reg_value_with_arret_ty;\n\n\n\n // This could be useful; request the type\n\n let arret_ty = build_arret_ty();\n\n refine_reg_value_with_arret_ty(heap, &reg_value, &arret_ty)\n\n } else {\n\n value\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "compiler/mir/value/types.rs", "rank": 68, "score": 176508.55165053127 }, { "content": "pub fn known_record_cons_for_value<'a>(\n\n ehx: &'a EvalHirCtx,\n\n value: &'a Value,\n\n) -> Option<&'a record::ConsId> {\n\n match value {\n\n Value::Const(any_ref) => any_ref.downcast_ref::<boxed::Record>().map(|record_ref| {\n\n ehx.cons_for_jit_record_class_id(record_ref.class_id())\n\n .expect(\"unable to lookup record cons for JIT record class ID\")\n\n }),\n\n Value::Record(cons, _) => 
Some(cons),\n\n Value::Reg(reg_value) => {\n\n if let TypeHint::KnownRecordCons(ref cons) = reg_value.type_hint {\n\n Some(cons)\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "compiler/mir/value/types.rs", "rank": 69, "score": 176498.25846062446 }, { "content": "/// Adds regs referenced by the passed value\n\nfn add_value_used_regs(value: &Value, used_regs: &mut HashSet<ops::RegId>) {\n\n match value {\n\n Value::Const(_)\n\n | Value::RustFun(_)\n\n | Value::TyPred(_)\n\n | Value::EqPred\n\n | Value::RecordCons(_)\n\n | Value::FieldAccessor(_, _) => {}\n\n Value::ArretFun(arret_fun) => {\n\n for (_, free_value) in arret_fun.env_values().free_values.iter() {\n\n add_value_used_regs(free_value, used_regs);\n\n }\n\n }\n\n Value::Reg(reg_value) => {\n\n used_regs.insert(reg_value.reg.into());\n\n }\n\n Value::Record(_, field_values) => {\n\n for field_value in field_values.iter() {\n\n add_value_used_regs(field_value, used_regs);\n\n }\n", "file_path": "compiler/mir/optimise/unused_ops.rs", "rank": 70, "score": 175882.95885925135 }, { "content": "/// Returns a TypeTagSet containing the possible type tags for a given value\n\npub fn possible_type_tags_for_value(value: &Value) -> TypeTagSet {\n\n match value {\n\n Value::Const(any_ref) => any_ref.header().type_tag().into(),\n\n Value::ArretFun(_)\n\n | Value::RustFun(_)\n\n | Value::TyPred(_)\n\n | Value::EqPred\n\n | Value::RecordCons(_)\n\n | Value::FieldAccessor(_, _) => boxed::TypeTag::FunThunk.into(),\n\n Value::List(fixed, rest) => {\n\n if !fixed.is_empty() {\n\n // Non-empty list\n\n boxed::TypeTag::Pair.into()\n\n } else if let Some(tail) = rest {\n\n possible_type_tags_for_value(tail)\n\n } else {\n\n // Empty list\n\n boxed::TypeTag::Nil.into()\n\n }\n\n }\n\n Value::Record(_, _) => boxed::TypeTag::Record.into(),\n\n Value::Reg(reg_value) => reg_value.possible_type_tags,\n\n }\n\n}\n\n\n", "file_path": "compiler/mir/value/types.rs", "rank": 71, "score": 173325.0916042196 }, 
{ "content": "/// Returns if an expression can have a side effect\n\n///\n\n/// This is used for very basic dead code elimination during type checking.\n\npub fn expr_can_side_effect(expr: &hir::Expr<hir::Inferred>) -> bool {\n\n use hir::ExprKind;\n\n match &expr.kind {\n\n ExprKind::LocalRef(_, _)\n\n | ExprKind::ExportRef(_, _)\n\n | ExprKind::Lit(_)\n\n | ExprKind::EqPred(_)\n\n | ExprKind::TyPred(_, _)\n\n | ExprKind::RecordCons(_, _)\n\n | ExprKind::FieldAccessor(_)\n\n | ExprKind::Fun(_)\n\n | ExprKind::RustFun(_) => false,\n\n ExprKind::Do(exprs) => exprs.iter().any(expr_can_side_effect),\n\n ExprKind::MacroExpand(_, inner) => expr_can_side_effect(inner),\n\n ExprKind::Cond(cond) => {\n\n expr_can_side_effect(&cond.test_expr)\n\n || expr_can_side_effect(&cond.true_expr)\n\n || expr_can_side_effect(&cond.false_expr)\n\n }\n\n ExprKind::Let(let_expr) => {\n", "file_path": "compiler/typeck/dce.rs", "rank": 72, "score": 172257.31726171967 }, { "content": "pub fn gen_boxed_nil(tcx: &mut TargetCtx, mcx: &mut ModCtx<'_, '_, '_>) -> LLVMValueRef {\n\n tcx.ptr_to_singleton_box(mcx.module, boxed::TypeTag::Nil, b\"ARRET_NIL\\0\")\n\n}\n\n\n", "file_path": "compiler/codegen/const_gen.rs", "rank": 73, "score": 171605.11352120948 }, { "content": "fn c_main_llvm_type(tcx: &mut TargetCtx) -> LLVMTypeRef {\n\n unsafe {\n\n let llvm_argc_type = LLVMInt32TypeInContext(tcx.llx);\n\n let llvm_argv_type = LLVMPointerType(LLVMPointerType(LLVMInt8TypeInContext(tcx.llx), 0), 0);\n\n let llvm_ret_type = LLVMInt32TypeInContext(tcx.llx);\n\n\n\n let llvm_arg_types = &mut [llvm_argc_type, llvm_argv_type];\n\n\n\n LLVMFunctionType(\n\n llvm_ret_type,\n\n llvm_arg_types.as_mut_ptr(),\n\n llvm_arg_types.len() as u32,\n\n 0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/program.rs", "rank": 74, "score": 171483.92054677146 }, { "content": "pub fn expect_spanned_ns_ident(datum: NsDatum, usage: &'static str) -> Result<(Span, Ident)> {\n\n if let NsDatum::Ident(span, ident) = datum 
{\n\n Ok((span, ident))\n\n } else {\n\n Err(Error::new(\n\n datum.span(),\n\n ErrorKind::ExpectedSym(\n\n ExpectedSym {\n\n found: datum.description(),\n\n usage,\n\n }\n\n .into(),\n\n ),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "compiler/hir/util.rs", "rank": 75, "score": 171258.7086604383 }, { "content": "/// Parses the passed import set datum\n\n///\n\n/// This produces an AST without performing the import itself.\n\npub fn parse_import_set(import_set_datum: &Datum) -> Result<ParsedImportSet> {\n\n let span = import_set_datum.span();\n\n\n\n match import_set_datum {\n\n Datum::Vector(_, vs) => {\n\n let module_name = parse_module_name(span, vs.as_ref())?;\n\n Ok(ParsedImportSet::Module(span, module_name))\n\n }\n\n Datum::List(_, vs) if vs.len() >= 2 => {\n\n let filter_datum = &vs[0];\n\n\n\n let inner_import_datum = &vs[1];\n\n let filter_input = parse_import_set(inner_import_datum)?;\n\n\n\n let filter = parse_filter(span, filter_datum, &filter_input, &vs[2..])?;\n\n Ok(ParsedImportSet::Filter(filter, Box::new(filter_input)))\n\n }\n\n _ => Err(Error::new(span, ErrorKind::BadImportSet)),\n\n }\n\n}\n", "file_path": "compiler/hir/import/parse.rs", "rank": 76, "score": 170759.92107488462 }, { "content": "/// Determines if an op requires the heap to be in a consistent state before it's executed\n\n///\n\n/// Our `AllocAtom`s cannot span these operations\n\nfn op_needs_heap_checkpoint(tcx: &mut TargetCtx, op: &ops::Op) -> bool {\n\n use crate::mir::ops::OpKind;\n\n\n\n match op.kind() {\n\n OpKind::Ret(_)\n\n | OpKind::RetVoid\n\n | OpKind::Unreachable\n\n | OpKind::Call(_, _)\n\n | OpKind::Panic(_)\n\n | OpKind::Int64CheckedAdd(_, _)\n\n | OpKind::Int64CheckedSub(_, _)\n\n | OpKind::Int64CheckedMul(_, _)\n\n | OpKind::Int64CheckedDiv(_, _)\n\n | OpKind::Int64CheckedRem(_, _) => true,\n\n OpKind::Cond(cond_op) => cond_op\n\n .true_ops\n\n .iter()\n\n .chain(cond_op.false_ops.iter())\n\n // We additionally need to make sure we don't allocate in our branches. 
Otherwise we\n\n // might need to plan an allocation of a dynamic size to cover each branch. Instead\n\n // just start a new atom for each branch.\n\n .any(|op| op_needs_heap_checkpoint(tcx, op) || op_alloc_info(tcx, op).is_some()),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/alloc/plan.rs", "rank": 77, "score": 169207.58341000366 }, { "content": "pub fn server_capabilities() -> lsp_types::ServerCapabilities {\n\n lsp_types::ServerCapabilities {\n\n text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Options(\n\n lsp_types::TextDocumentSyncOptions {\n\n open_close: Some(true),\n\n change: Some(lsp_types::TextDocumentSyncKind::Incremental),\n\n ..Default::default()\n\n },\n\n )),\n\n workspace: Some(lsp_types::WorkspaceCapability {\n\n workspace_folders: Some(lsp_types::WorkspaceFolderCapability {\n\n supported: Some(true),\n\n change_notifications: Some(\n\n lsp_types::WorkspaceFolderCapabilityChangeNotifications::Bool(true),\n\n ),\n\n }),\n\n }),\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "lsp-server/capabilities.rs", "rank": 78, "score": 168857.57042463354 }, { "content": "fn arret_main_llvm_type(tcx: &mut TargetCtx) -> LLVMTypeRef {\n\n unsafe {\n\n let llvm_arg_types = &mut [tcx.task_llvm_ptr_type()];\n\n\n\n LLVMFunctionType(\n\n LLVMVoidTypeInContext(tcx.llx),\n\n llvm_arg_types.as_mut_ptr(),\n\n llvm_arg_types.len() as u32,\n\n 0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "compiler/codegen/program.rs", "rank": 79, "score": 168664.6250379691 }, { "content": "/// Removes the rest argument from the passed iterator and returns it\n\n///\n\n/// The rest argument is denoted by using `&` before a final datum\n\npub fn try_take_rest_arg(data_iter: &mut NsDataIter) -> Option<NsDatum> {\n\n let data_len = data_iter.len();\n\n if data_len < 2 {\n\n return None;\n\n }\n\n\n\n // This is gross because we need to \"peek\" at the end of the iterator\n\n if let NsDatum::Ident(_, ident) = &data_iter.as_slice()[data_len - 2] {\n\n if 
ident.is_ampersand() {\n\n let rest = data_iter.next_back();\n\n // Remove the & completely\n\n data_iter.next_back();\n\n return rest;\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "compiler/hir/util.rs", "rank": 80, "score": 168377.74140626742 }, { "content": "fn arg_is_task(arg: &syn::PatType) -> bool {\n\n if let syn::Type::Reference(_) = *arg.ty {\n\n } else {\n\n return false;\n\n };\n\n\n\n if let syn::Pat::Ident(ref pat_ident) = *arg.pat {\n\n pat_ident.ident == \"task\"\n\n } else {\n\n false\n\n }\n\n}\n\n\n\n/// Annotates a Rust function to be exported via `arret_runtime::define_rust_module!`\n\n///\n\n/// This takes a single metadata string containing the full Arret type of the function. This is used\n\n/// to express concepts in Arret that don't exist in Rust. These include rest arguments and function\n\n/// purity.\n\n///\n\n/// The annotated Rust function can optionally take a `arret_runtime::task::Task` as its first\n\n/// parameter. An attempt will be made to encode the types of the remaining parameters but only\n\n/// certain primitive types and `arret_runtime::boxed` values are allowed.\n", "file_path": "rfi-derive/lib.rs", "rank": 81, "score": 168135.77921013892 }, { "content": "#[arret_rfi_derive::rust_fun(\"(Any -> Int)\")]\n\npub fn stdlib_hash(task: &mut Task, input: Gc<boxed::Any>) -> i64 {\n\n let mut state = DefaultHasher::new();\n\n\n\n input.hash_in_heap(task.heap(), &mut state);\n\n state.finish() as i64\n\n}\n", "file_path": "stdlib/rust/hash.rs", "rank": 82, "score": 168007.62353630958 }, { "content": "/// Return a specific ABI type to compactly encode the given value\n\npub fn specific_abi_type_for_value(value: &Value) -> abitype::AbiType {\n\n specific_abi_type_for_values(std::iter::once(value))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::hir::poly_for_str;\n\n use arret_runtime::abitype::EncodeBoxedAbiType;\n\n use arret_runtime::boxed;\n\n\n\n fn assert_abi_type_for_str(abi_type: abitype::AbiType, 
ty_str: &'static str) {\n\n let poly = poly_for_str(ty_str);\n\n assert_eq!(abi_type, specific_abi_type_for_ty_ref(&poly));\n\n }\n\n\n\n #[test]\n\n fn test_specific_abi_type_for_ty_ref() {\n\n assert_abi_type_for_str(abitype::AbiType::Bool, \"true\");\n\n assert_abi_type_for_str(abitype::AbiType::Bool, \"false\");\n", "file_path": "compiler/mir/specific_abi_type.rs", "rank": 83, "score": 167597.16546838675 }, { "content": "fn lower_literal_vec(literal_data: Vec<NsDatum>) -> Result<Vec<ty::Ref<ty::Poly>>> {\n\n literal_data.into_iter().map(lower_literal).collect()\n\n}\n\n\n", "file_path": "compiler/hir/types.rs", "rank": 84, "score": 167004.7917216824 }, { "content": "type Inner = u32;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Default)]\n\npub struct TypeTagSet(Inner);\n\n\n\n/// Efficient representation of a set of TypeTag\n\nimpl TypeTagSet {\n\n pub fn new() -> TypeTagSet {\n\n TypeTagSet(0)\n\n }\n\n\n\n pub fn all() -> TypeTagSet {\n\n ALL_TYPE_TAGS.iter().collect()\n\n }\n\n\n\n pub fn is_empty(self) -> bool {\n\n self.0 == 0\n\n }\n\n\n\n pub fn len(self) -> usize {\n", "file_path": "compiler/mir/tagset.rs", "rank": 85, "score": 166803.09348480642 }, { "content": "pub fn try_extract_import_set(datum: &Datum) -> Option<&[Datum]> {\n\n if let Datum::List(_, vs) = datum {\n\n match vs.as_ref() {\n\n [Datum::Sym(_, name), import_set @ ..] 
if name.as_ref() == \"import\" => Some(import_set),\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "compiler/hir/import/mod.rs", "rank": 86, "score": 166583.42248605247 }, { "content": "/// Determines if two purity refs are equivalent\n\n///\n\n/// This is for symmetry with [`ty_refs_equivalent`]\n\npub fn purity_refs_equivalent(purity_ref1: &purity::Ref, purity_ref2: &purity::Ref) -> bool {\n\n purity_ref1 == purity_ref2\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use crate::hir::{poly_for_str, tvar_bounded_by};\n\n use crate::source::EMPTY_SPAN;\n\n\n\n #[test]\n\n fn sym_types() {\n\n let foo_sym = poly_for_str(\"'foo\");\n\n let bar_sym = poly_for_str(\"'bar\");\n\n let any_sym = poly_for_str(\"Sym\");\n\n let any_int = poly_for_str(\"Int\");\n\n\n\n assert!(ty_ref_is_a(&foo_sym, &foo_sym));\n\n assert!(!ty_ref_is_a(&foo_sym, &bar_sym));\n", "file_path": "compiler/ty/is_a.rs", "rank": 87, "score": 165960.82027236227 }, { "content": "pub fn error_for_line(mut line: &str) -> Option<arret_syntax::error::Error> {\n\n use arret_syntax::parser::datum_from_str_with_span_offset;\n\n\n\n let span_offset = if line.starts_with(TYPE_ONLY_PREFIX) {\n\n line = &line[TYPE_ONLY_PREFIX.len()..];\n\n TYPE_ONLY_PREFIX.len()\n\n } else {\n\n 0\n\n };\n\n\n\n // Is this a command?\n\n if line.starts_with('/') ||\n\n // Or empty?\n\n line.chars().all(char::is_whitespace) ||\n\n // Or is too large to parse interactively?\n\n line.len() > MAXIMUM_PARSED_LINE_LEN\n\n {\n\n return None;\n\n }\n\n\n\n datum_from_str_with_span_offset(None, line, span_offset as ByteIndex).err()\n\n}\n\n\n", "file_path": "driver/subcommand/repl/syntax.rs", "rank": 88, "score": 165747.7403633368 }, { "content": "pub fn data_from_str(file_id: Option<FileId>, s: &str) -> Result<Vec<Datum>> {\n\n data_from_str_with_span_offset(file_id, s, 0)\n\n}\n\n\n", "file_path": "syntax/parser.rs", "rank": 89, "score": 165529.50031041482 }, { "content": "pub fn 
create_initialize_response() -> lsp_types::InitializeResult {\n\n lsp_types::InitializeResult {\n\n server_info: Some(lsp_types::ServerInfo {\n\n name: \"arret-lsp-server\".to_owned(),\n\n version: option_env!(\"CARGO_PKG_VERSION\").map(str::to_owned),\n\n }),\n\n capabilities: server_capabilities(),\n\n }\n\n}\n\n\n\n/// Runs a session loop against the provided connection\n\n///\n\n/// On a clean exit (`shutdown` followed by `exit`) this will return `Ok`, otherwise it will return\n\n/// `Err`.\n\npub async fn run(connection: Connection) -> Result<(), ()> {\n\n let Connection {\n\n mut incoming,\n\n outgoing,\n\n } = connection;\n\n\n", "file_path": "lsp-server/session.rs", "rank": 90, "score": 165475.59975505687 }, { "content": "type Result<T> = result::Result<T, Vec<Error>>;\n\n\n", "file_path": "compiler/hir/import/filter.rs", "rank": 91, "score": 165433.8901936374 }, { "content": "pub fn has_subtypes<M: ty::Pm>(ty_ref: &ty::Ref<M>) -> bool {\n\n ty_ref\n\n .try_to_fixed()\n\n .map(|ty| ty_has_subtypes(ty))\n\n .unwrap_or(true)\n\n}\n\n\n", "file_path": "compiler/ty/props.rs", "rank": 92, "score": 165375.0954530465 }, { "content": "pub fn is_literal<M: ty::Pm>(ty_ref: &ty::Ref<M>) -> bool {\n\n ty_ref\n\n .try_to_fixed()\n\n .map(|ty| ty_is_literal(ty))\n\n .unwrap_or(false)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use crate::hir::poly_for_str;\n\n use crate::source::EMPTY_SPAN;\n\n use crate::ty::record;\n\n use crate::ty::ty_args::TyArgs;\n\n\n\n fn str_has_subtypes(datum_str: &str) -> bool {\n\n let poly = poly_for_str(datum_str);\n\n has_subtypes(&poly)\n\n }\n", "file_path": "compiler/ty/props.rs", "rank": 93, "score": 165375.0954530465 }, { "content": "#[arret_rfi_derive::rust_fun(\"(Int Int -> Int)\")]\n\npub fn stdlib_quot(task: &mut Task, numerator: i64, denominator: i64) -> i64 {\n\n match numerator.checked_div(denominator) {\n\n Some(result) => result,\n\n None => {\n\n task.panic(\"division by zero\".to_owned());\n\n 
unreachable!(\"returned from panic\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "stdlib/rust/math.rs", "rank": 94, "score": 165168.31504557456 }, { "content": "#[arret_rfi_derive::rust_fun(\"(Int Int -> Int)\")]\n\npub fn stdlib_rem(task: &mut Task, numerator: i64, denominator: i64) -> i64 {\n\n match numerator.checked_rem(denominator) {\n\n Some(result) => result,\n\n None => {\n\n task.panic(\"division by zero\".to_owned());\n\n unreachable!(\"returned from panic\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "stdlib/rust/math.rs", "rank": 95, "score": 165168.31504557456 }, { "content": "#[arret_rfi_derive::rust_fun(\"(Num -> Int)\")]\n\npub fn stdlib_int(task: &mut Task, input: Gc<boxed::Num>) -> i64 {\n\n match input.as_subtype() {\n\n boxed::NumSubtype::Int(int_ref) => int_ref.value(),\n\n boxed::NumSubtype::Float(float_ref) => {\n\n let float_val = float_ref.value();\n\n\n\n if float_val.is_nan() {\n\n task.panic(format!(\n\n \"Float value `{}` is not a number; cannot convert to Int\",\n\n float_val\n\n ));\n\n } else if float_val.is_infinite() {\n\n task.panic(format!(\n\n \"Float value `{}` is infinite; cannot convert to Int\",\n\n float_val\n\n ));\n\n }\n\n\n\n float_val as i64\n\n }\n\n }\n\n}\n\n\n", "file_path": "stdlib/rust/number.rs", "rank": 96, "score": 165168.31504557456 }, { "content": "fn lower_type(scope: &mut Scope<'_>, self_datum: NsDatum, ty_datum: NsDatum) -> Result<()> {\n\n let (span, ident) = expect_spanned_ns_ident(self_datum, \"new type name\")?;\n\n let ty = lower_poly(scope, ty_datum)?;\n\n\n\n scope.insert_binding(span, ident, Binding::Ty(ty))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler/hir/lowering.rs", "rank": 97, "score": 164966.5153306886 }, { "content": "pub fn eval_input_file(ccx: &CompileCtx, input_file: &arret_compiler::SourceFile) -> bool {\n\n let result = try_eval_input_file(ccx, input_file);\n\n\n\n if let Err(diagnostics) = result {\n\n emit_diagnostics_to_stderr(ccx.source_loader(), diagnostics);\n\n false\n\n } else {\n\n 
true\n\n }\n\n}\n", "file_path": "driver/subcommand/eval.rs", "rank": 98, "score": 164613.60209682368 }, { "content": "pub fn specific_boxed_abi_type_for_ty_ref<M: ty::Pm>(\n\n ty_ref: &ty::Ref<M>,\n\n) -> &'static abitype::BoxedAbiType {\n\n specific_boxed_abi_type_for_type_tags(ty_ref.into())\n\n}\n\n\n", "file_path": "compiler/mir/specific_abi_type.rs", "rank": 99, "score": 163476.84341552237 } ]
Rust
tests/category_dist_sums.rs
bostontrader/bookwerx-core-rust
35f8ac82b22399ffb1ae0eabc4dcd186395d9fc3
use bookwerx_core_rust::db as D; use rocket::http::Status; use rocket::local::Client; use bookwerx_core_rust::dfp::dfp::{DFP, Sign, dfp_abs, dfp_add}; pub fn category_dist_sums(client: &Client, apikey: &String, categories: &Vec<D::Category>) -> () { let mut response = client .get(format!( "/category_dist_sums?apikey={}&category_id=666", &apikey )) .dispatch(); let mut r: D::Sums = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 0); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![2, 1], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_start=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![9], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![7], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_start=2020-12&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = 
serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![4], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![2, 1], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_start=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![9], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![7], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_start=2020-12&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = 
serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![4], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&decorate=false", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); let r: D::Sums = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&decorate=true", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); let r: D::SumsDecorated = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); }
use bookwerx_core_rust::db as D; use rocket::http::Status; use rocket::local::Client; use bookwerx_core_rust::dfp::dfp::{DFP, Sign, dfp_abs, dfp_add};
pub fn category_dist_sums(client: &Client, apikey: &String, categories: &Vec<D::Category>) -> () { let mut response = client .get(format!( "/category_dist_sums?apikey={}&category_id=666", &apikey )) .dispatch(); let mut r: D::Sums = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 0); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![2, 1], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_start=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![9], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); assert_eq!(r.sums[0].sum, DFP { amount: vec![7], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={},{}&time_start=2020-12&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id, (categories.get(3).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 1); 
assert_eq!(r.sums[0].sum, DFP { amount: vec![4], exp: 0, sign: Sign::Positive }); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![2, 1], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_start=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![9], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: vec![7], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&time_start=2020-12&time_stop=2020-12", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); assert_eq!(dfp_abs(&(r.sums[0].sum) ), DFP { amount: 
vec![4], exp: 0, sign: Sign::Positive }); assert_eq!( dfp_add( r.sums[0].sum.clone(), r.sums[1].sum.clone()), DFP { amount: vec![], exp: 0, sign: Sign::Zero } ); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&decorate=false", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); let r: D::Sums = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); response = client .get(format!( "/category_dist_sums?apikey={}&category_id={}&decorate=true", &apikey, (categories.get(0).unwrap()).id )) .dispatch(); let r: D::SumsDecorated = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap(); assert_eq!(response.status(), Status::Ok); assert_eq!(r.sums.len(), 2); }
function_block-full_function
[ { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\nuse bookwerx_core_rust::dfp::dfp::{DFP, Sign};\n\n\n", "file_path": "tests/account_dist_sum.rs", "rank": 0, "score": 8.044035556254066 }, { "content": "use bookwerx_core_rust::dfp::dfp::{DFP, Sign, dfp_abs, dfp_add, dfp_from_string_exp};\n\n\n\n#[test]\n", "file_path": "tests/dfp.rs", "rank": 2, "score": 7.004586164653182 }, { "content": "\n\n let mut ret_val = (*dfp).clone();\n\n if (*dfp).sign == Sign::Negative {\n\n ret_val.sign = Sign::Positive;\n\n } // else if sign == Positive or Zero, do not change it.\n\n ret_val\n\n }\n\n\n\n /* Given two DFP, add them together and return a new DFP as the sum. */\n\n pub fn dfp_add( dfp1 : DFP, dfp2 : DFP) -> DFP {\n\n\n\n // Both of these now have the same exp. We can freely use n1.exp to build a return value with\n\n // the correct exp.\n\n let (n1, n2) = dfp_norm_exp( dfp1, dfp2);\n\n\n\n let ret_val =\n\n match (&n1.sign, &n2.sign) {\n\n\n\n // If one DFP is zero, return the other one.\n\n (Sign::Zero, _) => n2.clone(),\n", "file_path": "src/dfp.rs", "rank": 3, "score": 6.814637659703608 }, { "content": "pub mod dfp {\n\n\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]\n\n pub enum Sign {\n\n Positive,\n\n Negative,\n\n Zero\n\n }\n\n\n\n #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]\n\n pub struct DFP {\n\n pub amount : Vec<u8>,\n\n pub exp : i8,\n\n pub sign : Sign\n\n }\n\n\n\n /* Given a &DFP return a new DFP that is the absolute value of the original. */\n\n pub fn dfp_abs(dfp :&DFP) -> DFP {\n", "file_path": "src/dfp.rs", "rank": 4, "score": 6.387833466890703 }, { "content": " dfp_add1(n2, n1)\n\n },\n\n\n\n };\n\n dfp_norm(ret_val)\n\n }\n\n\n\n /* Given two DFP of different signs, return their sum. This is a special purpose function\n\n that exists solely to support dfp_add and has no wider general purpose use. 
The reasoning\n\n for this function's existence is that the addition of two DFP with different signs is required\n\n more than once in dfp_add and is thus factored out here.\n\n */\n\n fn dfp_add1( n1 : DFP, n2 : DFP) -> DFP {\n\n\n\n // compare |n1| and |n2|\n\n let cmp = md_compare(&n1.amount, &n2.amount);\n\n\n\n // If |n1| == |n2|, given that the signs are different, these add up to zero.\n\n if cmp == 0 {\n\n return DFP { amount: vec![], exp: 0, sign: Sign::Zero }\n", "file_path": "src/dfp.rs", "rank": 5, "score": 6.312379277851029 }, { "content": " };\n\n\n\n // |n1| != |n2| so one of them must be the larger one. We to return a DFP that contains the difference between\n\n // |larger| - |smaller| with a suitable sign.\n\n\n\n if cmp == -1 {\n\n let diff = md_sub(&n2.amount, &n1.amount);\n\n DFP { amount: *diff, exp: n1.exp, sign: Sign::Negative }\n\n } else { // cmp > 0\n\n let diff = md_sub(&n1.amount, &n2.amount);\n\n DFP { amount: *diff, exp: n1.exp, sign: Sign::Positive }\n\n }\n\n }\n\n\n\n /*\n\n Normalize a DFP. Given a DFP A, return a new DFP B that represents the same number such that the significand of DFP B has no LSD zeros.\n\n\n\n For example: [1], 3, [0,1], 2 and [0,0,1], 1 all represent 1000, but let's use the first choice as the normalized value.\n\n */\n\n fn dfp_norm( dfp : DFP) -> DFP {\n", "file_path": "src/dfp.rs", "rank": 6, "score": 6.257060151184627 }, { "content": "//use crate::dfpx::DFPx;\n\nuse crate::dfp::dfp::{DFP, Sign, dfp_add, dfp_from_string_exp};\n\nuse rocket::get;\n\nuse rocket::http::{RawStr, Status};\n\nuse rocket_contrib::json;\n\nuse std::collections::HashMap;\n\n\n\n/*\n\nGiven a comma delimited list of category_id, find all the distributions related to all accounts tagged as _all_ of the given categories, optionally filtered by time, and calculate and return the sum of the distributions for each particular account. 
Recall that the returned sums will be expressed using a decimal floating point format.\n\n\n\nA list of categories of one entry is merely a special case of the above, without a comma, and should work the same way.\n\n\n\nGiven an optional boolean decorate param, return extra decorative related fields such as account title and currency symbol.\n\n\n\nGiven an optional time_stop parameter, filter the above distributions such that\n\ndistribution.time <= time_stop before computing the sum.\n\n\n\nGiven an optional time_start parameter, filter the above distributions such that\n\ntime_start <= distribution.time before computing the sum.\n\n\n", "file_path": "src/routz/get_category_dist_sums.rs", "rank": 7, "score": 5.697527133318164 }, { "content": "//use crate::dfpx::DFPx;\n\nuse crate::dfp::dfp::{DFP, Sign, dfp_add, dfp_from_string_exp};\n\nuse rocket::get;\n\nuse rocket::http::{RawStr, Status};\n\nuse rocket_contrib::json;\n\n\n\n/*\n\nGiven an account_id, find all the distributions related to it, optionally filtered by time, and calculate and return their sum. 
Recall that the returned sum will be expressed using a decimal floating point format.\n\n\n\nGiven an optional time_stop parameter, filter the above distributions such that\n\ndistribution.time <= time_stop before computing the sum.\n\n\n\nGiven an optional time_start parameter, filter the above distributions such that\n\ntime_start <= distribution.time before computing the sum.\n\n\n\nOmitting both time_* params gives us a very simple call to find the balance _right now_!\n\n\n\nSetting a time_stop param let's us compute a balance as of a certain time and is what we do for\n\nbalance sheet items.\n\n\n\nSetting both time_* params gives us the change in balance during a time period and is what we do for\n\n income statement items.\n\n\n\nSetting only time_start doesn't seem real useful, but I'm sure somebody can find a need for doing this.\n\n */\n\n#[get(\"/account_dist_sum?<apikey>&<account_id>&<time_start>&<time_stop>\")]\n", "file_path": "src/routz/get_account_dist_sum.rs", "rank": 8, "score": 5.373949462017219 }, { "content": "\n\n //-- Positive + Positive\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![2], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![3], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1, 2], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![2, 2], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![2, 1], exp: -1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![2, 4], exp: -1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: -1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive 
};\n\n output = DFP{ amount: vec![9, 5], exp: -1, sign: Sign::Positive };\n", "file_path": "tests/dfp.rs", "rank": 9, "score": 5.128339245999905 }, { "content": " (_, Sign::Zero) => n1.clone(),\n\n\n\n // If both arguments are Positive just add them for a Positive sum.\n\n (Sign::Positive, Sign::Positive) => DFP {\n\n //amount: md_add( n1.amount, n2.amount, 0), exp: n1.exp, sign: Sign::Positive\n\n amount: md_add( n1.amount, n2.amount), exp: n1.exp, sign: Sign::Positive\n\n },\n\n\n\n // If both arguments are Negative return - (|a| + |b|)\n\n (Sign::Negative, Sign::Negative) => DFP {\n\n //amount: md_add( n1.amount, n2.amount, 0 ), exp: n1.exp, sign: Sign::Negative\n\n amount: md_add( n1.amount, n2.amount), exp: n1.exp, sign: Sign::Negative\n\n },\n\n\n\n // If the signs of the arguments are different...\n\n (Sign::Positive, Sign::Negative) => {\n\n dfp_add1(n1, n2)\n\n },\n\n\n\n (Sign::Negative, Sign::Positive) => {\n", "file_path": "src/dfp.rs", "rank": 10, "score": 5.110477408400406 }, { "content": " output = DFP{ amount: vec![2], exp: 1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![2,1], exp: -1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![8, 1], exp: -1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: -1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![9, 3], exp: -1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![8, 4], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: 1, sign: Sign::Positive 
};\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n", "file_path": "tests/dfp.rs", "rank": 11, "score": 5.109093188590883 }, { "content": " input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![4], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![2], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![5], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1, 2], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2, 2], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n", "file_path": "tests/dfp.rs", "rank": 12, "score": 5.109093188590883 }, { "content": " input2 = DFP { amount: vec![2], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![1], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1,2], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![2], exp: 1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1, 2], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![2], exp: 1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = 
DFP { amount: vec![2, 1], exp: -1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![8, 1], exp: -1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: -1, sign: Sign::Negative };\n", "file_path": "tests/dfp.rs", "rank": 13, "score": 5.109093188590883 }, { "content": " output = DFP{ amount: vec![], exp: 0, sign: Sign::Zero };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![3], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![2], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![1], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1,2], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2], exp: 1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1,2], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n", "file_path": "tests/dfp.rs", "rank": 14, "score": 5.109093188590883 }, { "content": " input1 = DFP { amount: vec![1, 2], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2, 2], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![2, 1], exp: -1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![2, 4], exp: -1, sign: Sign::Negative 
};\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: -1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![9, 5], exp: -1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![5], exp: 1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n", "file_path": "tests/dfp.rs", "rank": 15, "score": 5.109093188590883 }, { "content": " input1 = DFP { amount: vec![9, 4], exp: 1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output = DFP{ amount: vec![1, 9, 4], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: 1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: -1, sign: Sign::Negative };\n\n output = DFP{ amount: vec![1, 0, 9, 4], exp: -1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n // other\n\n input1 = DFP { amount: vec![9, 0, 0, 0, 0 ,0, 0, 8, 4, 1, 2], exp: -8, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: -8, sign: Sign::Positive };\n\n output = DFP { amount: vec![1, 0, 0, 0 ,0, 0, 8, 4, 1, 2], exp: -7, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n}\n\n\n", "file_path": "tests/dfp.rs", "rank": 16, "score": 5.090095851152552 }, { "content": " input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![9, 3], exp: -1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![8, 4], 
exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: 1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![9, 8, 4], exp: 0, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9, 4], exp: 1, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: -1, sign: Sign::Positive };\n\n output = DFP{ amount: vec![9, 9, 8, 4], exp: -1, sign: Sign::Negative };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n //-- Negative + Negative\n", "file_path": "tests/dfp.rs", "rank": 17, "score": 5.085870810343334 }, { "content": " assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![5], exp: 1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: 1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![1, 9, 4], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![9,4], exp: 1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: -1, sign: Sign::Positive };\n\n output = DFP{ amount: vec![1, 0, 9, 4], exp: -1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n //-- Positive + Negative\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n", "file_path": "tests/dfp.rs", "rank": 18, "score": 5.085870810343334 }, { "content": " output = DFP{ amount: vec![9, 8, 4], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: 
vec![9,4], exp: 1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: -1, sign: Sign::Negative };\n\n output = DFP{ amount: vec![9, 9, 8, 4], exp: -1, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n //-- Negative + Positive\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![], exp: 0, sign: Sign::Zero };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Positive };\n\n output = DFP{ amount: vec![2], exp: 0, sign: Sign::Positive };\n\n assert_eq!( dfp_add(input1, input2), output);\n\n\n\n input1 = DFP { amount: vec![3], exp: 0, sign: Sign::Negative };\n", "file_path": "tests/dfp.rs", "rank": 19, "score": 5.085870810343334 }, { "content": " input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n assert_eq!((output1, output2),dfp_norm_exp(input1,input2));\n\n\n\n // n1.exp > n2.exp\n\n input1 = DFP { amount: vec![1], exp: 1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n output1 = DFP { amount: vec![0,1], exp: 0, sign: Sign::Positive };\n\n output2 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n\n assert_eq!((output1, output2),dfp_norm_exp(input1,input2));\n\n\n\n // n1.exp < n2.exp\n\n input1 = DFP { amount: vec![2], exp: 0, sign: Sign::Negative };\n\n input2 = DFP { amount: vec![1], exp: 1, sign: Sign::Positive };\n\n output1 = DFP { amount: vec![2], exp: 0, sign: Sign::Negative };\n\n output2 = DFP { amount: vec![0,1], exp: 0, sign: Sign::Positive };\n\n assert_eq!((output1, output2),dfp_norm_exp(input1,input2));\n\n\n\n // 1.2, 3\n", "file_path": "src/dfp.rs", "rank": 20, 
"score": 5.0652277506327055 }, { "content": "\n\n #[test]\n\n fn dfp_norm_test() {\n\n\n\n let mut input;\n\n let mut output;\n\n\n\n input = DFP { amount: vec![], exp: 0, sign: Sign::Positive };\n\n output = DFP { amount: vec![], exp: 0, sign: Sign::Zero };\n\n assert_eq!(dfp_norm(input), output);\n\n\n\n input = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n output = DFP { amount: vec![1], exp: 0, sign: Sign::Positive };\n\n assert_eq!(dfp_norm(input), output);\n\n\n\n input = DFP { amount: vec![0, 1], exp: 0, sign: Sign::Positive };\n\n output = DFP { amount: vec![1], exp: 1, sign: Sign::Positive };\n\n assert_eq!(dfp_norm(input), output);\n\n }\n\n\n", "file_path": "src/dfp.rs", "rank": 21, "score": 4.964089711724742 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse bookwerx_core_rust::routes as R;\n\nuse bookwerx_core_rust::routz as Z;\n\n\n\nuse rocket::config::{Config, Environment};\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\nuse std::collections::HashMap;\n\n\n\n#[test]\n", "file_path": "tests/kahuna-grande.rs", "rank": 22, "score": 4.921888475737138 }, { "content": " input1 = DFP { amount: vec![2, 1], exp: -1, sign: Sign::Positive };\n\n input2 = DFP { amount: vec![3], exp: 0, sign: Sign::Positive };\n\n output1 = DFP { amount: vec![2, 1], exp: -1, sign: Sign::Positive };\n\n output2 = DFP { amount: vec![0, 3], exp: -1, sign: Sign::Positive };\n\n assert_eq!((output1, output2),dfp_norm_exp(input1,input2));\n\n }\n\n\n\n pub fn dfp_from_string_exp(s :&String, exp :i8) -> DFP {\n\n\n\n // 1. 
Given the input s, produce (neg_sign :bool, s1 :&String) where neg_sign ==\n\n // true if s starts with \"-\" or otherwise false, and s1 is the remaining s, after\n\n // said \"-\" is removed, if any.\n\n let l = s.len();\n\n\n\n let n1 = if l == 0 {\n\n \"\"\n\n } else {\n\n &s[0..1]\n\n };\n\n\n", "file_path": "src/dfp.rs", "rank": 23, "score": 4.880214568999576 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n", "file_path": "tests/currencies.rs", "rank": 24, "score": 4.832033984471209 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n", "file_path": "tests/transactions.rs", "rank": 25, "score": 4.832033984471209 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Examine accounts\n", "file_path": "tests/accounts.rs", "rank": 26, "score": 4.790878067232663 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Examine categories\n", "file_path": "tests/categories.rs", "rank": 27, "score": 4.790878067232663 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Examine acctcats\n", "file_path": "tests/acctcats.rs", "rank": 28, "score": 4.790878067232663 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Examine trancats\n", "file_path": "tests/trancats.rs", "rank": 29, "score": 4.790878067232663 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n", "file_path": "tests/linter.rs", "rank": 30, "score": 
4.737631129050905 }, { "content": " assert_eq!(dfp_from_string_exp(&(\"-1\".to_string()), 5), output);\n\n\n\n output = DFP{ amount: vec![2,9,7,9,8,5,3,5,6,2,9,5,1,4,1,3], exp: 0, sign: Sign::Positive };\n\n assert_eq!(dfp_from_string_exp(&(\"3141592653589792\".to_string()), 0), output);\n\n\n\n output = DFP{ amount: vec![2,6,7,8,4,0,8,0,8,1,0,2], exp: -8, sign: Sign::Positive };\n\n assert_eq!(dfp_from_string_exp(&(\"201808048762\".to_string()), -8), output);\n\n\n\n output = DFP{ amount: vec![5], exp: 2, sign: Sign::Positive };\n\n assert_eq!(dfp_from_string_exp(&(\"500\".to_string()), 0), output);\n\n\n\n output = DFP{ amount: vec![5], exp: 2, sign: Sign::Negative };\n\n assert_eq!(dfp_from_string_exp(&(\"-500\".to_string()), 0), output);\n\n}\n", "file_path": "tests/dfp.rs", "rank": 31, "score": 4.729819837087241 }, { "content": "use crate::db::MyRocketSQLConn;\n\n\n\nuse crate::sql::{ensure_select_statement, execute_query, validate_columns, validate_tables};\n\nuse rocket::http::RawStr;\n\nuse rocket::State;\n\nuse rocket_contrib::json::JsonValue;\n\nuse std::collections::{HashMap, HashSet};\n\n\n\n#[rocket::get(\"/sql?<query>&<apikey>\")]\n", "file_path": "src/routz/sql.rs", "rank": 32, "score": 4.7232749053949 }, { "content": " let ln = dfp.amount.len();\n\n if ln == 0 {\n\n DFP { amount: vec![], exp: 0, sign: Sign::Zero }\n\n } else if ln == 1 {\n\n if dfp.amount[0] == 0 {\n\n DFP { amount: vec![], exp: 0, sign: Sign::Zero }\n\n } else {\n\n dfp.clone()\n\n }\n\n } else { // ln > 1\n\n if dfp.amount[0] == 0 {\n\n let mut v1 = dfp.amount.clone();\n\n let _ = v1.remove(0);\n\n let new_dfp = DFP { amount: v1, exp: dfp.exp + 1, sign: dfp.sign };\n\n dfp_norm( new_dfp)\n\n } else {\n\n dfp.clone()\n\n }\n\n }\n\n }\n", "file_path": "src/dfp.rs", "rank": 33, "score": 4.671657522847696 }, { "content": "use rocket::get;\n\nuse rocket::http::{RawStr, Status};\n\nuse rocket_contrib::json;\n\n\n\n#[get(\"/linter/categories?<apikey>\")]\n", "file_path": 
"src/routz/get_linter_categories.rs", "rank": 34, "score": 4.65914646745707 }, { "content": "use bookwerx_core_rust::db as D;\n\n\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Now try to delete things. Ensure that referential integrity constraints prevent inappropriate deletions.\n", "file_path": "tests/deletor.rs", "rank": 35, "score": 4.633034168441781 }, { "content": "use bookwerx_core_rust::db::PostApikeysResponse;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n// Get an API key\n", "file_path": "tests/apikey.rs", "rank": 36, "score": 4.6082522395600485 }, { "content": "use rocket::get;\n\nuse rocket::http::{RawStr, Status};\n\nuse rocket_contrib::json;\n\n\n\n// Find unused currencies.\n\n#[get(\"/linter/currencies?<apikey>\")]\n", "file_path": "src/routz/get_linter_currencies.rs", "rank": 37, "score": 4.5832198183252535 }, { "content": "use rocket::get;\n\nuse rocket::http::{RawStr, Status};\n\nuse rocket_contrib::json;\n\n\n\n// Find unused accounts\n\n#[get(\"/linter/accounts?<apikey>\")]\n", "file_path": "src/routz/get_linter_accounts.rs", "rank": 38, "score": 4.5832198183252535 }, { "content": "use bookwerx_core_rust::db as D;\n\nuse rocket::http::ContentType;\n\nuse rocket::http::Status;\n\nuse rocket::local::Client;\n\n\n\n/* Please see the comments for transactions for a discussion of constraints in this test.\n\nSpecifically don't change the amounts.\n\n\n\n These tests include testing the correct number of distributions for_account and for_tx\n\n*/\n", "file_path": "tests/distributions.rs", "rank": 39, "score": 4.485259413383994 }, { "content": "use crate::db::{\n\n APIResponse, Distribution, DistributionJoined, DistributionShort,\n\n GetDistributionJoinedResponse, GetDistributionResponse, MyRocketSQLConn,\n\n};\n\nuse regex::Regex;\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/distribution/<id>?<apikey>\")]\n", "file_path": 
"src/routz/distribution.rs", "rank": 40, "score": 4.4674477597669675 }, { "content": " // 4. Invoke the filter and map producing a Vec<u8> of the digits we want, in the correct order.\n\n let s4 :Vec<u8> = s3.rev().collect();\n\n let s4_len = s4.len();\n\n\n\n // 5. Produce the final result, normalize it, and return it.\n\n let ret_val = if s4_len == 0 {\n\n DFP { amount: vec![], exp: 0, sign: Sign::Zero }\n\n } else if neg_sign {\n\n DFP { amount: s4, exp: exp, sign: Sign::Negative }\n\n } else {\n\n DFP { amount: s4, exp: exp, sign: Sign::Positive }\n\n };\n\n\n\n dfp_norm( ret_val )\n\n\n\n }\n\n\n\n // Given two integers of a given base, and a prior carry (or zero if none) calculate their sum modulo base and return (sum, carry)\n\n fn dnc( i1 :u8, i2 :u8, carry :u8, base :u8) -> (u8, u8) {\n\n let sm = i1 + i2 + carry;\n", "file_path": "src/dfp.rs", "rank": 41, "score": 4.411972272865159 }, { "content": "#![feature(decl_macro)]\n\n\n\npub mod constants;\n\npub mod db;\n\npub mod dfp;\n\npub mod routz;\n\npub mod sql;\n\n\n\npub mod routes {\n\n\n\n use crate::db::{MyRocketSQLConn, Ping, PostApikeysResponse, Semver};\n\n use rocket::http::ContentType;\n\n use rocket::request::Request;\n\n use rocket::response;\n\n use rocket::response::{Responder, Response};\n\n use rocket_contrib::json::Json;\n\n\n\n impl<'r> Responder<'r> for crate::db::ApiResponseOld {\n\n fn respond_to(self, req: &Request) -> response::Result<'r> {\n\n Response::build_from(self.json.respond_to(&req).unwrap())\n", "file_path": "src/lib.rs", "rank": 42, "score": 4.380000555876848 }, { "content": "use crate::db::{\n\n APIResponse, GetTransactionResponse, MyRocketSQLConn, Transaction, TransactionShort,\n\n};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/transaction/<id>?<apikey>\")]\n", "file_path": "src/routz/transaction.rs", "rank": 43, "score": 4.36959565448537 }, { "content": "use crate::db::{APIResponse, Trancat, TrancatShort, GetTrancatResponse, 
MyRocketSQLConn};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/trancat/<id>?<apikey>\")]\n", "file_path": "src/routz/trancat.rs", "rank": 44, "score": 4.36959565448537 }, { "content": "use crate::db::{APIResponse, Currency, CurrencyShort, GetCurrencyResponse, MyRocketSQLConn};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/currency/<id>?<apikey>\")]\n", "file_path": "src/routz/currency.rs", "rank": 45, "score": 4.36959565448537 }, { "content": "use crate::db::{APIResponse, Acctcat, AcctcatShort, GetAcctcatResponse, MyRocketSQLConn};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/acctcat/<id>?<apikey>\")]\n", "file_path": "src/routz/acctcat.rs", "rank": 46, "score": 4.36959565448537 }, { "content": "use crate::db::{APIResponse, Category, CategoryShort, GetCategoryResponse, MyRocketSQLConn};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/category/<id>?<apikey>\")]\n", "file_path": "src/routz/category.rs", "rank": 47, "score": 4.36959565448537 }, { "content": "use crate::db::{\n\n APIResponse, Account, AccountDenormalized, AccountJoined, AccountShort, GetAccountResponse,\n\n MyRocketSQLConn,\n\n};\n\nuse rocket::http::RawStr;\n\nuse rocket_contrib::json::Json;\n\n\n\n#[rocket::delete(\"/account/<id>?<apikey>\")]\n", "file_path": "src/routz/account.rs", "rank": 48, "score": 4.280914022619302 }, { "content": " // We use a HashMap because we'll soon need easy access to these values, given a key.\n\n let mut hm = HashMap::new();\n\n\n\n // 3.3 If we have requested decorations we will eventually need an \"in clause\" of account ids to work with. It's tempting to build that into this loop now. Resist the urge. Doing so makes this needlessly complicated. 
It's simple and fast enough to build the in_clause separately.\n\n //let mut sum: DFPx = DFPx { amount: 0, exp: 0 };\n\n let mut sum: DFP = DFP { amount: vec![], exp: 0, sign: Sign::Zero };\n\n\n\n let mut prior_account_id = 0;\n\n for v in vec {\n\n if v.account_id != prior_account_id {\n\n // This is the first record of a new account_id\n\n if prior_account_id == 0 {\n\n\n\n // This is the very first time in the loop. Nothing to do yet.\n\n } else {\n\n // This is the first element that has a new account_id. Save the sum from the prior account_id\n\n hm.insert(\n\n prior_account_id,\n\n crate::db::AcctSum {\n\n account_id: prior_account_id,\n", "file_path": "src/routz/get_category_dist_sums.rs", "rank": 49, "score": 4.265349283859866 }, { "content": " } else {\n\n let mut v1 :Vec<u8> = vec![0];\n\n let mut v2 :Vec<u8> = n2.amount.clone();\n\n v1.append(&mut v2);\n\n\n\n let new_n2 = DFP { amount: v1, exp: n2.exp - 1, sign: n2.sign };\n\n dfp_norm_exp( n1, new_n2)\n\n }\n\n }\n\n\n\n #[test]\n\n fn dfp_norm_exp_test() {\n\n\n\n let mut input1;\n\n let mut input2;\n\n let mut output1;\n\n let mut output2;\n\n\n\n // n1.exp == n2.exp\n\n input1 = DFP { amount: vec![1], exp: 0, sign: Sign::Negative };\n", "file_path": "src/dfp.rs", "rank": 51, "score": 4.173712465871555 }, { "content": "use crate::dfp::dfp::DFP;\n\nuse rocket::http::Status;\n\nuse rocket::request::FromForm;\n\nuse rocket_contrib::database;\n\nuse rocket_contrib::databases::mysql;\n\nuse rocket_contrib::json::JsonValue;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[database(\"mysqldb\")]\n\npub struct MyRocketSQLConn(mysql::Conn);\n\n\n\n/*\n\nWe have a blizzard of structs for a variety of reasons. Unfortunately, it's rather tedious to\n\nmanage them. Please allow me to enumerate the problems and solutions.\n\n\n\nSome of the structs are substantially similar to the underlying db row that they model. Naming them is reasonably easy. However, many structs have \"decorations\". 
That is, account names, currency symbols, and other similar related info. Naming these is not easy, especially since we tend to many similar variations.\n\n\n\nIn addition, these structs also have a variety of derives. These derives have accumulated over the eons and there's no good way to determine if any of them are unused. Granted, an unused derive does not have significant consequence. But it does offend my finely honed sense of aesthetics and that's reason enough to fret about this.\n\n\n\nWhat we have here now is my one-time pass over all of this to cleanup whatever chaos lurks within. It's the best I can reasonably do with this foul situation.\n", "file_path": "src/db.rs", "rank": 52, "score": 4.084537306059246 }, { "content": "use std::boxed::Box;\n\n\n\n#[cfg(test)]\n\nuse std::collections::{HashMap, HashSet};\n\n\n\n// Given an API key and a table.field, build a ConditionExpression that is\n\n// suitable for inclusion in a WHERE clause\n", "file_path": "src/sql.rs", "rank": 53, "score": 4.065193591698012 }, { "content": " ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n\n\n assert_eq!(response.status(), Status::Ok);\n\n match serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap() {\n\n D::APIResponse::Error(err) => assert_eq!(err, \"amountbt contains one or more non-numeric characters.\"),\n\n _ => assert!(false),\n\n }\n\n\n\n // 3.2 Successful put, wrong and negative amount. 
Test that we can use a - sign.\n\n response = client\n\n .put(\"/distributions\")\n\n .body(format!(\n\n \"&apikey={}&id={}&account_id={}&transaction_id={}&amount=-3&amount_exp=0&amountbt=-3\",\n\n apikey, lid, account_id0, transaction_id0\n\n ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n", "file_path": "tests/distributions.rs", "rank": 54, "score": 3.9600116479258856 }, { "content": " ))\n\n .dispatch();\n\n r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap();\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(r.sum, DFP { amount: vec![2, 1], exp: 0, sign: Sign::Positive });\n\n\n\n // 2.2 time_start\n\n response = client\n\n .get(format!(\n\n \"/account_dist_sum?apikey={}&account_id={}&time_start=2020-12\",\n\n &apikey,\n\n (accounts.get(0).unwrap()).id\n\n ))\n\n .dispatch();\n\n r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap();\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(r.sum, DFP { amount: vec![9], exp: 0, sign: Sign::Positive });\n\n\n\n // 2.3 time_stop\n\n response = client\n", "file_path": "tests/account_dist_sum.rs", "rank": 56, "score": 3.9554342442158505 }, { "content": "use assert_cmd::prelude::*;\n\nuse bookwerx_core_rust::constants as C;\n\nuse predicates::prelude::*;\n\nuse std::process::Command;\n\n\n\n/*\n\nThese tests should be run one at a time so be sure to set RUST_TEST_THREADS=1 when executing the tests. For example:\n\n\n\nRUST_BACKTRACE=1 RUST_TEST_THREADS=1 cargo test --test server_config\n\n\n\nTest that we can provide the correct configuration via a mixture of command-line and the environment. Other configuration is frequently needed in order to enable the server to proceed to the behavior under test.\n\n\n\n1.1 If neither --bind_ip nor BCR_BIND_IP are specified, the server will complain and exit. 
We _must have_ an IP address for the Rocket server to use or there's nothing else to do.\n\n\n\n1.2 If either one of --bind_ip or BCR_BIND_IP are specified, the startup message will mention it. But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n1.3 If both --bind_ip and BCR_BIND_IP are specified, the startup message mentions the value from --bind_ip. But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n\n\n2.1 If neither --bind_port nor BCR_BIND_PORT are specified, the server will complain and exit. We _must have_ a port for the Rocket server to use or there's nothing else to do.\n", "file_path": "tests/server_config.rs", "rank": 59, "score": 3.798321029622083 }, { "content": " .get(format!(\n\n \"/account_dist_sum?apikey={}&account_id={}&time_stop=2020-12\",\n\n &apikey,\n\n (accounts.get(0).unwrap()).id\n\n ))\n\n .dispatch();\n\n r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap();\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(r.sum, DFP { amount: vec![7], exp: 0, sign: Sign::Positive });\n\n\n\n // 2.4 time_start and time_stop\n\n response = client\n\n .get(format!(\n\n \"/account_dist_sum?apikey={}&account_id={}&time_start=2020-12&time_stop=2020-12\",\n\n &apikey,\n\n (accounts.get(0).unwrap()).id\n\n ))\n\n .dispatch();\n\n r = serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap();\n\n assert_eq!(response.status(), Status::Ok);\n\n assert_eq!(r.sum, DFP { amount: vec![4], exp: 0, sign: Sign::Positive });\n\n}\n", "file_path": "tests/account_dist_sum.rs", "rank": 60, "score": 3.7588525084768785 }, { "content": "* Ensure that all of the fields requested are fully qualified using the table.field syntax and that said tables and fields are present in a white-list.\n\n\n\nFinally, the /sql route handler accepts an apikey as a separate parameter and we use that to modify the AST to do our own SQL injection of 
\"WHERE apikey = ? AND (any original where clause)\"\n\n\n\nThe power of this idea comes not from our primitive initial implementation. Instead, we provide a general framework for parsing, and examining and modifying the resulting AST. Using this starting point, it would be rather easy to extend.\n\n\n", "file_path": "README.md", "rank": 61, "score": 3.7160579032007384 }, { "content": "use assert_cmd::prelude::*;\n\nuse bookwerx_core_rust::constants as C;\n\nuse predicates::prelude::*;\n\nuse std::process::Command;\n\n\n\n/*\n\nThese tests should be run one at a time so be sure to set RUST_TEST_THREADS=1 when executing the tests. For example:\n\n\n\nRUST_BACKTRACE=1 RUST_TEST_THREADS=1 cargo test --test dbseed\n\n\n\nSome of these tests require access to a suitably configured mysql db.\n\n\n\n\n\n1.1 If neither --conn nor BCR_CONN are specified, the server will complain and exit. We _must have_ a connection string or there's nothing else to do.\n\n\n\n1.2 If either one of --conn or BCR_CONN are specified, the startup message will mention it. But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n1.3 If both --conn and BCR_CONN are specified, the startup message mentions the value from --conn. 
But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n\n", "file_path": "tests/dbseed.rs", "rank": 62, "score": 3.6906685577086247 }, { "content": " .body(format!(\n\n \"apikey={}&transaction_id={}&account_id={}&amount=3&amount_exp=0&amountbt=non-numeric\",\n\n apikey, transaction_id1, account_id1\n\n ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n match serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap() {\n\n D::APIResponse::Error(err) => assert_eq!(err, \"amountbt contains one or more non-numeric characters.\"),\n\n _ => assert!(false),\n\n }\n\n\n\n // We will have opportunity to POST using a minus sign later, don't test that here.\n\n\n\n // 2.3 Successful post. The amount is wrong, but we will fix it in a subsequent put.\n\n response = client\n\n .post(\"/distributions\")\n\n .body(format!(\n\n \"&apikey={}&transaction_id={}&account_id={}&amount=33&amount_exp=0&amountbt=33\",\n\n apikey, transaction_id0, account_id0\n", "file_path": "tests/distributions.rs", "rank": 63, "score": 3.676494675234702 }, { "content": "## Configuration\n\n\n\nThe binaries of **bookwerx-core-rust** do not do anything by default. If you want them to do anything useful, you'll need to ensure that they get the correct configuration options. You can deliver said options via the command line or the environment.\n\n\n\nAs described above, execute **server** or **dbseed** with the --help option to see the CLI choices. Each option has a corresponding environment variable.\n\n\n\n**dbseed** Uses the following environment variables. Each of these have a corresponding CLI option:\n\n\n\nBCR_CONN - A connection string to connect to the MySQL db. 
For example: mysql://root:supersecretpassword@172.17.0.2:3306\n\nNotice that there is no trailing \\ nor a database name.\n\n\n\nBCR_DBNAME - The name of the database to use.\n\n\n\nBCR_SEED - A file name for a file that contains SQL that will initialize the db. If this is present the db will be brain-wiped and reseeded.\n\n\n\n**server** Uses all of the above, except for BCR_SEED. In addition, it also uses:\n\n\n\nBCR_BIND_IP - An IP address for the http server to bind to.\n\n\n\nBCR_BIND_PORT - A port for the http server to bind to.\n\n\n\nBCR_MODE - Run the server in whatever mode. This is presently required, but unused.\n\n\n\n\n\n### Rocket\n\n\n\n**bookwerx-core-rust** uses Rocket as the http server, but it programmatically configures Rocket, so no other Rocket configuration is needed.\n\n\n\n### MySQL\n\n\n\n**bookwerx-core-rust** uses MySQL for the db. [This is configured separately.](https://dev.mysql.com)\n\n\n\nAlthough **bookwerx-core-rust** is able to drop and rebuild the db from an initial seed, this is a minimal thing. There are a variety of settings that you might want to tweak, such as character sets and collation, but the reseeding process does not deal with any of that. 
So you may need to examine the configuration of your MySQL server to get the particular settings that you want.\n\n\n", "file_path": "README.md", "rank": 64, "score": 3.666187673010524 }, { "content": " sum,\n\n },\n\n );\n\n }\n\n prior_account_id = v.account_id;\n\n //sum = DFPx { amount: v.amount, exp: v.amount_exp };\n\n //sum = DFPx { amount: 0, exp: 0 };\n\n sum = DFP { amount: vec![], exp: 0, sign: Sign::Zero };\n\n\n\n }\n\n\n\n // This records account_id is the same as the prior record, so just add the values\n\n //sum = sum.add(&DFPx {\n\n //amount: v.amount,\n\n //exp: v.amount_exp,\n\n //});\n\n sum = dfp_add( sum, dfp_from_string_exp(&v.amountbt, v.amount_exp) );\n\n\n\n }\n\n\n", "file_path": "src/routz/get_category_dist_sums.rs", "rank": 65, "score": 3.349188018459895 }, { "content": "// RUST_BACKTRACE=1 RUST_TEST_THREADS=1 cargo test --test kahuna-grande\n\n\n\n#![feature(proc_macro_hygiene, decl_macro)]\n\n#[macro_use]\n\nextern crate rocket;\n\n\n\nmod account_dist_sum;\n\nmod accounts;\n\nmod acctcats;\n\nmod apikey;\n\nmod categories;\n\nmod category_dist_sums;\n\nmod currencies;\n\nmod deletor;\n\nmod distributions;\n\nmod linter;\n\nmod trancats;\n\nmod transactions;\n\n\n\nuse bookwerx_core_rust::constants as C;\n", "file_path": "tests/kahuna-grande.rs", "rank": 66, "score": 3.321121693021136 }, { "content": "\n\n #[rocket::post(\"/apikeys\")]\n\n pub fn post_apikey(mut conn: MyRocketSQLConn) -> Json<PostApikeysResponse> {\n\n use rand::distributions::Alphanumeric;\n\n use rand::{thread_rng, Rng};\n\n\n\n let rand_string: String = thread_rng().sample_iter(&Alphanumeric).take(10).collect();\n\n\n\n match conn.prep_exec(\n\n format!(\"INSERT INTO apikeys (apikey) VALUES ('{}')\", rand_string),\n\n (),\n\n ) {\n\n Ok(_) => Json(PostApikeysResponse::Apikey(rand_string)),\n\n Err(err) => Json(PostApikeysResponse::Error(err.to_string())),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 67, "score": 3.1893062654206967 }, { "content": " 
crate::db::BalanceResultBt {\n\n account_id,\n\n amountbt,\n\n amount_exp,\n\n }\n\n })\n\n .collect()\n\n })\n\n .unwrap();\n\n\n\n // We now have zero or more records to sum.\n\n //let mut sum: DFPx = DFPx { amount: 0, exp: 0 };\n\n let mut sum: DFP = DFP { amount: vec![], exp: 0, sign: Sign::Zero };\n\n\n\n for n in vec {\n\n //sum = sum.add(&DFPx { amount: n.amount, exp: n.amount_exp, });\n\n sum = dfp_add( sum, dfp_from_string_exp(&n.amountbt, n.amount_exp) );\n\n }\n\n\n\n\n\n\n\n\n\n // Now build and return the http response.\n\n crate::db::ApiResponseOld {\n\n json: json!({ \"sum\": sum }),\n\n status: Status::Ok,\n\n }\n\n}\n", "file_path": "src/routz/get_account_dist_sum.rs", "rank": 68, "score": 3.060101528993702 }, { "content": " let (neg_sign, s1) = if n1 == \"-\" {\n\n (true, s)\n\n } else {\n\n (false, s)\n\n };\n\n\n\n // 2. Build a filter to remove all non-numeric characters from s1\n\n let s2 = s1.chars().filter(|x|\n\n match *x {\n\n '0' => true,\n\n '1' => true,\n\n '2' => true,\n\n '3' => true,\n\n '4' => true,\n\n '5' => true,\n\n '6' => true,\n\n '7' => true,\n\n '8' => true,\n\n '9' => true,\n\n _ => false,\n", "file_path": "src/dfp.rs", "rank": 69, "score": 3.028772520490463 }, { "content": "## The /sql endpoint\n\n\n\nA few examples to get you started:\n\n```http request\n\nhttp://localhost:3003/sql?query=SELECT accounts.id FROM accounts&apikey=catfood\n\nhttp://localhost:3003/sql?query=SELECT accounts.id, currencies.id, currencies.title FROM accounts JOIN currencies ON accounts.currency_id %3d currencies.id&apikey=catfood\n\nhttp://localhost:3003/sql?query=SELECT accounts_categories.account_id FROM accounts_categories JOIN accounts ON accounts.id%3daccounts_categories.account_id JOIN currencies ON currencies.id%3daccounts_categories.currency_id WHERE accounts_categories.category_id IN (1155, 1165) AND currencies.symbol%3d%22BTC%22 GROUP BY accounts_categories.account_id HAVING COUNT(DISTINCT 
accounts_categories.category_id)%3d2&apikey=catfood\n\n```\n\nPlease notice that we must %encode the equal sign in the query string as %3d.\n\n\n\nPlease also be aware that in more complex queries you may have parsing problems related to spaces around parentheses.\n\n\n\nBecause of the woes associated with both ordinary REST and GraphQL we have implemented the /sql endpoint whereby the client can submit a string of SQL and get back expected results. This feature is not for the faint-of-hearted because it requires knowledge of SQL generally as well as the underlying MySQL schema. Not to mention the risk of SQL injection attacks. Nevertheless, there are several safety features in place that can help you control this risk.\n\n\n\nFirst of all, we parse any SQL using [nom-sql](https://github.com/ms705/nom-sql), which is a full-blown SQL parser. An excellent choice for all your SQL parsing needs in general.\n\n\n\nNext, we examine the abstract syntax tree (AST) that the parser returns in order to:\n\n \n\n* Ensure that the query is only a SELECT statement.\n\n\n", "file_path": "README.md", "rank": 73, "score": 2.8609851556011283 }, { "content": "## REST vs GraphQL\n\n\n\nIn this project we have a variety of choices for communication between clients and this server. Two obvious choices are a RESTful API or GraphQL. We have evaluated both choices. Although GraphQL is an interesting contender, after the smoke settled, only the RESTful API emerged from Thunderdome.\n\n\n\nOne major problem with RESTful APIs is that there tends to be a proliferation of endpoints and input parameters in order to accommodate real-world usage. Naming these things and managing them generally is a tedious (but tractable) exercise. These woes led us to try using GraphQL. Unfortunately doing so proved to be a disappointment. \n\n\n\nWe encountered the following general intractable issues with GraphQL:\n\n\n\n* How can we efficiently execute the GraphQL queries? 
Doing so requires some connection to the underlying MySQL db. And doing that requires that we translate GraphQL queries into MySQL. This is easier said than done.\n\n\n\n* The entire GraphQL ecosystem is generally too complicated for our usage. A lot of it is very sophisticated and impressive but it's generally tainted by low quality documentation. Especially the very limited products available for Rust. Going beyond contrived getting-started examples is too difficult and docs.rs style reference is just not useful. We consider temptation to dissect a product's parser in order to use it to be a red flag.\n\n\n\nWe have no wish to bash GraphQL or any of the impressive products that deal with it. We leave this note here as an archaeological relic so that our progeny and successors can (however unlikely) possibly learn from history and avoid the mistakes of their ancestors. Perhaps in their time, after their heads have been thawed and their bodies regenerated, they will have access to easier-to-use GraphQL -> SQL tooling.\n\n\n\nBut until and unless that happens, soldiering on and dealing with the admittedly tedious RESTful managerial issues is tractable and still the easiest path forward for this particular project.\n\n\n", "file_path": "README.md", "rank": 74, "score": 2.80671138222351 }, { "content": "## Getting Started\n\n\n\nThe easiest way to get started is to explore [***bookwerx-ui***](https://github.com/bostontrader/bookwerx-ui-elm). It provides an [example UI](http://94.74.116.6:3005/) that demonstrates the API interaction with ***bookwerx-core***.\n\n\n\nUsing this UI you can connect to a [publicly visible demonstration server](http://94.74.116.6:3003), request an API key for your own use, and generally put the API to work. The UI also guides you through a proper sequence of API calls. For example, you cannot define an account until you have defined the currency that said account will use. 
\n\n\n\n## Installation\n\n\n\n### Prerequisites\n\n\n\n* You will need rust (nightly, because that's what Rocket demands).\n\n\n\n* You will need git.\n\n\n\n* You will need mysql.\n\n\n\nThe care and feeding of these items are beyond the scope of these instructions...\n\n\n\n... but assuming they are correctly installed...\n\n\n\n```bash\n\ngit clone https://github.com/bostontrader/bookwerx-core-rust.git\n\ncd bookwerx-core-rust\n\ncargo build --release\n\ncargo run --bin dbseed -- --help\n\ncargo run --bin server -- --help\n\n```\n\nNote the syntax for the *cargo run* commands. This executes the command and feeds the command-line arg '--help' to it. Whereupon you can further dissect the operation.\n\n\n\n**dbseed** will brain-wipe your db and reseed to a minimal usable condition. For example:\n\n\n\n```bash\n\ncargo run --bin dbseed -- --conn mysql://root:supersecretpassword@172.17.0.2:3306 --dbname somedbname --seed dbseed.sql\n\n```\n\n\n\n**server** is the actual server that you're lookin' to use. The server needs to connect to a db that has been properly seeded, hence the prior step. As an example for execution of the server:\n\n\n\n```bash\n\ncargo run --bin server -- \\\n\n --bind_ip 0.0.0.0 --bind_port 8000 \\\n\n --conn mysql://root:supersecretpassword@172.17.0.2:3306 --dbname somedbname \\\n\n --mode test\n\n```\n\n\n\n(Note: --mode test is presently required, but unused. )\n\n\n", "file_path": "README.md", "rank": 75, "score": 2.785859969469598 }, { "content": " /*\n\n Suppose you want to add 1 + 0.1. As DFP they'd be represented as [1] 0 Positive and [1] -1 Positive. In\n\n order to perform the addition we'll first have to change [1] 0 to [0, 1] -1. This is still the same number\n\n but now the exponents of both DFP match and we can simply add the digits normally.\n\n\n\n This function takes two DFP n1, and n2. If the exponents are the same it merely\n\n returns (n1, n2). 
But if the exponents are different, it adjusts the amount and exponent of the DFP with the\n\n higher exponent by successively appending a 0 as a LSD and decrementing the exponent until the exponent matches\n\n the other DFP. This function then returns (new n1, new n2).\n\n */\n\n fn dfp_norm_exp( n1 : DFP, n2 : DFP) -> (DFP, DFP) {\n\n if n1.exp == n2.exp {\n\n (n1, n2)\n\n } else if n1.exp > n2.exp {\n\n let mut v1 :Vec<u8> = vec![0];\n\n let mut v2 :Vec<u8> = n1.amount.clone();\n\n v1.append(&mut v2);\n\n\n\n let new_n1 = DFP { amount: v1, exp: n1.exp - 1, sign: n1.sign };\n\n dfp_norm_exp( new_n1, n2)\n", "file_path": "src/dfp.rs", "rank": 76, "score": 2.562449192344696 }, { "content": "Examples:\n\n\n\nRUST_BACKTRACE=1 RUST_TEST_THREADS=1 cargo test\n\n\n\nRun all tests. This command uses a single thread, thus forcing the tests to run sequentially. We need to do this because I've been a bad boy and made tests that are not independent.\n\n\n\nRUST_BACKTRACE=1 RUST_TEST_THREADS=1 cargo test --test server_config\n\n\n\nThis runs just the server_config.rs test.\n\n\n\nIntegration tests:\n\n\n\nThe first part of integration testing is to crank up **bookwerx-core-rust** and/or **dbseed** and study the presence or absence of suitable configuration params as specified via command line args and/or environment variables.\n\n\n\nUnfortunately, we can't test these params in isolation. We want to be able to test for the presence of certain messages relating to the presence or absence of certain configuration parameters. But as the server starts, it may exit prematurely because of a variety of possible configuration errors that it encounters first. In order to test a specific configuration param, we must ensure that the rest of the configuration is sufficiently correct to enable the server to proceed to the point at which it will consider the specific configuration under test. All of this also depends upon the order of operations within the source code. 
Changing this will likely change the order of operations. This is a fragile nuisance, but what better way to deal with it? Suck it up buttercup and deal with it.\n\n\n", "file_path": "tests/README.md", "rank": 77, "score": 2.4663912883235595 }, { "content": "# Introduction\n\n\n\n[![Build Status](https://travis-ci.org/bostontrader/bookwerx-core-rust.png?branch=master)](https://travis-ci.org/bostontrader/bookwerx-core-rust)\n\n[![codecov](https://codecov.io/gh/bostontrader/bookwerx-core-rust/branch/master/graph/badge.svg)](https://codecov.io/gh/bostontrader/bookwerx-core-rust)\n\n[![MIT license](http://img.shields.io/badge/license-MIT-brightgreen.svg)](http://opensource.org/licenses/MIT)\n\n\n\nThe purpose of ***bookwerx-core-rust*** is to provide a primarily RESTful API that supports multi-currency\n\n bookkeeping, using the double-entry bookkeeping model, slightly adapted to squeeze \n\n in multiple currencies. \n\n \n\n ***bookwerx-core*** is written using [the rust programming language.](https://www.rust-lang.org), It uses [rocket](https://rocket.rs) as its web server with [MySQL](https://www.mysql.com) for... you know what MySQL is for.\n\n\n\nAny application that deals with \"money\" (fiat, precious metals, cryptocoins) will\n\nquickly encounter the need for bookkeeping. Rolling your own methods is, as usual,\n\n easier said than done, so perhaps you can save yourself some grief and enjoy ***bookwerx-core*** instead.\n\n\n\nWith this API, the user can:\n\n\n\n* Perform ordinary CRUD operations on the various bookkeeping objects,\n\nsuch as accounts, currencies, and transactions.\n\n\n\n* Query balance sheet and profit and loss information.\n\n\n\n* Perform linting of the bookkeeping objects.\n\n\n\nBut wait... there's more! For those who wish to live dangerously bookwerx-core offers an endpoint that will take a string of custom SQL as a query parameter and send back expected results. 
But before you panic and start shouting about SQL Injection attacks please take a moment to read the details of this feature. There are some safety features to reduce the risk and the general framework of providing said safety gives adventure seekers a start in either tightening, loosening, or eliminating the screws as desired.\n\n\n", "file_path": "README.md", "rank": 78, "score": 2.267537596923733 }, { "content": "Omitting both time_* params gives us a very simple call to find the balance _right now_!\n\n\n\nSetting the time_stop param let's us compute a balance as of a certain time and is what we do for\n\nbalance sheet items.\n\n\n\nSetting both time_* params gives us the change in balance during a time period and is what we do for\n\n income statement items.\n\n\n\nSetting only time_start doesn't seem real useful, but I'm sure somebody can find a need for doing this.\n\n */\n\n#[get(\"/category_dist_sums?<apikey>&<category_id>&<time_start>&<time_stop>&<decorate>\")]\n", "file_path": "src/routz/get_category_dist_sums.rs", "rank": 79, "score": 2.25641118118708 }, { "content": " // 2. Try to post a new trancat.\n\n\n\n // 2.1. But first post using a non-existent apikey.\n\n response = client\n\n .post(\"/trancats\")\n\n .body(format!(\n\n \"apikey=notarealkey&transaction_id={}&category_id={}\",\n\n (transactions.get(0).unwrap()).id,\n\n (categories.get(0).unwrap()).id\n\n ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n match serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap() {\n\n D::APIResponse::Error(_) => assert!(true),\n\n _ => assert!(false),\n\n }\n\n\n\n // 2.2 Successful post.\n\n response = client\n", "file_path": "tests/trancats.rs", "rank": 80, "score": 2.2207767101819003 }, { "content": " // 2. Try to post a new acctcat.\n\n\n\n // 2.1. 
But first post using a non-existent apikey.\n\n response = client\n\n .post(\"/acctcats\")\n\n .body(format!(\n\n \"apikey=notarealkey&account_id={}&category_id={}\",\n\n (accounts.get(0).unwrap()).id,\n\n (categories.get(0).unwrap()).id\n\n ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n match serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap() {\n\n D::APIResponse::Error(_) => assert!(true),\n\n _ => assert!(false),\n\n }\n\n\n\n // 2.2 Successful post.\n\n response = client\n", "file_path": "tests/acctcats.rs", "rank": 81, "score": 2.2207767101819003 }, { "content": " /*\n\n Given two Vec<u8>, top and bottom, perform a multi-digit subtraction of bottom from top and return Maybe (the resulting value). This is a Maybe because parameter errors are possible.\n\n\n\n This is the public entry point where obvious cases are dealt with.\n\n\n\n This function uses the \"Three Digit Trick\" (http://web.sonoma.edu/users/w/wilsonst/courses/math_300/groupwork/altsub/3d.html) generalized for Vec of digits of indefinite length.\n\n\n\n The advantage of this method is that we don't have to deal with regrouping. The disadvantage is that there are many gyrations required to get this done making this somewhat more difficult to understand and probably impacting running time.\n\n\n\n This function is a very special purpose thing that exists solely to support dfp_add. dfp_add needs subtraction in order to deal with negative DFP numbers. As such, this function only cares about doing that and so it has the following rather tedious constraints:\n\n\n\n * top and bottom are both Vec of \"small\" positive integers from 0 to base-1 inclusive and have not been tested with \"large\" or negative integers.\n\n\n\n * the top and bottom Vec both represent positive numbers. 
In the event of the necessity of subtraction of a negative number, re-arrange the problem using negation such that this constraint can hold.\n\n\n\n * top >= bottom\n\n */\n\n fn md_sub(top :&Vec<u8>, bottom :&Vec<u8>) -> Box<Vec<u8>> {\n\n let zz_top = md_stripz(top);\n\n let zz_bot = md_stripz( bottom );\n", "file_path": "src/dfp.rs", "rank": 82, "score": 2.1940736467982287 }, { "content": " a Box<Vec<u8>> containing the resulting value.\n\n\n\n This is a private method that performs the actual subtraction for md_sub which is\n\n responsible for any error checking. md_sub is also responsible for ensuring that top > bottom when\n\n invoking this function.\n\n */\n\n fn md_sub1(top :Vec<u8>, bottom :Vec<u8>) -> Box<Vec<u8>> {\n\n\n\n let n1 = *md_sub2(bottom, top.len() as u8);\n\n let mut n2 = md_add(top,n1);\n\n let l2 = (*n2).len();\n\n\n\n n2[l2-1] = n2[l2-1]-1; // subtract 1000\n\n\n\n let n3 = md_add( vec![1], n2);\n\n Box::new(md_stripz(&n3))\n\n }\n\n\n\n /*\n\n In order to perform a subtraction using the selected algorithm, we want to subtract\n", "file_path": "src/dfp.rs", "rank": 83, "score": 2.0285599834232566 }, { "content": "2.1 If neither --dbname nor BCR_DBNAME are specified, the server will complain and exit. We _must have_ a db name or there's nothing else to do.\n\n\n\n2.2 If either one of --dbname or BCR_DBNAME are specified, the startup message will mention it. But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n2.3 If both --dbname and BCR_DBNAME are specified, the startup message mentions the value from --dbname. But the server will terminate with an error because other subsequent configuration is missing.\n\n\n\n\n\nNow that we know that a connection string and a database name can be specified, it's time to do the same for specifying the seed file. 
In addition to testing the presence or absence of the seed file name, we must also test whether the seed file points to a non-existent file, a file that contains nonsense, and a file that can be used successfully.\n\n\n\n3.1 If neither --init nor BCR_INIT are specified, the server will complain and exit. We _must have_ a seed file name or there's nothing else to do.\n\n\n\n3.2 If either one of --init or BCR_INIT are specified...\n\n\n\nUse these variations to test seed file errors...\n\n\n\n3.2.1 If --seed, as configured via the command line is present, the startup message will mention it. But point this to a non-existent file and the server will shut down in error.\n\n\n\n3.2.2 If BCR_SEED, as configured via the environment is present, the startup message will mention it. But point this to a file that contains nonsense and the server will shutdown in error.\n\n\n\n3.3 If both --seed and BCR_SEED are specified, the startup message will mention the value from --seed. Point this to a file that contains valid SQL and the server will shutdown successfully.\n", "file_path": "tests/dbseed.rs", "rank": 84, "score": 2.020670648550019 }, { "content": " // 2. Try to post a new distribution\n\n\n\n // 2.1. But first post using a non-existent apikey.\n\n response = client\n\n .post(\"/distributions\")\n\n .body(format!(\n\n \"apikey=notarealkey&transaction_id={}&account_id={}&amount=3&amount_exp=0&amountbt=3\",\n\n transaction_id1, account_id1\n\n ))\n\n .header(ContentType::Form)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n match serde_json::from_str(&(response.body_string().unwrap())[..]).unwrap() {\n\n D::APIResponse::Error(_) => assert!(true), // WARNING: Are we seeing the right error? Big long tedious referential integrity error.\n\n _ => assert!(false),\n\n }\n\n\n\n // 2.2. 
amountbt with non-numeric should fail.\n\n response = client\n\n .post(\"/distributions\")\n", "file_path": "tests/distributions.rs", "rank": 85, "score": 1.9997128495477559 }, { "content": "**bookwerx-core-rust** uses the 3rd party crate 'clap' in order to manage the CLI. We won't bother to test its functionality. So for example we won't test that --help produces a help screen, we will always use the --long-form of an option and won't test that the -s(short form) also works, nor will we test that --not-a-real-option produces a suitable error message.\n\n\n\nWhen **bookwerx-core-rust** is executed, it must have access to a MySQL server, even during integration testing. Please examine .travis.yml to see how we easily we use docker to install and run a MariaDB image that we can subsequently use for testing.\n\n \n\nThe testing must therefore have _some_ connection string and _some_ database name. The integration tests hardwire suitable values. Given this hardwiring, we must therefore be careful to keep these things in sync. I don't like this hardwiring, but it's not obvious to me how we can DRY this. IMHO, tolerating this nuisance is simply the best choice.\n\n\n\nKahunaGrande\n\n\n\nThis is a test the sends many, many requests to the server in order to the operation of each of the routes, as well as referential integrity constraints between the objects that are created in the db.\n\n\n\nBut what do we _really_ want to test here? There is a combinatorial explosion of possible variations of requests that defy our effort to test them. Upon reflection, the following categories help with our analysis.\n\n\n\n1. Some requests are so malformed that Rocket will not accept them. Such as:\n\n\n\n A. Missing routes. These will yield 404 not found responses.\n\n\n\n B. Routes that are recognized but have some other problem such as missing or extraneous parameters\n\n or parameters that cannot be properly parsed. Rocket will reject these requests. 
These will yield 422 unprocessable entity.\n\n\n\n2. Some requests satisfy Rocket, but yield errors with the db. These requests yield 200 responses, but said responses contain some error message from the db.\n\n\n\n3. Some request accomplish their goal. They yield 200 responses.\n\n\n\nThat said...\n\n\n\n1. We don't want to test anything that yields 404 or 422. Doing so constitutes testing Rocket and is outside\n\nthe scope of this project.\n\n\n", "file_path": "tests/README.md", "rank": 86, "score": 1.9711039170904123 }, { "content": "## Categories\n\n\n\n**bookwerx-core-rust** provides a system of categories so that you may define any number of categories and then tag accounts and transactions with any number of these categories. It's easy to imagine that we might want to categorize accounts as \"assets\", \"liabilities\", etc., and perhaps categorize transactions as \"deposits\", \"transfers\", or \"withdrawals\". Once you get the hang of this the ideas will flow freely.\n\n\n\nThis feature is implemented using one table that contains all the defined categories, a join table that relates categories to accounts in a many-to-many relationship, as well as another join table to do the same between categories and transactions. A category can optionally also have a single parent category. In this this way we can define a hierarchy of categories. \n\n\n\nThis is a very general purpose tool. It would be easy for the user to create a blizzard of categories, lose track of their purposes, apply said categories haphazardly, and to generally get confused. Managing this complexity is the job for another tool.\n", "file_path": "README.md", "rank": 87, "score": 1.772819463427357 }, { "content": "## Dates and Times\n\n\n\nDealing with dates and times is a bottomless pit of complexity. 
We will make this easier for everybody involved by promulgating the following policy:\n\n\n\nA transaction occurs at a single instant in time with said time recorded as any string format suitable to your app.\n\n\n\nOne practical example would be an ISO-8601 string. Said strings can have the quantity of seconds recorded to an unknown, but sufficient, quantity of decimal places. For example: \"2019-07-20T15:32:17.00000001Z\" This will get you started and you can run a long time before you outgrow this plan.\n\n\n\n## Numbers\n\n\n\nGenerally, the API sends and receives financial numeric amounts using a decimal floating point system. Each number is represented as an integer significand and an integer exponent. In this way we can _exactly_ store and transmit the numbers without being bothered with round-off errors. It's the job of a UI to perform non-destructive rounding when necessary.\n\n\n\nThis is just the tip of a general issue of [dealing with the amounts we encounter when dealing with crypto coins](https://gist.github.com/bostontrader/37ad3aba39d77e6f8a4e8212c02b25aa).\n\n\n", "file_path": "README.md", "rank": 88, "score": 1.6290568466324036 } ]
Rust
src/vdac0/opa2_cal.rs
timokroeger/efm32pg12-pac
bbe43a716047334a3cd51de8e0e3d51280497689
#[doc = "Reader of register OPA2_CAL"] pub type R = crate::R<u32, super::OPA2_CAL>; #[doc = "Writer for register OPA2_CAL"] pub type W = crate::W<u32, super::OPA2_CAL>; #[doc = "Register OPA2_CAL `reset()`'s with value 0x80e7"] impl crate::ResetValue for super::OPA2_CAL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x80e7 } } #[doc = "Reader of field `CM1`"] pub type CM1_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM1`"] pub struct CM1_W<'a> { w: &'a mut W, } impl<'a> CM1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } #[doc = "Reader of field `CM2`"] pub type CM2_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM2`"] pub struct CM2_W<'a> { w: &'a mut W, } impl<'a> CM2_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 5)) | (((value as u32) & 0x0f) << 5); self.w } } #[doc = "Reader of field `CM3`"] pub type CM3_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM3`"] pub struct CM3_W<'a> { w: &'a mut W, } impl<'a> CM3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10); self.w } } #[doc = "Reader of field `GM`"] pub type GM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `GM`"] pub struct GM_W<'a> { w: &'a mut W, } impl<'a> GM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 13)) | (((value as u32) & 0x07) << 13); self.w } } #[doc = "Reader of field `GM3`"] pub type GM3_R = crate::R<u8, u8>; #[doc = "Write proxy for field `GM3`"] pub struct GM3_W<'a> { w: &'a mut W, } impl<'a> GM3_W<'a> { #[doc = r"Writes raw bits to the 
field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 17)) | (((value as u32) & 0x03) << 17); self.w } } #[doc = "Reader of field `OFFSETP`"] pub type OFFSETP_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OFFSETP`"] pub struct OFFSETP_W<'a> { w: &'a mut W, } impl<'a> OFFSETP_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 20)) | (((value as u32) & 0x1f) << 20); self.w } } #[doc = "Reader of field `OFFSETN`"] pub type OFFSETN_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OFFSETN`"] pub struct OFFSETN_W<'a> { w: &'a mut W, } impl<'a> OFFSETN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 26)) | (((value as u32) & 0x1f) << 26); self.w } } impl R { #[doc = "Bits 0:3 - Compensation Cap Cm1 Trim Value"] #[inline(always)] pub fn cm1(&self) -> CM1_R { CM1_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 5:8 - Compensation Cap Cm2 Trim Value"] #[inline(always)] pub fn cm2(&self) -> CM2_R { CM2_R::new(((self.bits >> 5) & 0x0f) as u8) } #[doc = "Bits 10:11 - Compensation Cap Cm3 Trim Value"] #[inline(always)] pub fn cm3(&self) -> CM3_R { CM3_R::new(((self.bits >> 10) & 0x03) as u8) } #[doc = "Bits 13:15 - Gm Trim Value"] #[inline(always)] pub fn gm(&self) -> GM_R { GM_R::new(((self.bits >> 13) & 0x07) as u8) } #[doc = "Bits 17:18 - Gm3 Trim Value"] #[inline(always)] pub fn gm3(&self) -> GM3_R { GM3_R::new(((self.bits >> 17) & 0x03) as u8) } #[doc = "Bits 20:24 - OPAx Non-Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetp(&self) -> OFFSETP_R { OFFSETP_R::new(((self.bits >> 20) & 0x1f) as u8) } #[doc = "Bits 26:30 - OPAx Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetn(&self) -> OFFSETN_R { OFFSETN_R::new(((self.bits >> 26) & 0x1f) 
as u8) } } impl W { #[doc = "Bits 0:3 - Compensation Cap Cm1 Trim Value"] #[inline(always)] pub fn cm1(&mut self) -> CM1_W { CM1_W { w: self } } #[doc = "Bits 5:8 - Compensation Cap Cm2 Trim Value"] #[inline(always)] pub fn cm2(&mut self) -> CM2_W { CM2_W { w: self } } #[doc = "Bits 10:11 - Compensation Cap Cm3 Trim Value"] #[inline(always)] pub fn cm3(&mut self) -> CM3_W { CM3_W { w: self } } #[doc = "Bits 13:15 - Gm Trim Value"] #[inline(always)] pub fn gm(&mut self) -> GM_W { GM_W { w: self } } #[doc = "Bits 17:18 - Gm3 Trim Value"] #[inline(always)] pub fn gm3(&mut self) -> GM3_W { GM3_W { w: self } } #[doc = "Bits 20:24 - OPAx Non-Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetp(&mut self) -> OFFSETP_W { OFFSETP_W { w: self } } #[doc = "Bits 26:30 - OPAx Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetn(&mut self) -> OFFSETN_W { OFFSETN_W { w: self } } }
#[doc = "Reader of register OPA2_CAL"] pub type R = crate::R<u32, super::OPA2_CAL>; #[doc = "Writer for register OPA2_CAL"] pub type W = crate::W<u32, super::OPA2_CAL>; #[doc = "Register OPA2_CAL `reset()`'s with value 0x80e7"] impl crate::ResetValue for super::OPA2_CAL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x80e7 } } #[doc = "Reader of field `CM1`"] pub type CM1_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM1`"] pub struct CM1_W<'a> { w: &'a mut W, } impl<'a> CM1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } #[doc = "Reader of field `CM2`"] pub type CM2_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM2`"] pub struct CM2_W<'a> { w: &'a mut W, } impl<'a> CM2_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 5)) | (((value as u32) & 0x0f) << 5); self.w } } #[doc = "Reader of field `CM3`"] pub type CM3_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CM3`"] pub struct CM3_W<'a> { w: &'a mut W, } impl<'a> CM3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10); self.w } } #[doc = "Reader of field `GM`"] pub type GM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `GM`"] pub struct GM_W<'a> { w: &'a mut W, } impl<'a> GM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 13)) | (((value as u32) & 0x07) << 13); self.w } } #[doc = "Reader of field `GM3`"] pub type GM3_R = crate::R<u8, u8>; #[doc = "Write proxy for field `GM3`"] pub struct GM3_W<'a> { w: &'a mut W, } impl<'a> GM3_W<'a> { #[doc = r"Writes raw bits to the 
field"] #
self } } #[doc = "Bits 17:18 - Gm3 Trim Value"] #[inline(always)] pub fn gm3(&mut self) -> GM3_W { GM3_W { w: self } } #[doc = "Bits 20:24 - OPAx Non-Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetp(&mut self) -> OFFSETP_W { OFFSETP_W { w: self } } #[doc = "Bits 26:30 - OPAx Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetn(&mut self) -> OFFSETN_W { OFFSETN_W { w: self } } }
[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 17)) | (((value as u32) & 0x03) << 17); self.w } } #[doc = "Reader of field `OFFSETP`"] pub type OFFSETP_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OFFSETP`"] pub struct OFFSETP_W<'a> { w: &'a mut W, } impl<'a> OFFSETP_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 20)) | (((value as u32) & 0x1f) << 20); self.w } } #[doc = "Reader of field `OFFSETN`"] pub type OFFSETN_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OFFSETN`"] pub struct OFFSETN_W<'a> { w: &'a mut W, } impl<'a> OFFSETN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 26)) | (((value as u32) & 0x1f) << 26); self.w } } impl R { #[doc = "Bits 0:3 - Compensation Cap Cm1 Trim Value"] #[inline(always)] pub fn cm1(&self) -> CM1_R { CM1_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 5:8 - Compensation Cap Cm2 Trim Value"] #[inline(always)] pub fn cm2(&self) -> CM2_R { CM2_R::new(((self.bits >> 5) & 0x0f) as u8) } #[doc = "Bits 10:11 - Compensation Cap Cm3 Trim Value"] #[inline(always)] pub fn cm3(&self) -> CM3_R { CM3_R::new(((self.bits >> 10) & 0x03) as u8) } #[doc = "Bits 13:15 - Gm Trim Value"] #[inline(always)] pub fn gm(&self) -> GM_R { GM_R::new(((self.bits >> 13) & 0x07) as u8) } #[doc = "Bits 17:18 - Gm3 Trim Value"] #[inline(always)] pub fn gm3(&self) -> GM3_R { GM3_R::new(((self.bits >> 17) & 0x03) as u8) } #[doc = "Bits 20:24 - OPAx Non-Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetp(&self) -> OFFSETP_R { OFFSETP_R::new(((self.bits >> 20) & 0x1f) as u8) } #[doc = "Bits 26:30 - OPAx Inverting Input Offset Configuration Value"] #[inline(always)] pub fn offsetn(&self) -> OFFSETN_R { OFFSETN_R::new(((self.bits >> 26) & 0x1f) as u8) } 
} impl W { #[doc = "Bits 0:3 - Compensation Cap Cm1 Trim Value"] #[inline(always)] pub fn cm1(&mut self) -> CM1_W { CM1_W { w: self } } #[doc = "Bits 5:8 - Compensation Cap Cm2 Trim Value"] #[inline(always)] pub fn cm2(&mut self) -> CM2_W { CM2_W { w: self } } #[doc = "Bits 10:11 - Compensation Cap Cm3 Trim Value"] #[inline(always)] pub fn cm3(&mut self) -> CM3_W { CM3_W { w: self } } #[doc = "Bits 13:15 - Gm Trim Value"] #[inline(always)] pub fn gm(&mut self) -> GM_W { GM_W { w:
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 164476.77146587084 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 67447.28445172955 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 2, "score": 60595.94147526195 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 3, "score": 60583.55344579244 }, { "content": "#[doc = 
\"Reader of register OPA1_CAL\"]\n\npub type R = crate::R<u32, super::OPA1_CAL>;\n\n#[doc = \"Writer for register OPA1_CAL\"]\n\npub type W = crate::W<u32, super::OPA1_CAL>;\n\n#[doc = \"Register OPA1_CAL `reset()`'s with value 0x80e7\"]\n\nimpl crate::ResetValue for super::OPA1_CAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x80e7\n\n }\n\n}\n\n#[doc = \"Reader of field `CM1`\"]\n\npub type CM1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CM1`\"]\n\npub struct CM1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CM1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/vdac0/opa1_cal.rs", "rank": 4, "score": 93.35209757277342 }, { "content": "#[doc = \"Reader of register OPA0_CAL\"]\n\npub type R = crate::R<u32, super::OPA0_CAL>;\n\n#[doc = \"Writer for register OPA0_CAL\"]\n\npub type W = crate::W<u32, super::OPA0_CAL>;\n\n#[doc = \"Register OPA0_CAL `reset()`'s with value 0x80e7\"]\n\nimpl crate::ResetValue for super::OPA0_CAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x80e7\n\n }\n\n}\n\n#[doc = \"Reader of field `CM1`\"]\n\npub type CM1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CM1`\"]\n\npub struct CM1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CM1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/vdac0/opa0_cal.rs", "rank": 6, "score": 93.35209757277343 }, { "content": "#[doc = \"Reader of field `CM3`\"]\n\npub type CM3_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CM3`\"]\n\npub struct CM3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CM3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GM`\"]\n\npub type GM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field 
`GM`\"]\n\npub struct GM_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/vdac0/opa0_cal.rs", "rank": 8, "score": 91.35200431739946 }, { "content": "#[doc = \"Reader of field `CM3`\"]\n\npub type CM3_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CM3`\"]\n\npub struct CM3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CM3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GM`\"]\n\npub type GM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `GM`\"]\n\npub struct GM_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/vdac0/opa1_cal.rs", "rank": 9, "score": 91.35200431739949 }, { "content": "impl<'a> GM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 13)) | (((value as u32) & 0x07) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GM3`\"]\n\npub type GM3_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `GM3`\"]\n\npub struct GM3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> GM3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 17)) | (((value as u32) & 0x03) << 17);\n\n self.w\n", "file_path": "src/vdac0/opa1_cal.rs", "rank": 10, "score": 90.56183276428216 }, { "content": "impl<'a> GM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 13)) | (((value as u32) & 0x07) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GM3`\"]\n\npub type GM3_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `GM3`\"]\n\npub struct GM3_W<'a> {\n\n w: &'a 
mut W,\n\n}\n\nimpl<'a> GM3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 17)) | (((value as u32) & 0x03) << 17);\n\n self.w\n", "file_path": "src/vdac0/opa0_cal.rs", "rank": 12, "score": 90.56183276428216 }, { "content": "#[doc = \"Reader of register INITWAITVAL\"]\n\npub type R = crate::R<u32, super::INITWAITVAL>;\n\n#[doc = \"Writer for register INITWAITVAL\"]\n\npub type W = crate::W<u32, super::INITWAITVAL>;\n\n#[doc = \"Register INITWAITVAL `reset()`'s with value 0xff\"]\n\nimpl crate::ResetValue for super::INITWAITVAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xff\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/initwaitval.rs", "rank": 13, "score": 89.70383465324794 }, { "content": "#[doc = \"Reader of register SADDRMASK\"]\n\npub type R = crate::R<u32, super::SADDRMASK>;\n\n#[doc = \"Writer for register SADDRMASK\"]\n\npub type W = crate::W<u32, super::SADDRMASK>;\n\n#[doc = \"Register SADDRMASK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SADDRMASK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MASK`\"]\n\npub type MASK_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MASK`\"]\n\npub struct MASK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MASK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/saddrmask.rs", "rank": 14, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register ETMTRACEIDR\"]\n\npub type R = crate::R<u32, super::ETMTRACEIDR>;\n\n#[doc = \"Writer for register ETMTRACEIDR\"]\n\npub 
type W = crate::W<u32, super::ETMTRACEIDR>;\n\n#[doc = \"Register ETMTRACEIDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTRACEIDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRACEID`\"]\n\npub type TRACEID_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TRACEID`\"]\n\npub struct TRACEID_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRACEID_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmtraceidr.rs", "rank": 15, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register DATA0XORBYTE\"]\n\npub type R = crate::R<u32, super::DATA0XORBYTE>;\n\n#[doc = \"Writer for register DATA0XORBYTE\"]\n\npub type W = crate::W<u32, super::DATA0XORBYTE>;\n\n#[doc = \"Register DATA0XORBYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0XORBYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0XORBYTE`\"]\n\npub type DATA0XORBYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA0XORBYTE`\"]\n\npub struct DATA0XORBYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0XORBYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0xorbyte.rs", "rank": 16, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register REP1\"]\n\npub type R = crate::R<u32, super::REP1>;\n\n#[doc = \"Writer for register REP1\"]\n\npub type W = crate::W<u32, super::REP1>;\n\n#[doc = \"Register REP1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::REP1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `REP1`\"]\n\npub type REP1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `REP1`\"]\n\npub struct REP1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REP1_W<'a> {\n\n #[doc = r\"Writes raw 
bits to the field\"]\n", "file_path": "src/letimer0/rep1.rs", "rank": 17, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register TIMECMP0\"]\n\npub type R = crate::R<u32, super::TIMECMP0>;\n\n#[doc = \"Writer for register TIMECMP0\"]\n\npub type W = crate::W<u32, super::TIMECMP0>;\n\n#[doc = \"Register TIMECMP0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMECMP0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TCMPVAL`\"]\n\npub type TCMPVAL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TCMPVAL`\"]\n\npub struct TCMPVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TCMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usart0/timecmp0.rs", "rank": 18, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register TXDATA\"]\n\npub type R = crate::R<u32, super::TXDATA>;\n\n#[doc = \"Writer for register TXDATA\"]\n\npub type W = crate::W<u32, super::TXDATA>;\n\n#[doc = \"Register TXDATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXDATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXDATA`\"]\n\npub type TXDATA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXDATA`\"]\n\npub struct TXDATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXDATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/leuart0/txdata.rs", "rank": 19, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register DATA0BYTE14\"]\n\npub type R = crate::R<u32, super::DATA0BYTE14>;\n\n#[doc = \"Writer for register DATA0BYTE14\"]\n\npub type W = crate::W<u32, super::DATA0BYTE14>;\n\n#[doc = \"Register DATA0BYTE14 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0BYTE14 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n 
}\n\n}\n\n#[doc = \"Reader of field `DATA0BYTE14`\"]\n\npub type DATA0BYTE14_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA0BYTE14`\"]\n\npub struct DATA0BYTE14_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0BYTE14_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0byte14.rs", "rank": 20, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register SEQ4\"]\n\npub type R = crate::R<u32, super::SEQ4>;\n\n#[doc = \"Writer for register SEQ4\"]\n\npub type W = crate::W<u32, super::SEQ4>;\n\n#[doc = \"Register SEQ4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEQ4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR16`\"]\n\npub type INSTR16_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR16`\"]\n\npub struct INSTR16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/seq4.rs", "rank": 21, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register ETMTEEVR\"]\n\npub type R = crate::R<u32, super::ETMTEEVR>;\n\n#[doc = \"Writer for register ETMTEEVR\"]\n\npub type W = crate::W<u32, super::ETMTEEVR>;\n\n#[doc = \"Register ETMTEEVR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTEEVR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESA`\"]\n\npub type RESA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESA`\"]\n\npub struct RESA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmteevr.rs", "rank": 22, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register SENSORSTATE\"]\n\npub type R = crate::R<u32, super::SENSORSTATE>;\n\n#[doc = \"Writer for register SENSORSTATE\"]\n\npub type W = 
crate::W<u32, super::SENSORSTATE>;\n\n#[doc = \"Register SENSORSTATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SENSORSTATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SENSORSTATE`\"]\n\npub type SENSORSTATE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SENSORSTATE`\"]\n\npub struct SENSORSTATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SENSORSTATE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/sensorstate.rs", "rank": 23, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register TXDOUBLE\"]\n\npub type R = crate::R<u32, super::TXDOUBLE>;\n\n#[doc = \"Writer for register TXDOUBLE\"]\n\npub type W = crate::W<u32, super::TXDOUBLE>;\n\n#[doc = \"Register TXDOUBLE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXDOUBLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXDATA0`\"]\n\npub type TXDATA0_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXDATA0`\"]\n\npub struct TXDATA0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXDATA0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usart0/txdouble.rs", "rank": 24, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register SEQ2\"]\n\npub type R = crate::R<u32, super::SEQ2>;\n\n#[doc = \"Writer for register SEQ2\"]\n\npub type W = crate::W<u32, super::SEQ2>;\n\n#[doc = \"Register SEQ2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEQ2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR8`\"]\n\npub type INSTR8_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR8`\"]\n\npub struct INSTR8_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR8_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", 
"file_path": "src/crypto0/seq2.rs", "rank": 25, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register TXDATA\"]\n\npub type R = crate::R<u32, super::TXDATA>;\n\n#[doc = \"Writer for register TXDATA\"]\n\npub type W = crate::W<u32, super::TXDATA>;\n\n#[doc = \"Register TXDATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXDATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXDATA`\"]\n\npub type TXDATA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXDATA`\"]\n\npub struct TXDATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXDATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/txdata.rs", "rank": 26, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DONE`\"]\n\npub type DONE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DONE`\"]\n\npub struct DONE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DONE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/ien.rs", "rank": 27, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DATA0BYTE\"]\n\npub type R = crate::R<u32, super::DATA0BYTE>;\n\n#[doc = \"Writer for register DATA0BYTE\"]\n\npub type W = crate::W<u32, super::DATA0BYTE>;\n\n#[doc = \"Register DATA0BYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0BYTE`\"]\n\npub type DATA0BYTE_R = crate::R<u8, u8>;\n\n#[doc = 
\"Write proxy for field `DATA0BYTE`\"]\n\npub struct DATA0BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0byte.rs", "rank": 28, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register CMD\"]\n\npub type R = crate::R<u32, super::CMD>;\n\n#[doc = \"Writer for register CMD\"]\n\npub type W = crate::W<u32, super::CMD>;\n\n#[doc = \"Register CMD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR`\"]\n\npub type INSTR_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR`\"]\n\npub struct INSTR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/cmd.rs", "rank": 29, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register SEQ3\"]\n\npub type R = crate::R<u32, super::SEQ3>;\n\n#[doc = \"Writer for register SEQ3\"]\n\npub type W = crate::W<u32, super::SEQ3>;\n\n#[doc = \"Register SEQ3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEQ3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR12`\"]\n\npub type INSTR12_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR12`\"]\n\npub struct INSTR12_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR12_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/seq3.rs", "rank": 30, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register TIMECMP1\"]\n\npub type R = crate::R<u32, super::TIMECMP1>;\n\n#[doc = \"Writer for register TIMECMP1\"]\n\npub type W = crate::W<u32, super::TIMECMP1>;\n\n#[doc = \"Register TIMECMP1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMECMP1 {\n\n type Type = 
u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TCMPVAL`\"]\n\npub type TCMPVAL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TCMPVAL`\"]\n\npub struct TCMPVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TCMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usart0/timecmp1.rs", "rank": 31, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register CHEN\"]\n\npub type R = crate::R<u32, super::CHEN>;\n\n#[doc = \"Writer for register CHEN\"]\n\npub type W = crate::W<u32, super::CHEN>;\n\n#[doc = \"Register CHEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CHEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CHEN`\"]\n\npub type CHEN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CHEN`\"]\n\npub struct CHEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CHEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/chen.rs", "rank": 32, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register ETMTRIGGER\"]\n\npub type R = crate::R<u32, super::ETMTRIGGER>;\n\n#[doc = \"Writer for register ETMTRIGGER\"]\n\npub type W = crate::W<u32, super::ETMTRIGGER>;\n\n#[doc = \"Register ETMTRIGGER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTRIGGER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESA`\"]\n\npub type RESA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESA`\"]\n\npub struct RESA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmtrigger.rs", "rank": 33, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register ETMFFLR\"]\n\npub type R = crate::R<u32, super::ETMFFLR>;\n\n#[doc = \"Writer for 
register ETMFFLR\"]\n\npub type W = crate::W<u32, super::ETMFFLR>;\n\n#[doc = \"Register ETMFFLR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMFFLR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BYTENUM`\"]\n\npub type BYTENUM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `BYTENUM`\"]\n\npub struct BYTENUM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BYTENUM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmfflr.rs", "rank": 34, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DMCFG\"]\n\npub type R = crate::R<u32, super::DMCFG>;\n\n#[doc = \"Writer for register DMCFG\"]\n\npub type W = crate::W<u32, super::DMCFG>;\n\n#[doc = \"Register DMCFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DMCFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DMG`\"]\n\npub type DMG_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DMG`\"]\n\npub struct DMG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/dmcfg.rs", "rank": 35, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register SADDR\"]\n\npub type R = crate::R<u32, super::SADDR>;\n\n#[doc = \"Writer for register SADDR\"]\n\npub type W = crate::W<u32, super::SADDR>;\n\n#[doc = \"Register SADDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SADDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/saddr.rs", "rank": 36, "score": 
89.11316338196757 }, { "content": "#[doc = \"Reader of register TXDATA\"]\n\npub type R = crate::R<u32, super::TXDATA>;\n\n#[doc = \"Writer for register TXDATA\"]\n\npub type W = crate::W<u32, super::TXDATA>;\n\n#[doc = \"Register TXDATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXDATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXDATA`\"]\n\npub type TXDATA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXDATA`\"]\n\npub struct TXDATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXDATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usart0/txdata.rs", "rank": 37, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register SEQ0\"]\n\npub type R = crate::R<u32, super::SEQ0>;\n\n#[doc = \"Writer for register SEQ0\"]\n\npub type W = crate::W<u32, super::SEQ0>;\n\n#[doc = \"Register SEQ0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEQ0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR0`\"]\n\npub type INSTR0_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR0`\"]\n\npub struct INSTR0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/seq0.rs", "rank": 38, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register INPUTSEL\"]\n\npub type R = crate::R<u32, super::INPUTSEL>;\n\n#[doc = \"Writer for register INPUTSEL\"]\n\npub type W = crate::W<u32, super::INPUTSEL>;\n\n#[doc = \"Register INPUTSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INPUTSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `POSSEL`\"]\n\npub type POSSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `POSSEL`\"]\n\npub 
struct POSSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POSSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/acmp0/inputsel.rs", "rank": 39, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register PRSCTRL\"]\n\npub type R = crate::R<u32, super::PRSCTRL>;\n\n#[doc = \"Writer for register PRSCTRL\"]\n\npub type W = crate::W<u32, super::PRSCTRL>;\n\n#[doc = \"Register PRSCTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PRSCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DECCMPVAL`\"]\n\npub type DECCMPVAL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DECCMPVAL`\"]\n\npub struct DECCMPVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DECCMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/prsctrl.rs", "rank": 40, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DDATA0BYTE\"]\n\npub type R = crate::R<u32, super::DDATA0BYTE>;\n\n#[doc = \"Writer for register DDATA0BYTE\"]\n\npub type W = crate::W<u32, super::DDATA0BYTE>;\n\n#[doc = \"Register DDATA0BYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA0BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA0BYTE`\"]\n\npub type DDATA0BYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DDATA0BYTE`\"]\n\npub struct DDATA0BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA0BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata0byte.rs", "rank": 41, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register REP0\"]\n\npub type R = crate::R<u32, super::REP0>;\n\n#[doc = \"Writer for register REP0\"]\n\npub type W = crate::W<u32, super::REP0>;\n\n#[doc = \"Register REP0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::REP0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `REP0`\"]\n\npub type REP0_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `REP0`\"]\n\npub struct REP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/letimer0/rep0.rs", "rank": 42, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register TIMECMP2\"]\n\npub type R = crate::R<u32, super::TIMECMP2>;\n\n#[doc = \"Writer for register TIMECMP2\"]\n\npub type W = crate::W<u32, super::TIMECMP2>;\n\n#[doc = \"Register TIMECMP2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMECMP2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TCMPVAL`\"]\n\npub type TCMPVAL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TCMPVAL`\"]\n\npub struct TCMPVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TCMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usart0/timecmp2.rs", "rank": 43, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DDATA0BYTE32\"]\n\npub type R = crate::R<u32, super::DDATA0BYTE32>;\n\n#[doc = \"Writer for register DDATA0BYTE32\"]\n\npub type W = crate::W<u32, super::DDATA0BYTE32>;\n\n#[doc = \"Register DDATA0BYTE32 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA0BYTE32 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA0BYTE32`\"]\n\npub type DDATA0BYTE32_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DDATA0BYTE32`\"]\n\npub struct DDATA0BYTE32_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA0BYTE32_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata0byte32.rs", "rank": 44, "score": 89.11316338196755 }, { "content": 
"#[doc = \"Reader of register REQDIS\"]\n\npub type R = crate::R<u32, super::REQDIS>;\n\n#[doc = \"Writer for register REQDIS\"]\n\npub type W = crate::W<u32, super::REQDIS>;\n\n#[doc = \"Register REQDIS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::REQDIS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `REQDIS`\"]\n\npub type REQDIS_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `REQDIS`\"]\n\npub struct REQDIS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REQDIS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/reqdis.rs", "rank": 45, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register TIME\"]\n\npub type R = crate::R<u32, super::TIME>;\n\n#[doc = \"Writer for register TIME\"]\n\npub type W = crate::W<u32, super::TIME>;\n\n#[doc = \"Register TIME `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIME {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SECU`\"]\n\npub type SECU_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SECU`\"]\n\npub struct SECU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SECU_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/rtcc/time.rs", "rank": 46, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register DATA0BYTE15\"]\n\npub type R = crate::R<u32, super::DATA0BYTE15>;\n\n#[doc = \"Writer for register DATA0BYTE15\"]\n\npub type W = crate::W<u32, super::DATA0BYTE15>;\n\n#[doc = \"Register DATA0BYTE15 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0BYTE15 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0BYTE15`\"]\n\npub type DATA0BYTE15_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA0BYTE15`\"]\n\npub struct 
DATA0BYTE15_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0BYTE15_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0byte15.rs", "rank": 47, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register PULSECTRL\"]\n\npub type R = crate::R<u32, super::PULSECTRL>;\n\n#[doc = \"Writer for register PULSECTRL\"]\n\npub type W = crate::W<u32, super::PULSECTRL>;\n\n#[doc = \"Register PULSECTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PULSECTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `PULSEW`\"]\n\npub type PULSEW_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PULSEW`\"]\n\npub struct PULSEW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PULSEW_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/leuart0/pulsectrl.rs", "rank": 48, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register QDATA0BYTE\"]\n\npub type R = crate::R<u32, super::QDATA0BYTE>;\n\n#[doc = \"Writer for register QDATA0BYTE\"]\n\npub type W = crate::W<u32, super::QDATA0BYTE>;\n\n#[doc = \"Register QDATA0BYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::QDATA0BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `QDATA0BYTE`\"]\n\npub type QDATA0BYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `QDATA0BYTE`\"]\n\npub struct QDATA0BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> QDATA0BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/qdata0byte.rs", "rank": 49, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register INPUTDATABYTE\"]\n\npub type R = crate::R<u32, super::INPUTDATABYTE>;\n\n#[doc = \"Writer for register INPUTDATABYTE\"]\n\npub type W = crate::W<u32, super::INPUTDATABYTE>;\n\n#[doc = \"Register INPUTDATABYTE `reset()`'s 
with value 0\"]\n\nimpl crate::ResetValue for super::INPUTDATABYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INPUTDATABYTE`\"]\n\npub type INPUTDATABYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INPUTDATABYTE`\"]\n\npub struct INPUTDATABYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INPUTDATABYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpcrc/inputdatabyte.rs", "rank": 50, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register ETMTSEVR\"]\n\npub type R = crate::R<u32, super::ETMTSEVR>;\n\n#[doc = \"Writer for register ETMTSEVR\"]\n\npub type W = crate::W<u32, super::ETMTSEVR>;\n\n#[doc = \"Register ETMTSEVR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTSEVR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESAEVT`\"]\n\npub type RESAEVT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESAEVT`\"]\n\npub struct RESAEVT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESAEVT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmtsevr.rs", "rank": 51, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DATA1BYTE\"]\n\npub type R = crate::R<u32, super::DATA1BYTE>;\n\n#[doc = \"Writer for register DATA1BYTE\"]\n\npub type W = crate::W<u32, super::DATA1BYTE>;\n\n#[doc = \"Register DATA1BYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA1BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA1BYTE`\"]\n\npub type DATA1BYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA1BYTE`\"]\n\npub struct DATA1BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA1BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": 
"src/crypto0/data1byte.rs", "rank": 52, "score": 89.11316338196755 }, { "content": "#[doc = \"Reader of register SYNC\"]\n\npub type R = crate::R<u32, super::SYNC>;\n\n#[doc = \"Writer for register SYNC\"]\n\npub type W = crate::W<u32, super::SYNC>;\n\n#[doc = \"Register SYNC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SYNC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SYNCTRIG`\"]\n\npub type SYNCTRIG_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SYNCTRIG`\"]\n\npub struct SYNCTRIG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SYNCTRIG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/sync.rs", "rank": 53, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register TXDOUBLE\"]\n\npub type R = crate::R<u32, super::TXDOUBLE>;\n\n#[doc = \"Writer for register TXDOUBLE\"]\n\npub type W = crate::W<u32, super::TXDOUBLE>;\n\n#[doc = \"Register TXDOUBLE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXDOUBLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXDATA0`\"]\n\npub type TXDATA0_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXDATA0`\"]\n\npub struct TXDATA0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXDATA0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/txdouble.rs", "rank": 54, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register CHDONE\"]\n\npub type R = crate::R<u32, super::CHDONE>;\n\n#[doc = \"Writer for register CHDONE\"]\n\npub type W = crate::W<u32, super::CHDONE>;\n\n#[doc = \"Register CHDONE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CHDONE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CHDONE`\"]\n\npub type CHDONE_R = crate::R<u8, 
u8>;\n\n#[doc = \"Write proxy for field `CHDONE`\"]\n\npub struct CHDONE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CHDONE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/chdone.rs", "rank": 55, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DATE\"]\n\npub type R = crate::R<u32, super::DATE>;\n\n#[doc = \"Writer for register DATE\"]\n\npub type W = crate::W<u32, super::DATE>;\n\n#[doc = \"Register DATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DAYOMU`\"]\n\npub type DAYOMU_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DAYOMU`\"]\n\npub struct DAYOMU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DAYOMU_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/rtcc/date.rs", "rank": 56, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register ETMISCIN\"]\n\npub type R = crate::R<u32, super::ETMISCIN>;\n\n#[doc = \"Writer for register ETMISCIN\"]\n\npub type W = crate::W<u32, super::ETMISCIN>;\n\n#[doc = \"Register ETMISCIN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMISCIN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EXTIN`\"]\n\npub type EXTIN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EXTIN`\"]\n\npub struct EXTIN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EXTIN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmiscin.rs", "rank": 57, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register SEQ1\"]\n\npub type R = crate::R<u32, super::SEQ1>;\n\n#[doc = \"Writer for register SEQ1\"]\n\npub type W = crate::W<u32, super::SEQ1>;\n\n#[doc = \"Register SEQ1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEQ1 {\n\n type Type = 
u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INSTR4`\"]\n\npub type INSTR4_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `INSTR4`\"]\n\npub struct INSTR4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INSTR4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/seq1.rs", "rank": 58, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register OVSCFG\"]\n\npub type R = crate::R<u32, super::OVSCFG>;\n\n#[doc = \"Writer for register OVSCFG\"]\n\npub type W = crate::W<u32, super::OVSCFG>;\n\n#[doc = \"Register OVSCFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OVSCFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FILTLEN`\"]\n\npub type FILTLEN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `FILTLEN`\"]\n\npub struct FILTLEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FILTLEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/pcnt0/ovscfg.rs", "rank": 59, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register ETMTECR1\"]\n\npub type R = crate::R<u32, super::ETMTECR1>;\n\n#[doc = \"Writer for register ETMTECR1\"]\n\npub type W = crate::W<u32, super::ETMTECR1>;\n\n#[doc = \"Register ETMTECR1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTECR1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADRCMP`\"]\n\npub type ADRCMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ADRCMP`\"]\n\npub struct ADRCMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADRCMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmtecr1.rs", "rank": 60, "score": 89.11316338196758 }, { "content": "#[doc = \"Reader of register DBGHALT\"]\n\npub type R = crate::R<u32, super::DBGHALT>;\n\n#[doc = 
\"Writer for register DBGHALT\"]\n\npub type W = crate::W<u32, super::DBGHALT>;\n\n#[doc = \"Register DBGHALT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DBGHALT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DBGHALT`\"]\n\npub type DBGHALT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DBGHALT`\"]\n\npub struct DBGHALT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBGHALT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/dbghalt.rs", "rank": 61, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DATA0BYTE12\"]\n\npub type R = crate::R<u32, super::DATA0BYTE12>;\n\n#[doc = \"Writer for register DATA0BYTE12\"]\n\npub type W = crate::W<u32, super::DATA0BYTE12>;\n\n#[doc = \"Register DATA0BYTE12 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0BYTE12 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0BYTE12`\"]\n\npub type DATA0BYTE12_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA0BYTE12`\"]\n\npub struct DATA0BYTE12_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0BYTE12_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0byte12.rs", "rank": 62, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DATA0BYTE13\"]\n\npub type R = crate::R<u32, super::DATA0BYTE13>;\n\n#[doc = \"Writer for register DATA0BYTE13\"]\n\npub type W = crate::W<u32, super::DATA0BYTE13>;\n\n#[doc = \"Register DATA0BYTE13 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0BYTE13 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0BYTE13`\"]\n\npub type DATA0BYTE13_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATA0BYTE13`\"]\n\npub struct 
DATA0BYTE13_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0BYTE13_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0byte13.rs", "rank": 63, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DECSTATE\"]\n\npub type R = crate::R<u32, super::DECSTATE>;\n\n#[doc = \"Writer for register DECSTATE\"]\n\npub type W = crate::W<u32, super::DECSTATE>;\n\n#[doc = \"Register DECSTATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DECSTATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DECSTATE`\"]\n\npub type DECSTATE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DECSTATE`\"]\n\npub struct DECSTATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DECSTATE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/decstate.rs", "rank": 64, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register QDATA1BYTE\"]\n\npub type R = crate::R<u32, super::QDATA1BYTE>;\n\n#[doc = \"Writer for register QDATA1BYTE\"]\n\npub type W = crate::W<u32, super::QDATA1BYTE>;\n\n#[doc = \"Register QDATA1BYTE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::QDATA1BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `QDATA1BYTE`\"]\n\npub type QDATA1BYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `QDATA1BYTE`\"]\n\npub struct QDATA1BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> QDATA1BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/qdata1byte.rs", "rank": 65, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register DDATA1BYTE\"]\n\npub type R = crate::R<u32, super::DDATA1BYTE>;\n\n#[doc = \"Writer for register DDATA1BYTE\"]\n\npub type W = crate::W<u32, super::DDATA1BYTE>;\n\n#[doc = \"Register DDATA1BYTE `reset()`'s with value 
0\"]\n\nimpl crate::ResetValue for super::DDATA1BYTE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA1BYTE`\"]\n\npub type DDATA1BYTE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DDATA1BYTE`\"]\n\npub struct DDATA1BYTE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA1BYTE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata1byte.rs", "rank": 66, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register ETMTESSEICR\"]\n\npub type R = crate::R<u32, super::ETMTESSEICR>;\n\n#[doc = \"Writer for register ETMTESSEICR\"]\n\npub type W = crate::W<u32, super::ETMTESSEICR>;\n\n#[doc = \"Register ETMTESSEICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ETMTESSEICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `STARTRSEL`\"]\n\npub type STARTRSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `STARTRSEL`\"]\n\npub struct STARTRSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STARTRSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmtesseicr.rs", "rank": 67, "score": 89.11316338196757 }, { "content": "#[doc = \"Reader of register HFRCOSS\"]\n\npub type R = crate::R<u32, super::HFRCOSS>;\n\n#[doc = \"Writer for register HFRCOSS\"]\n\npub type W = crate::W<u32, super::HFRCOSS>;\n\n#[doc = \"Register HFRCOSS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::HFRCOSS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SSAMP`\"]\n\npub type SSAMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SSAMP`\"]\n\npub struct SSAMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SSAMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/hfrcoss.rs", "rank": 68, "score": 
89.11316338196758 }, { "content": "#[doc = \"Reader of register CAL\"]\n\npub type R = crate::R<u32, super::CAL>;\n\n#[doc = \"Writer for register CAL\"]\n\npub type W = crate::W<u32, super::CAL>;\n\n#[doc = \"Register CAL `reset()`'s with value 0x0008_2004\"]\n\nimpl crate::ResetValue for super::CAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0008_2004\n\n }\n\n}\n\n#[doc = \"Reader of field `OFFSETTRIM`\"]\n\npub type OFFSETTRIM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `OFFSETTRIM`\"]\n\npub struct OFFSETTRIM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OFFSETTRIM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/vdac0/cal.rs", "rank": 69, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register LFXOCTRL\"]\n\npub type R = crate::R<u32, super::LFXOCTRL>;\n\n#[doc = \"Writer for register LFXOCTRL\"]\n\npub type W = crate::W<u32, super::LFXOCTRL>;\n\n#[doc = \"Register LFXOCTRL `reset()`'s with value 0x0700_9000\"]\n\nimpl crate::ResetValue for super::LFXOCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0700_9000\n\n }\n\n}\n\n#[doc = \"Reader of field `TUNING`\"]\n\npub type TUNING_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TUNING`\"]\n\npub struct TUNING_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TUNING_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/lfxoctrl.rs", "rank": 70, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register CTRL\"]\n\npub type R = crate::R<u32, super::CTRL>;\n\n#[doc = \"Writer for register CTRL\"]\n\npub type W = crate::W<u32, super::CTRL>;\n\n#[doc = \"Register CTRL `reset()`'s with value 0x0700_0000\"]\n\nimpl crate::ResetValue for super::CTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0700_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `SYNCPRSSETEN`\"]\n\npub type SYNCPRSSETEN_R = 
crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SYNCPRSSETEN`\"]\n\npub struct SYNCPRSSETEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SYNCPRSSETEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/ctrl.rs", "rank": 71, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register TEMPLIMITS\"]\n\npub type R = crate::R<u32, super::TEMPLIMITS>;\n\n#[doc = \"Writer for register TEMPLIMITS\"]\n\npub type W = crate::W<u32, super::TEMPLIMITS>;\n\n#[doc = \"Register TEMPLIMITS `reset()`'s with value 0xff00\"]\n\nimpl crate::ResetValue for super::TEMPLIMITS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xff00\n\n }\n\n}\n\n#[doc = \"Reader of field `TEMPLOW`\"]\n\npub type TEMPLOW_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TEMPLOW`\"]\n\npub struct TEMPLOW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TEMPLOW_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/templimits.rs", "rank": 72, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register DCDCZDETCTRL\"]\n\npub type R = crate::R<u32, super::DCDCZDETCTRL>;\n\n#[doc = \"Writer for register DCDCZDETCTRL\"]\n\npub type W = crate::W<u32, super::DCDCZDETCTRL>;\n\n#[doc = \"Register DCDCZDETCTRL `reset()`'s with value 0x0150\"]\n\nimpl crate::ResetValue for super::DCDCZDETCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0150\n\n }\n\n}\n\n#[doc = \"Reader of field `ZDETILIMSEL`\"]\n\npub type ZDETILIMSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ZDETILIMSEL`\"]\n\npub struct ZDETILIMSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ZDETILIMSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/dcdczdetctrl.rs", "rank": 73, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register CAL\"]\n\npub type R = crate::R<u32, super::CAL>;\n\n#[doc = \"Writer for register CAL\"]\n\npub type W = 
crate::W<u32, super::CAL>;\n\n#[doc = \"Register CAL `reset()`'s with value 0x4078_4078\"]\n\nimpl crate::ResetValue for super::CAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x4078_4078\n\n }\n\n}\n\n#[doc = \"Reader of field `SINGLEOFFSET`\"]\n\npub type SINGLEOFFSET_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SINGLEOFFSET`\"]\n\npub struct SINGLEOFFSET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SINGLEOFFSET_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/adc0/cal.rs", "rank": 74, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register DCDCLNCOMPCTRL\"]\n\npub type R = crate::R<u32, super::DCDCLNCOMPCTRL>;\n\n#[doc = \"Writer for register DCDCLNCOMPCTRL\"]\n\npub type W = crate::W<u32, super::DCDCLNCOMPCTRL>;\n\n#[doc = \"Register DCDCLNCOMPCTRL `reset()`'s with value 0x5720_4077\"]\n\nimpl crate::ResetValue for super::DCDCLNCOMPCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x5720_4077\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPENR1`\"]\n\npub type COMPENR1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMPENR1`\"]\n\npub struct COMPENR1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPENR1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/dcdclncompctrl.rs", "rank": 75, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register PERIODSEL\"]\n\npub type R = crate::R<u32, super::PERIODSEL>;\n\n#[doc = \"Writer for register PERIODSEL\"]\n\npub type W = crate::W<u32, super::PERIODSEL>;\n\n#[doc = \"Register PERIODSEL `reset()`'s with value 0x20\"]\n\nimpl crate::ResetValue for super::PERIODSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x20\n\n }\n\n}\n\n#[doc = \"Reader of field `PERIODSEL`\"]\n\npub type PERIODSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PERIODSEL`\"]\n\npub struct PERIODSEL_W<'a> {\n\n 
w: &'a mut W,\n\n}\n\nimpl<'a> PERIODSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cryotimer/periodsel.rs", "rank": 76, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register DCDCCLIMCTRL\"]\n\npub type R = crate::R<u32, super::DCDCCLIMCTRL>;\n\n#[doc = \"Writer for register DCDCCLIMCTRL\"]\n\npub type W = crate::W<u32, super::DCDCCLIMCTRL>;\n\n#[doc = \"Register DCDCCLIMCTRL `reset()`'s with value 0x0100\"]\n\nimpl crate::ResetValue for super::DCDCCLIMCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0100\n\n }\n\n}\n\n#[doc = \"Reader of field `CLIMBLANKDLY`\"]\n\npub type CLIMBLANKDLY_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CLIMBLANKDLY`\"]\n\npub struct CLIMBLANKDLY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CLIMBLANKDLY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/dcdcclimctrl.rs", "rank": 77, "score": 88.3263130478498 }, { "content": "#[doc = \"Reader of register HFXOSTARTUPCTRL\"]\n\npub type R = crate::R<u32, super::HFXOSTARTUPCTRL>;\n\n#[doc = \"Writer for register HFXOSTARTUPCTRL\"]\n\npub type W = crate::W<u32, super::HFXOSTARTUPCTRL>;\n\n#[doc = \"Register HFXOSTARTUPCTRL `reset()`'s with value 0x0005_0020\"]\n\nimpl crate::ResetValue for super::HFXOSTARTUPCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0005_0020\n\n }\n\n}\n\n#[doc = \"Reader of field `IBTRIMXOCORE`\"]\n\npub type IBTRIMXOCORE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IBTRIMXOCORE`\"]\n\npub struct IBTRIMXOCORE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IBTRIMXOCORE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/hfxostartupctrl.rs", "rank": 78, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register ETMCLAIMSET\"]\n\npub type R = crate::R<u32, super::ETMCLAIMSET>;\n\n#[doc = \"Writer for register ETMCLAIMSET\"]\n\npub type W = 
crate::W<u32, super::ETMCLAIMSET>;\n\n#[doc = \"Register ETMCLAIMSET `reset()`'s with value 0x0f\"]\n\nimpl crate::ResetValue for super::ETMCLAIMSET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0f\n\n }\n\n}\n\n#[doc = \"Reader of field `SETTAG`\"]\n\npub type SETTAG_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SETTAG`\"]\n\npub struct SETTAG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SETTAG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/etm/etmclaimset.rs", "rank": 79, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register DCDCLNFREQCTRL\"]\n\npub type R = crate::R<u32, super::DCDCLNFREQCTRL>;\n\n#[doc = \"Writer for register DCDCLNFREQCTRL\"]\n\npub type W = crate::W<u32, super::DCDCLNFREQCTRL>;\n\n#[doc = \"Register DCDCLNFREQCTRL `reset()`'s with value 0x1000_0000\"]\n\nimpl crate::ResetValue for super::DCDCLNFREQCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x1000_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `RCOBAND`\"]\n\npub type RCOBAND_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RCOBAND`\"]\n\npub struct RCOBAND_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RCOBAND_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/dcdclnfreqctrl.rs", "rank": 80, "score": 88.32631304784977 }, { "content": "#[doc = \"Reader of register DCDCLPCTRL\"]\n\npub type R = crate::R<u32, super::DCDCLPCTRL>;\n\n#[doc = \"Writer for register DCDCLPCTRL\"]\n\npub type W = crate::W<u32, super::DCDCLPCTRL>;\n\n#[doc = \"Register DCDCLPCTRL `reset()`'s with value 0x0300_0000\"]\n\nimpl crate::ResetValue for super::DCDCLPCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0300_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `LPCMPHYSSELEM234H`\"]\n\npub type LPCMPHYSSELEM234H_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `LPCMPHYSSELEM234H`\"]\n\npub 
struct LPCMPHYSSELEM234H_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LPCMPHYSSELEM234H_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/emu/dcdclpctrl.rs", "rank": 81, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register HFXOSTEADYSTATECTRL\"]\n\npub type R = crate::R<u32, super::HFXOSTEADYSTATECTRL>;\n\n#[doc = \"Writer for register HFXOSTEADYSTATECTRL\"]\n\npub type W = crate::W<u32, super::HFXOSTEADYSTATECTRL>;\n\n#[doc = \"Register HFXOSTEADYSTATECTRL `reset()`'s with value 0xa30b_4507\"]\n\nimpl crate::ResetValue for super::HFXOSTEADYSTATECTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xa30b_4507\n\n }\n\n}\n\n#[doc = \"Reader of field `IBTRIMXOCORE`\"]\n\npub type IBTRIMXOCORE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IBTRIMXOCORE`\"]\n\npub struct IBTRIMXOCORE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IBTRIMXOCORE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/hfxosteadystatectrl.rs", "rank": 82, "score": 88.32631304784978 }, { "content": "#[doc = \"Reader of register ANACTRL\"]\n\npub type R = crate::R<u32, super::ANACTRL>;\n\n#[doc = \"Writer for register ANACTRL\"]\n\npub type W = crate::W<u32, super::ANACTRL>;\n\n#[doc = \"Register ANACTRL `reset()`'s with value 0x70\"]\n\nimpl crate::ResetValue for super::ANACTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x70\n\n }\n\n}\n\n#[doc = \"Reader of field `IREFPROG`\"]\n\npub type IREFPROG_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IREFPROG`\"]\n\npub struct IREFPROG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IREFPROG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/anactrl.rs", "rank": 83, "score": 88.32631304784977 }, { "content": "#[doc = \"Reader of register HFRCOCTRL\"]\n\npub type R = crate::R<u32, super::HFRCOCTRL>;\n\n#[doc = \"Writer for register HFRCOCTRL\"]\n\npub type 
W = crate::W<u32, super::HFRCOCTRL>;\n\n#[doc = \"Register HFRCOCTRL `reset()`'s with value 0xb148_1f7f\"]\n\nimpl crate::ResetValue for super::HFRCOCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xb148_1f7f\n\n }\n\n}\n\n#[doc = \"Reader of field `TUNING`\"]\n\npub type TUNING_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TUNING`\"]\n\npub struct TUNING_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TUNING_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/hfrcoctrl.rs", "rank": 84, "score": 87.55454164342054 }, { "content": "#[doc = \"Reader of register AUXHFRCOCTRL\"]\n\npub type R = crate::R<u32, super::AUXHFRCOCTRL>;\n\n#[doc = \"Writer for register AUXHFRCOCTRL\"]\n\npub type W = crate::W<u32, super::AUXHFRCOCTRL>;\n\n#[doc = \"Register AUXHFRCOCTRL `reset()`'s with value 0xb148_1f7f\"]\n\nimpl crate::ResetValue for super::AUXHFRCOCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xb148_1f7f\n\n }\n\n}\n\n#[doc = \"Reader of field `TUNING`\"]\n\npub type TUNING_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TUNING`\"]\n\npub struct TUNING_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TUNING_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/auxhfrcoctrl.rs", "rank": 85, "score": 87.55454164342054 }, { "content": "#[doc = \"Reader of register ST16_TCONFB\"]\n\npub type R = crate::R<u32, super::ST16_TCONFB>;\n\n#[doc = \"Writer for register ST16_TCONFB\"]\n\npub type W = crate::W<u32, super::ST16_TCONFB>;\n\n#[doc = \"Register ST16_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST16_TCONFB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> 
{\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st16_tconfb.rs", "rank": 86, "score": 86.79738440652459 }, { "content": "#[doc = \"Reader of register ST0_TCONFB\"]\n\npub type R = crate::R<u32, super::ST0_TCONFB>;\n\n#[doc = \"Writer for register ST0_TCONFB\"]\n\npub type W = crate::W<u32, super::ST0_TCONFB>;\n\n#[doc = \"Register ST0_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST0_TCONFB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st0_tconfb.rs", "rank": 87, "score": 86.79738440652459 }, { "content": "#[doc = \"Reader of register ST30_TCONFB\"]\n\npub type R = crate::R<u32, super::ST30_TCONFB>;\n\n#[doc = \"Writer for register ST30_TCONFB\"]\n\npub type W = crate::W<u32, super::ST30_TCONFB>;\n\n#[doc = \"Register ST30_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST30_TCONFB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st30_tconfb.rs", "rank": 88, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register CH0_CTRL\"]\n\npub type R = crate::R<u32, super::CH0_CTRL>;\n\n#[doc = \"Writer for register CH0_CTRL\"]\n\npub type W = crate::W<u32, super::CH0_CTRL>;\n\n#[doc = \"Register CH0_CTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CH0_CTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn 
reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SIGSEL`\"]\n\npub type SIGSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SIGSEL`\"]\n\npub struct SIGSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SIGSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prs/ch0_ctrl.rs", "rank": 89, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST13_TCONFA\"]\n\npub type R = crate::R<u32, super::ST13_TCONFA>;\n\n#[doc = \"Writer for register ST13_TCONFA\"]\n\npub type W = crate::W<u32, super::ST13_TCONFA>;\n\n#[doc = \"Register ST13_TCONFA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST13_TCONFA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st13_tconfa.rs", "rank": 90, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST12_TCONFA\"]\n\npub type R = crate::R<u32, super::ST12_TCONFA>;\n\n#[doc = \"Writer for register ST12_TCONFA\"]\n\npub type W = crate::W<u32, super::ST12_TCONFA>;\n\n#[doc = \"Register ST12_TCONFA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST12_TCONFA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st12_tconfa.rs", "rank": 91, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST3_TCONFB\"]\n\npub type R = crate::R<u32, 
super::ST3_TCONFB>;\n\n#[doc = \"Writer for register ST3_TCONFB\"]\n\npub type W = crate::W<u32, super::ST3_TCONFB>;\n\n#[doc = \"Register ST3_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST3_TCONFB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st3_tconfb.rs", "rank": 92, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST16_TCONFA\"]\n\npub type R = crate::R<u32, super::ST16_TCONFA>;\n\n#[doc = \"Writer for register ST16_TCONFA\"]\n\npub type W = crate::W<u32, super::ST16_TCONFA>;\n\n#[doc = \"Register ST16_TCONFA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST16_TCONFA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st16_tconfa.rs", "rank": 93, "score": 86.79738440652459 }, { "content": "#[doc = \"Reader of register ST4_TCONFB\"]\n\npub type R = crate::R<u32, super::ST4_TCONFB>;\n\n#[doc = \"Writer for register ST4_TCONFB\"]\n\npub type W = crate::W<u32, super::ST4_TCONFB>;\n\n#[doc = \"Register ST4_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST4_TCONFB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st4_tconfb.rs", "rank": 94, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST27_TCONFA\"]\n\npub type R = crate::R<u32, super::ST27_TCONFA>;\n\n#[doc = \"Writer for register ST27_TCONFA\"]\n\npub type W = crate::W<u32, super::ST27_TCONFA>;\n\n#[doc = \"Register ST27_TCONFA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST27_TCONFA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st27_tconfa.rs", "rank": 95, "score": 86.79738440652459 }, { "content": "#[doc = \"Reader of register CH4_CTRL\"]\n\npub type R = crate::R<u32, super::CH4_CTRL>;\n\n#[doc = \"Writer for register CH4_CTRL\"]\n\npub type W = crate::W<u32, super::CH4_CTRL>;\n\n#[doc = \"Register CH4_CTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CH4_CTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SIGSEL`\"]\n\npub type SIGSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SIGSEL`\"]\n\npub struct SIGSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SIGSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prs/ch4_ctrl.rs", "rank": 96, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST28_TCONFB\"]\n\npub type R = crate::R<u32, super::ST28_TCONFB>;\n\n#[doc = \"Writer for register ST28_TCONFB\"]\n\npub type W = crate::W<u32, super::ST28_TCONFB>;\n\n#[doc = \"Register ST28_TCONFB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST28_TCONFB {\n\n type Type = 
u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st28_tconfb.rs", "rank": 97, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register CH7_TIMING\"]\n\npub type R = crate::R<u32, super::CH7_TIMING>;\n\n#[doc = \"Writer for register CH7_TIMING\"]\n\npub type W = crate::W<u32, super::CH7_TIMING>;\n\n#[doc = \"Register CH7_TIMING `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CH7_TIMING {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EXTIME`\"]\n\npub type EXTIME_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EXTIME`\"]\n\npub struct EXTIME_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EXTIME_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/ch7_timing.rs", "rank": 98, "score": 86.79738440652457 }, { "content": "#[doc = \"Reader of register ST10_TCONFA\"]\n\npub type R = crate::R<u32, super::ST10_TCONFA>;\n\n#[doc = \"Writer for register ST10_TCONFA\"]\n\npub type W = crate::W<u32, super::ST10_TCONFA>;\n\n#[doc = \"Register ST10_TCONFA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ST10_TCONFA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP`\"]\n\npub type COMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `COMP`\"]\n\npub struct COMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lesense/st10_tconfa.rs", "rank": 99, "score": 86.79738440652457 } ]
Rust
src/uu/sort/src/merge.rs
oconnor663/coreutils
c7930a63f7221a6b0e3791c15529e6b10de07ef2
use std::{ cmp::Ordering, ffi::OsStr, io::{Read, Write}, iter, rc::Rc, sync::mpsc::{channel, sync_channel, Receiver, Sender, SyncSender}, thread, }; use compare::Compare; use crate::{ chunks::{self, Chunk}, compare_by, open, GlobalSettings, }; pub fn merge<'a>(files: &[impl AsRef<OsStr>], settings: &'a GlobalSettings) -> FileMerger<'a> { let (request_sender, request_receiver) = channel(); let mut reader_files = Vec::with_capacity(files.len()); let mut loaded_receivers = Vec::with_capacity(files.len()); for (file_number, file) in files.iter().filter_map(open).enumerate() { let (sender, receiver) = sync_channel(2); loaded_receivers.push(receiver); reader_files.push(ReaderFile { file, sender: Some(sender), carry_over: vec![], }); request_sender .send((file_number, Chunk::new(vec![0; 8 * 1024], |_| Vec::new()))) .unwrap(); } for file_number in 0..reader_files.len() { request_sender .send((file_number, Chunk::new(vec![0; 8 * 1024], |_| Vec::new()))) .unwrap(); } thread::spawn({ let settings = settings.clone(); move || { reader( request_receiver, &mut reader_files, &settings, if settings.zero_terminated { b'\0' } else { b'\n' }, ) } }); let mut mergeable_files = vec![]; for (file_number, receiver) in loaded_receivers.into_iter().enumerate() { mergeable_files.push(MergeableFile { current_chunk: Rc::new(receiver.recv().unwrap()), file_number, line_idx: 0, receiver, }) } FileMerger { heap: binary_heap_plus::BinaryHeap::from_vec_cmp( mergeable_files, FileComparator { settings }, ), request_sender, prev: None, } } struct ReaderFile { file: Box<dyn Read + Send>, sender: Option<SyncSender<Chunk>>, carry_over: Vec<u8>, } fn reader( recycled_receiver: Receiver<(usize, Chunk)>, files: &mut [ReaderFile], settings: &GlobalSettings, separator: u8, ) { for (file_idx, chunk) in recycled_receiver.iter() { let (recycled_lines, recycled_buffer) = chunk.recycle(); let ReaderFile { file, sender, carry_over, } = &mut files[file_idx]; chunks::read( sender, recycled_buffer, None, carry_over, 
file, &mut iter::empty(), separator, recycled_lines, settings, ); } } pub struct MergeableFile { current_chunk: Rc<Chunk>, line_idx: usize, receiver: Receiver<Chunk>, file_number: usize, } struct PreviousLine { chunk: Rc<Chunk>, line_idx: usize, file_number: usize, } pub struct FileMerger<'a> { heap: binary_heap_plus::BinaryHeap<MergeableFile, FileComparator<'a>>, request_sender: Sender<(usize, Chunk)>, prev: Option<PreviousLine>, } impl<'a> FileMerger<'a> { pub fn write_all(&mut self, settings: &GlobalSettings) { let mut out = settings.out_writer(); while self.write_next(settings, &mut out) {} } fn write_next(&mut self, settings: &GlobalSettings, out: &mut impl Write) -> bool { if let Some(file) = self.heap.peek() { let prev = self.prev.replace(PreviousLine { chunk: file.current_chunk.clone(), line_idx: file.line_idx, file_number: file.file_number, }); file.current_chunk.with_lines(|lines| { let current_line = &lines[file.line_idx]; if settings.unique { if let Some(prev) = &prev { let cmp = compare_by( &prev.chunk.borrow_lines()[prev.line_idx], current_line, settings, ); if cmp == Ordering::Equal { return; } } } current_line.print(out, settings); }); let was_last_line_for_file = file.current_chunk.borrow_lines().len() == file.line_idx + 1; if was_last_line_for_file { if let Ok(next_chunk) = file.receiver.recv() { let mut file = self.heap.peek_mut().unwrap(); file.current_chunk = Rc::new(next_chunk); file.line_idx = 0; } else { self.heap.pop(); } } else { self.heap.peek_mut().unwrap().line_idx += 1; } if let Some(prev) = prev { if let Ok(prev_chunk) = Rc::try_unwrap(prev.chunk) { self.request_sender .send((prev.file_number, prev_chunk)) .ok(); } } } !self.heap.is_empty() } } struct FileComparator<'a> { settings: &'a GlobalSettings, } impl<'a> Compare<MergeableFile> for FileComparator<'a> { fn compare(&self, a: &MergeableFile, b: &MergeableFile) -> Ordering { let mut cmp = compare_by( &a.current_chunk.borrow_lines()[a.line_idx], 
&b.current_chunk.borrow_lines()[b.line_idx], self.settings, ); if cmp == Ordering::Equal { cmp = a.file_number.cmp(&b.file_number); } cmp.reverse() } }
use std::{ cmp::Ordering, ffi::OsStr, io::{Read, Write}, iter, rc::Rc, sync::mpsc::{channel, sync_channel, Receiver, Sender, SyncSender}, thread, }; use compare::Compare; use crate::{ chunks::{self, Chunk}, compare_by, open, GlobalSettings, }; pub fn merge<'a>(files: &[impl AsRef<OsStr>], settings: &'a GlobalSettings) -> FileMerger<'a> { let (request_sender, request_receiver) = channel(); let mut reader_files = Vec::with_capacity(files.len()); let mut loaded_receivers = Vec::with_capacity(files.len()); for (file_number, file) in files.iter().filter_map(open).enumerate() { let (sender, receiver) = sync_channel(2); loaded_receivers.push(receiver); reader_files.push(ReaderFile { file, sender: Some(sender), carry_over: vec![], }); request_sender .send((file_number, Chunk::new(vec![0; 8 * 1024], |_| Vec::new()))) .unwrap(); } for file_number in 0..reader_files.len() { request_sender .send((file_number, Chunk::new(vec![0; 8 * 1024], |_| Vec::new()))) .unwrap(); } thread::spawn({ let settings = settings.clone(); move || { reader( request_receiver, &mut reader_files, &settings, if settings.zero_terminated { b'\0' } else { b'\n' }, ) } }); let mut mergeable_files = vec![]; for (file_number, receiver) in loaded_receivers.into_iter().enumerate() { mergeable_files.push(MergeableFile { current_chunk: Rc::new(receiver.recv().unwrap()), file_number, line_idx: 0, receiver, }) } FileMerger { heap: binary_heap_plus::BinaryHeap::from_vec_cmp( mergeable_files, FileComparator { settings }, ), request_sender, prev: None, } } struct ReaderFile { file: Box<dyn Read + Send>, sender: Option<SyncSender<Chunk>>, carry_over: Vec<u8>, } fn reader( recycled_receiver: Receiver<(usize, Chunk)>, files: &mut [ReaderFile], settings: &GlobalSettings, separator: u8, ) { for (file_idx, chunk) in recycled_receiver.iter() { let (recycled_lines, recycled_buffer) = chunk.recycle(); let ReaderFile { file, sender, carry_over, } = &mut files[file_idx]; chunks::read( sender, recycled_buffer, None, carry_over, 
file, &mut iter::empty(), separator, recycled_lines, settings, ); } } pub struct MergeableFile { current_chunk: Rc<Chunk>, line_idx: usize, receiver: Receiver<Chunk>, file_number: usize, } struct PreviousLine { chunk: Rc<Chunk>, line_idx: usize, file_number: usize, } pub struct FileMerger<'a> { heap: binary_heap_plus::BinaryHeap<MergeableFile, FileComparator<'a>>, request_sender: Sender<(usize, Chunk)>, prev: Option<PreviousLine>, } impl<'a> FileMerger<'a> { pub fn write_all(&mut self, settings: &GlobalSettings) { let mut out = settings.out_writer(); while self.write_next(settings, &mut out) {} } fn write_next(&mut self, settings: &GlobalSettings, out: &mut impl Write) -> bool { if let Some(file) = self.heap.peek() { let prev = self.prev.replace(PreviousLine { chunk: file.current_chunk.clone(), line_idx: file.line_idx, file_number: file.file_number, }); file.current_chunk.with_lines(|lines| { let current_line = &lines[file.line_idx]; if settings.unique { if let Some(prev) = &prev { let cmp = compare_by( &prev.chunk.borrow_lines()[prev.line_idx], current_line, settings, ); if cmp == Ordering::Equal { return; } } } current_line.print(out, settings); }); let was_last_line_for_file = file.current_chunk.borrow_lines().len() == file.line_idx + 1; if was_last_line_for_file { if let Ok(next_chunk) = file.receiver.recv() { let mut file = self.heap.peek_mut().unwrap(); file.current_chunk = Rc::new(next_chunk); file.line_idx = 0; } else { self.heap.pop(); } } else { self.heap.peek_mut().unwrap().line_idx += 1; } if let Some(prev) = prev { if let Ok(prev_chunk) = Rc::try_unwrap(prev.chunk) { self.request_sender .send((prev.file_number, prev_chunk)) .ok(); } } } !self.heap.is_empty() } } struct FileComparator<'a> { settings: &'a GlobalSettings, } impl<'a> Compare<MergeableFile> for FileComparator<'a> { fn compare(&self, a: &MergeableFile, b: &MergeableFile) -> Ordering { let mut cmp =
; if cmp == Ordering::Equal { cmp = a.file_number.cmp(&b.file_number); } cmp.reverse() } }
compare_by( &a.current_chunk.borrow_lines()[a.line_idx], &b.current_chunk.borrow_lines()[b.line_idx], self.settings, )
call_expression
[ { "content": "/// Write the lines in `chunk` to `file`, separated by `separator`.\n\nfn write(chunk: &mut Chunk, file: &Path, separator: u8) {\n\n chunk.with_lines_mut(|lines| {\n\n // Write the lines to the file\n\n let file = crash_if_err!(1, OpenOptions::new().create(true).write(true).open(file));\n\n let mut writer = BufWriter::new(file);\n\n for s in lines.iter() {\n\n crash_if_err!(1, writer.write_all(s.line.as_bytes()));\n\n crash_if_err!(1, writer.write_all(&[separator]));\n\n }\n\n });\n\n}\n", "file_path": "src/uu/sort/src/ext_sort.rs", "rank": 0, "score": 463736.336596373 }, { "content": "/// Sort files by using auxiliary files for storing intermediate chunks (if needed), and output the result.\n\npub fn ext_sort(files: &mut impl Iterator<Item = Box<dyn Read + Send>>, settings: &GlobalSettings) {\n\n let tmp_dir = crash_if_err!(1, tempfile::Builder::new().prefix(\"uutils_sort\").tempdir_in(&settings.tmp_dir));\n\n let (sorted_sender, sorted_receiver) = std::sync::mpsc::sync_channel(1);\n\n let (recycled_sender, recycled_receiver) = std::sync::mpsc::sync_channel(1);\n\n thread::spawn({\n\n let settings = settings.clone();\n\n move || sorter(recycled_receiver, sorted_sender, settings)\n\n });\n\n let read_result = reader_writer(\n\n files,\n\n &tmp_dir,\n\n if settings.zero_terminated {\n\n b'\\0'\n\n } else {\n\n b'\\n'\n\n },\n\n // Heuristically chosen: Dividing by 10 seems to keep our memory usage roughly\n\n // around settings.buffer_size as a whole.\n\n settings.buffer_size / 10,\n\n settings.clone(),\n", "file_path": "src/uu/sort/src/ext_sort.rs", "rank": 1, "score": 435396.8923203978 }, { "content": "#[allow(unused_assignments)]\n\nfn fold_file<T: Read>(mut file: BufReader<T>, spaces: bool, width: usize) {\n\n let mut line = String::new();\n\n let mut output = String::new();\n\n let mut col_count = 0;\n\n let mut last_space = None;\n\n\n\n /// Print the output line, resetting the column and character counts.\n\n ///\n\n /// If `spaces` is `true`, 
print the output line up to the last\n\n /// encountered whitespace character (inclusive) and set the remaining\n\n /// characters as the start of the next line.\n\n macro_rules! emit_output {\n\n () => {\n\n let consume = match last_space {\n\n Some(i) => i + 1,\n\n None => output.len(),\n\n };\n\n\n\n println!(\"{}\", &output[..consume]);\n\n output.replace_range(..consume, \"\");\n", "file_path": "src/uu/fold/src/fold.rs", "rank": 2, "score": 374361.81501484325 }, { "content": "/// Fold `file` to fit `width` (number of columns), counting all characters as\n\n/// one column.\n\n///\n\n/// This function handles folding for the `-b`/`--bytes` option, counting\n\n/// tab, backspace, and carriage return as occupying one column, identically\n\n/// to all other characters in the stream.\n\n///\n\n/// If `spaces` is `true`, attempt to break lines at whitespace boundaries.\n\nfn fold_file_bytewise<T: Read>(mut file: BufReader<T>, spaces: bool, width: usize) {\n\n let mut line = String::new();\n\n\n\n loop {\n\n if let Ok(0) = file.read_line(&mut line) {\n\n break;\n\n }\n\n\n\n if line == \"\\n\" {\n\n println!();\n\n line.truncate(0);\n\n continue;\n\n }\n\n\n\n let len = line.len();\n\n let mut i = 0;\n\n\n\n while i < len {\n\n let width = if len - i >= width { width } else { len - i };\n\n let slice = {\n", "file_path": "src/uu/fold/src/fold.rs", "rank": 3, "score": 368868.5590363317 }, { "content": "fn rbuf_n_lines(input: &mut impl std::io::BufRead, n: usize, zero: bool) -> std::io::Result<()> {\n\n if n == 0 {\n\n return Ok(());\n\n }\n\n let stdout = std::io::stdout();\n\n let mut stdout = stdout.lock();\n\n let mut lines = 0usize;\n\n split::walk_lines(input, zero, |e| match e {\n\n split::Event::Data(dat) => {\n\n stdout.write_all(dat)?;\n\n Ok(true)\n\n }\n\n split::Event::Line => {\n\n lines += 1;\n\n if lines == n {\n\n Ok(false)\n\n } else {\n\n Ok(true)\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/uu/head/src/head.rs", "rank": 4, "score": 
360976.1649990316 }, { "content": "pub fn vec_of_size(n: usize) -> Vec<u8> {\n\n let mut result = Vec::new();\n\n for _ in 0..n {\n\n result.push('a' as u8);\n\n }\n\n assert_eq!(result.len(), n);\n\n result\n\n}\n\n\n\n/// Sanity checks for test utils\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_code_is() {\n\n let res = CmdResult {\n\n tmpd: None,\n\n code: Some(32),\n\n success: false,\n", "file_path": "tests/common/util.rs", "rank": 5, "score": 354467.20046951185 }, { "content": "// parse_options loads the options into the settings, returning an array of\n\n// error messages.\n\npub fn parse_options(settings: &mut crate::Settings, opts: &clap::ArgMatches) -> Vec<String> {\n\n // This vector holds error messages encountered.\n\n let mut errs: Vec<String> = vec![];\n\n settings.renumber = !opts.is_present(options::NO_RENUMBER);\n\n match opts.value_of(options::NUMER_SEPARATOR) {\n\n None => {}\n\n Some(val) => {\n\n settings.number_separator = val.to_owned();\n\n }\n\n }\n\n match opts.value_of(options::NUMBER_FORMAT) {\n\n None => {}\n\n Some(val) => match val {\n\n \"ln\" => {\n\n settings.number_format = crate::NumberFormat::Left;\n\n }\n\n \"rn\" => {\n\n settings.number_format = crate::NumberFormat::Right;\n\n }\n\n \"rz\" => {\n", "file_path": "src/uu/nl/src/helper.rs", "rank": 6, "score": 340043.7389063504 }, { "content": "// Iterate 'args' and delete the first occurrence\n\n// of a prefix '-' if it's associated with MODE\n\n// e.g. 
\"chmod -v -xw -R FILE\" -> \"chmod -v xw -R FILE\"\n\npub fn strip_minus_from_mode(args: &mut Vec<String>) -> bool {\n\n for arg in args {\n\n if arg.starts_with('-') {\n\n if let Some(second) = arg.chars().nth(1) {\n\n match second {\n\n 'r' | 'w' | 'x' | 'X' | 's' | 't' | 'u' | 'g' | 'o' | '0'..='7' => {\n\n // TODO: use strip_prefix() once minimum rust version reaches 1.45.0\n\n *arg = arg[1..arg.len()].to_string();\n\n return true;\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/uu/chmod/src/chmod.rs", "rank": 7, "score": 337236.52760587854 }, { "content": "/// Iterate over bytes in the file, in reverse, until we find the\n\n/// `num_delimiters` instance of `delimiter`. The `file` is left seek'd to the\n\n/// position just after that delimiter.\n\nfn backwards_thru_file(file: &mut File, num_delimiters: usize, delimiter: u8) {\n\n // This variable counts the number of delimiters found in the file\n\n // so far (reading from the end of the file toward the beginning).\n\n let mut counter = 0;\n\n\n\n for (block_idx, slice) in ReverseChunks::new(file).enumerate() {\n\n // Iterate over each byte in the slice in reverse order.\n\n let mut iter = slice.iter().enumerate().rev();\n\n\n\n // Ignore a trailing newline in the last block, if there is one.\n\n if block_idx == 0 {\n\n if let Some(c) = slice.last() {\n\n if *c == delimiter {\n\n iter.next();\n\n }\n\n }\n\n }\n\n\n\n // For each byte, increment the count of the number of\n\n // delimiters found. 
If we have found more than the specified\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 8, "score": 325477.76970585005 }, { "content": "/// The function that is executed on the sorter thread.\n\nfn sorter(receiver: Receiver<Chunk>, sender: SyncSender<Chunk>, settings: GlobalSettings) {\n\n while let Ok(mut payload) = receiver.recv() {\n\n payload.with_lines_mut(|lines| sort_by(lines, &settings));\n\n sender.send(payload).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/uu/sort/src/ext_sort.rs", "rank": 9, "score": 322453.08269172086 }, { "content": "fn rbuf_but_last_n_bytes(input: &mut impl std::io::BufRead, n: usize) -> std::io::Result<()> {\n\n if n == 0 {\n\n //prints everything\n\n return rbuf_n_bytes(input, std::usize::MAX);\n\n }\n\n let stdout = std::io::stdout();\n\n let mut stdout = stdout.lock();\n\n\n\n let mut ringbuf = vec![0u8; n];\n\n\n\n // first we fill the ring buffer\n\n if let Err(e) = input.read_exact(&mut ringbuf) {\n\n if e.kind() == ErrorKind::UnexpectedEof {\n\n return Ok(());\n\n } else {\n\n return Err(e);\n\n }\n\n }\n\n let mut buffer = [0u8; BUF_SIZE];\n\n loop {\n", "file_path": "src/uu/head/src/head.rs", "rank": 10, "score": 317991.25494541845 }, { "content": "// nl implements the main functionality for an individual buffer.\n\nfn nl<T: Read>(reader: &mut BufReader<T>, settings: &Settings) {\n\n let regexp: regex::Regex = regex::Regex::new(r\".?\").unwrap();\n\n let mut line_no = settings.starting_line_number;\n\n // The current line number's width as a string. 
Using to_string is inefficient\n\n // but since we only do it once, it should not hurt.\n\n let mut line_no_width = line_no.to_string().len();\n\n let line_no_width_initial = line_no_width;\n\n // Stores the smallest integer with one more digit than line_no, so that\n\n // when line_no >= line_no_threshold, we need to use one more digit.\n\n let mut line_no_threshold = 10u64.pow(line_no_width as u32);\n\n let mut empty_line_count: u64 = 0;\n\n let fill_char = match settings.number_format {\n\n NumberFormat::RightZero => '0',\n\n _ => ' ',\n\n };\n\n // Initially, we use the body's line counting settings\n\n let mut regex_filter = match settings.body_numbering {\n\n NumberingStyle::NumberForRegularExpression(ref re) => re,\n\n _ => &regexp,\n\n };\n", "file_path": "src/uu/nl/src/nl.rs", "rank": 11, "score": 317620.39112958836 }, { "content": "fn write_tab_to_end<W: Write>(mut in_buf: &[u8], writer: &mut W) -> usize {\n\n let mut count = 0;\n\n loop {\n\n match in_buf.iter().position(|c| *c == b'\\n' || *c == b'\\t') {\n\n Some(p) => {\n\n writer.write_all(&in_buf[..p]).unwrap();\n\n if in_buf[p] == b'\\n' {\n\n return count + p + 1;\n\n } else {\n\n writer.write_all(b\"^I\").unwrap();\n\n in_buf = &in_buf[p + 1..];\n\n count += p + 1;\n\n }\n\n }\n\n None => {\n\n writer.write_all(in_buf).unwrap();\n\n return 0;\n\n }\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 12, "score": 316689.7966169399 }, { "content": "fn unbounded_tail<T: Read>(reader: &mut BufReader<T>, settings: &Settings) {\n\n // Read through each line/char and store them in a ringbuffer that always\n\n // contains count lines/chars. 
When reaching the end of file, output the\n\n // data in the ringbuf.\n\n match settings.mode {\n\n FilterMode::Lines(count, _) => {\n\n for line in unbounded_tail_collect(reader.lines(), count, settings.beginning) {\n\n println!(\"{}\", line);\n\n }\n\n }\n\n FilterMode::Bytes(count) => {\n\n for byte in unbounded_tail_collect(reader.bytes(), count, settings.beginning) {\n\n let mut stdout = stdout();\n\n print_byte(&mut stdout, byte);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 13, "score": 313141.75132625 }, { "content": "fn write_nonprint_to_end<W: Write>(in_buf: &[u8], writer: &mut W, tab: &[u8]) -> usize {\n\n let mut count = 0;\n\n\n\n for byte in in_buf.iter().map(|c| *c) {\n\n if byte == b'\\n' {\n\n break;\n\n }\n\n match byte {\n\n 9 => writer.write_all(tab),\n\n 0..=8 | 10..=31 => writer.write_all(&[b'^', byte + 64]),\n\n 32..=126 => writer.write_all(&[byte]),\n\n 127 => writer.write_all(&[b'^', byte - 64]),\n\n 128..=159 => writer.write_all(&[b'M', b'-', b'^', byte - 64]),\n\n 160..=254 => writer.write_all(&[b'M', b'-', byte - 128]),\n\n _ => writer.write_all(&[b'M', b'-', b'^', 63]),\n\n }\n\n .unwrap();\n\n count += 1;\n\n }\n\n if count != in_buf.len() {\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 14, "score": 307696.1401131128 }, { "content": "// from cat.rs\n\nfn open(path: impl AsRef<OsStr>) -> Option<Box<dyn Read + Send>> {\n\n let path = path.as_ref();\n\n if path == \"-\" {\n\n let stdin = stdin();\n\n return Some(Box::new(stdin) as Box<dyn Read + Send>);\n\n }\n\n\n\n match File::open(Path::new(path)) {\n\n Ok(f) => Some(Box::new(f) as Box<dyn Read + Send>),\n\n Err(e) => {\n\n show_error!(\"{0:?}: {1}\", path, e.to_string());\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n", "file_path": "src/uu/sort/src/sort.rs", "rank": 15, "score": 306457.18309681764 }, { "content": "fn print_escaped(input: &str, mut output: impl Write) -> io::Result<bool> {\n\n let mut 
should_stop = false;\n\n\n\n let mut buffer = ['\\\\'; 2];\n\n\n\n let mut iter = input.chars().peekable();\n\n while let Some(mut c) = iter.next() {\n\n let mut start = 1;\n\n\n\n if c == '\\\\' {\n\n if let Some(next) = iter.next() {\n\n c = match next {\n\n '\\\\' => '\\\\',\n\n 'a' => '\\x07',\n\n 'b' => '\\x08',\n\n 'c' => {\n\n should_stop = true;\n\n break;\n\n }\n\n 'e' => '\\x1b',\n", "file_path": "src/uu/echo/src/echo.rs", "rank": 16, "score": 306083.4357336126 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn read(\n\n sender_option: &mut Option<SyncSender<Chunk>>,\n\n mut buffer: Vec<u8>,\n\n max_buffer_size: Option<usize>,\n\n carry_over: &mut Vec<u8>,\n\n file: &mut Box<dyn Read + Send>,\n\n next_files: &mut impl Iterator<Item = Box<dyn Read + Send>>,\n\n separator: u8,\n\n lines: Vec<Line<'static>>,\n\n settings: &GlobalSettings,\n\n) {\n\n assert!(lines.is_empty());\n\n if let Some(sender) = sender_option {\n\n if buffer.len() < carry_over.len() {\n\n buffer.resize(carry_over.len() + 10 * 1024, 0);\n\n }\n\n buffer[..carry_over.len()].copy_from_slice(&carry_over);\n\n let (read, should_continue) = read_to_buffer(\n\n file,\n\n next_files,\n", "file_path": "src/uu/sort/src/chunks.rs", "rank": 17, "score": 303867.51529090496 }, { "content": "// write***_to_end methods\n\n// Write all symbols till end of line or end of buffer is reached\n\n// Return the (number of written symbols + 1) or 0 if the end of buffer is reached\n\nfn write_to_end<W: Write>(in_buf: &[u8], writer: &mut W) -> usize {\n\n match in_buf.iter().position(|c| *c == b'\\n') {\n\n Some(p) => {\n\n writer.write_all(&in_buf[..p]).unwrap();\n\n p + 1\n\n }\n\n None => {\n\n writer.write_all(in_buf).unwrap();\n\n 0\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 18, "score": 303131.96625091165 }, { "content": "fn find_seps(data: &mut Vec<&[u8]>, sep: u8) {\n\n // need to use for loop so we don't borrow the vector as we modify it in place\n\n // basic 
idea:\n\n // * We don't care about the order of the result. This lets us slice the slices\n\n // without making a new vector.\n\n // * Starting from the end of the vector, we examine each element.\n\n // * If that element contains the separator, we remove it from the vector,\n\n // and then sub-slice it into slices that do not contain the separator.\n\n // * We maintain the invariant throughout that each element in the vector past\n\n // the ith element does not have any separators remaining.\n\n for i in (0..data.len()).rev() {\n\n if data[i].contains(&sep) {\n\n let this = data.swap_remove(i);\n\n let mut p = 0;\n\n let mut i = 1;\n\n loop {\n\n if i == this.len() {\n\n break;\n\n }\n\n\n", "file_path": "src/uu/shuf/src/shuf.rs", "rank": 19, "score": 299744.5410561278 }, { "content": "fn bsd_sum(mut reader: Box<dyn Read>) -> (usize, u16) {\n\n let mut buf = [0; 1024];\n\n let mut blocks_read = 0;\n\n let mut checksum: u16 = 0;\n\n loop {\n\n match reader.read(&mut buf) {\n\n Ok(n) if n != 0 => {\n\n blocks_read += 1;\n\n for &byte in buf[..n].iter() {\n\n checksum = (checksum >> 1) + ((checksum & 1) << 15);\n\n checksum = checksum.wrapping_add(u16::from(byte));\n\n }\n\n }\n\n _ => break,\n\n }\n\n }\n\n\n\n (blocks_read, checksum)\n\n}\n\n\n", "file_path": "src/uu/sum/src/sum.rs", "rank": 20, "score": 299256.4064270682 }, { "content": "fn sysv_sum(mut reader: Box<dyn Read>) -> (usize, u16) {\n\n let mut buf = [0; 512];\n\n let mut blocks_read = 0;\n\n let mut ret = 0u32;\n\n\n\n loop {\n\n match reader.read(&mut buf) {\n\n Ok(n) if n != 0 => {\n\n blocks_read += 1;\n\n for &byte in buf[..n].iter() {\n\n ret = ret.wrapping_add(u32::from(byte));\n\n }\n\n }\n\n _ => break,\n\n }\n\n }\n\n\n\n ret = (ret & 0xffff) + (ret >> 16);\n\n ret = (ret & 0xffff) + (ret >> 16);\n\n\n\n (blocks_read, ret as u16)\n\n}\n\n\n", "file_path": "src/uu/sum/src/sum.rs", "rank": 21, "score": 299256.4064270682 }, { "content": "fn read_input_file(filename: &str) -> Vec<u8> {\n\n let mut 
file = BufReader::new(if filename == \"-\" {\n\n Box::new(stdin()) as Box<dyn Read>\n\n } else {\n\n match File::open(filename) {\n\n Ok(f) => Box::new(f) as Box<dyn Read>,\n\n Err(e) => crash!(1, \"failed to open '{}': {}\", filename, e),\n\n }\n\n });\n\n\n\n let mut data = Vec::new();\n\n if let Err(e) = file.read_to_end(&mut data) {\n\n crash!(1, \"failed reading '{}': {}\", filename, e)\n\n };\n\n\n\n data\n\n}\n\n\n", "file_path": "src/uu/shuf/src/shuf.rs", "rank": 22, "score": 295012.1737486804 }, { "content": "/// When tail'ing a file, we do not need to read the whole file from start to\n\n/// finish just to find the last n lines or bytes. Instead, we can seek to the\n\n/// end of the file, and then read the file \"backwards\" in blocks of size\n\n/// `BLOCK_SIZE` until we find the location of the first line/byte. This ends up\n\n/// being a nice performance win for very large files.\n\nfn bounded_tail(file: &mut File, settings: &Settings) {\n\n // Find the position in the file to start printing from.\n\n match settings.mode {\n\n FilterMode::Lines(count, delimiter) => {\n\n backwards_thru_file(file, count as usize, delimiter);\n\n }\n\n FilterMode::Bytes(count) => {\n\n file.seek(SeekFrom::End(-(count as i64))).unwrap();\n\n }\n\n }\n\n\n\n // Print the target section of the file.\n\n let stdout = stdout();\n\n let mut stdout = stdout.lock();\n\n std::io::copy(file, &mut stdout).unwrap();\n\n}\n\n\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 23, "score": 291096.50479642843 }, { "content": "pub fn base_conv_vec(src: &[u8], radix_src: u8, radix_dest: u8) -> Vec<u8> {\n\n let mut result = vec![0];\n\n for i in src {\n\n result = arrnum_int_mult(&result, radix_dest, radix_src);\n\n result = arrnum_int_add(&result, radix_dest, *i);\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/uu/printf/src/tokenize/num_format/formatters/base_conv/mod.rs", "rank": 24, "score": 285030.0520005617 }, { "content": "pub fn read_size(child: &mut Child, size: usize) -> String 
{\n\n let mut output = Vec::new();\n\n output.resize(size, 0);\n\n sleep(Duration::from_secs(1));\n\n child\n\n .stdout\n\n .as_mut()\n\n .unwrap()\n\n .read_exact(output.as_mut_slice())\n\n .unwrap();\n\n String::from_utf8(output).unwrap()\n\n}\n\n\n", "file_path": "tests/common/util.rs", "rank": 25, "score": 278310.5946913206 }, { "content": "fn show_line(out: &mut Stdout, sep: &[u8], dat: &[u8], before: bool) {\n\n if before {\n\n out.write_all(sep)\n\n .unwrap_or_else(|e| crash!(1, \"failed to write to stdout: {}\", e));\n\n }\n\n\n\n out.write_all(dat)\n\n .unwrap_or_else(|e| crash!(1, \"failed to write to stdout: {}\", e));\n\n\n\n if !before {\n\n out.write_all(sep)\n\n .unwrap_or_else(|e| crash!(1, \"failed to write to stdout: {}\", e));\n\n }\n\n}\n", "file_path": "src/uu/tac/src/tac.rs", "rank": 26, "score": 277800.2947707776 }, { "content": "fn is_seekable<T: Seek>(file: &mut T) -> bool {\n\n file.seek(SeekFrom::Current(0)).is_ok()\n\n}\n\n\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 27, "score": 275928.9640955951 }, { "content": "pub fn supports_pid_checks(_pid: self::Pid) -> bool {\n\n true\n\n}\n", "file_path": "src/uu/tail/src/platform/windows.rs", "rank": 28, "score": 274732.8736973067 }, { "content": "pub fn supports_pid_checks(pid: self::Pid) -> bool {\n\n true\n\n}\n", "file_path": "src/uu/tail/src/platform/redox.rs", "rank": 29, "score": 274732.8736973067 }, { "content": "pub fn supports_pid_checks(pid: self::Pid) -> bool {\n\n unsafe { !(libc::kill(pid, 0) != 0 && get_errno() == libc::ENOSYS) }\n\n}\n\n\n", "file_path": "src/uu/tail/src/platform/unix.rs", "rank": 30, "score": 274732.8736973067 }, { "content": "fn head_file(input: &mut std::fs::File, options: &HeadOptions) -> std::io::Result<()> {\n\n if options.all_but_last {\n\n head_backwards_file(input, options)\n\n } else {\n\n match options.mode {\n\n Modes::Bytes(n) => {\n\n rbuf_n_bytes(&mut std::io::BufReader::with_capacity(BUF_SIZE, input), n)\n\n }\n\n Modes::Lines(n) => 
rbuf_n_lines(\n\n &mut std::io::BufReader::with_capacity(BUF_SIZE, input),\n\n n,\n\n options.zeroed,\n\n ),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uu/head/src/head.rs", "rank": 31, "score": 273479.3994456656 }, { "content": "fn tac(filenames: Vec<String>, before: bool, _: bool, separator: &str) -> i32 {\n\n let mut exit_code = 0;\n\n let mut out = stdout();\n\n let sbytes = separator.as_bytes();\n\n let slen = sbytes.len();\n\n\n\n for filename in &filenames {\n\n let mut file = BufReader::new(if filename == \"-\" {\n\n Box::new(stdin()) as Box<dyn Read>\n\n } else {\n\n let path = Path::new(filename);\n\n if path.is_dir() || path.metadata().is_err() {\n\n if path.is_dir() {\n\n show_error!(\"dir: read error: Invalid argument\");\n\n } else {\n\n show_error!(\n\n \"failed to open '{}' for reading: No such file or directory\",\n\n filename\n\n );\n\n }\n", "file_path": "src/uu/tac/src/tac.rs", "rank": 32, "score": 272886.96208166727 }, { "content": "fn follow<T: Read>(readers: &mut [BufReader<T>], filenames: &[String], settings: &Settings) {\n\n assert!(settings.follow);\n\n let mut last = readers.len() - 1;\n\n let mut read_some = false;\n\n let mut process = platform::ProcessChecker::new(settings.pid);\n\n\n\n loop {\n\n sleep(Duration::new(0, settings.sleep_msec * 1000));\n\n\n\n let pid_is_dead = !read_some && settings.pid != 0 && process.is_dead();\n\n read_some = false;\n\n\n\n for (i, reader) in readers.iter_mut().enumerate() {\n\n // Print all new content since the last pass\n\n loop {\n\n let mut datum = String::new();\n\n match reader.read_line(&mut datum) {\n\n Ok(0) => break,\n\n Ok(_) => {\n\n read_some = true;\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 33, "score": 272462.3337875926 }, { "content": "fn shuf_bytes(input: &mut Vec<&[u8]>, opts: Options) {\n\n let mut output = BufWriter::new(match opts.output {\n\n None => Box::new(stdout()) as Box<dyn Write>,\n\n Some(s) => match File::create(&s[..]) {\n\n Ok(f) => Box::new(f) as Box<dyn 
Write>,\n\n Err(e) => crash!(1, \"failed to open '{}' for writing: {}\", &s[..], e),\n\n },\n\n });\n\n\n\n let mut rng = match opts.random_source {\n\n Some(r) => WrappedRng::RngFile(rand::read::ReadRng::new(match File::open(&r[..]) {\n\n Ok(f) => f,\n\n Err(e) => crash!(1, \"failed to open random source '{}': {}\", &r[..], e),\n\n })),\n\n None => WrappedRng::RngDefault(rand::thread_rng()),\n\n };\n\n\n\n // we're generating a random usize. To keep things fair, we take this number mod ceil(log2(length+1))\n\n let mut len_mod = 1;\n\n let mut len = input.len();\n", "file_path": "src/uu/shuf/src/shuf.rs", "rank": 34, "score": 271959.4275069893 }, { "content": "#[inline(always)]\n\nfn is_word_separator(byte: u8) -> bool {\n\n byte == SPACE || byte == TAB || byte == CR || byte == SYN || byte == FF\n\n}\n\n\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct WordCount {\n\n pub bytes: usize,\n\n pub chars: usize,\n\n pub lines: usize,\n\n pub words: usize,\n\n pub max_line_length: usize,\n\n}\n\n\n\nimpl Add for WordCount {\n\n type Output = Self;\n\n\n\n fn add(self, other: Self) -> Self {\n\n Self {\n\n bytes: self.bytes + other.bytes,\n\n chars: self.chars + other.chars,\n", "file_path": "src/uu/wc/src/wordcount.rs", "rank": 35, "score": 271303.6101826773 }, { "content": "fn head_backwards_file(input: &mut std::fs::File, options: &HeadOptions) -> std::io::Result<()> {\n\n assert!(options.all_but_last);\n\n let size = input.seek(SeekFrom::End(0))?;\n\n let size = usize::try_from(size).unwrap();\n\n match options.mode {\n\n Modes::Bytes(n) => {\n\n if n >= size {\n\n return Ok(());\n\n } else {\n\n input.seek(SeekFrom::Start(0))?;\n\n rbuf_n_bytes(\n\n &mut std::io::BufReader::with_capacity(BUF_SIZE, input),\n\n size - n,\n\n )?;\n\n }\n\n }\n\n Modes::Lines(n) => {\n\n let mut buffer = [0u8; BUF_SIZE];\n\n let buffer = &mut buffer[..BUF_SIZE.min(size)];\n\n let mut i = 0usize;\n", "file_path": "src/uu/head/src/head.rs", "rank": 36, "score": 270336.29017677094 }, 
{ "content": "pub fn arrnum_int_add(arrnum: &[u8], basenum: u8, base_ten_int_term: u8) -> Vec<u8> {\n\n let mut carry: u16 = u16::from(base_ten_int_term);\n\n let mut rem: u16;\n\n let mut new_amount: u16;\n\n let base: u16 = u16::from(basenum);\n\n\n\n let mut ret_rev: Vec<u8> = Vec::new();\n\n let mut it = arrnum.iter().rev();\n\n loop {\n\n let i = it.next();\n\n match i {\n\n Some(u) => {\n\n new_amount = u16::from(*u) + carry;\n\n rem = new_amount % base;\n\n carry = (new_amount - rem) / base;\n\n ret_rev.push(rem as u8)\n\n }\n\n None => {\n\n while carry != 0 {\n\n rem = carry % base;\n", "file_path": "src/uu/printf/src/tokenize/num_format/formatters/base_conv/mod.rs", "rank": 37, "score": 268542.42057455506 }, { "content": "pub fn arrnum_int_mult(arr_num: &[u8], basenum: u8, base_ten_int_fact: u8) -> Vec<u8> {\n\n let mut carry: u16 = 0;\n\n let mut rem: u16;\n\n let mut new_amount: u16;\n\n let fact: u16 = u16::from(base_ten_int_fact);\n\n let base: u16 = u16::from(basenum);\n\n\n\n let mut ret_rev: Vec<u8> = Vec::new();\n\n let mut it = arr_num.iter().rev();\n\n loop {\n\n let i = it.next();\n\n match i {\n\n Some(u) => {\n\n new_amount = (u16::from(*u) * fact) + carry;\n\n rem = new_amount % base;\n\n carry = (new_amount - rem) / base;\n\n ret_rev.push(rem as u8)\n\n }\n\n None => {\n\n while carry != 0 {\n", "file_path": "src/uu/printf/src/tokenize/num_format/formatters/base_conv/mod.rs", "rank": 39, "score": 266344.95455714635 }, { "content": "fn open(name: String, append: bool) -> Box<dyn Write> {\n\n let path = PathBuf::from(name.clone());\n\n let inner: Box<dyn Write> = {\n\n let mut options = OpenOptions::new();\n\n let mode = if append {\n\n options.append(true)\n\n } else {\n\n options.truncate(true)\n\n };\n\n match mode.write(true).create(true).open(path.as_path()) {\n\n Ok(file) => Box::new(file),\n\n Err(_) => Box::new(sink()),\n\n }\n\n };\n\n Box::new(NamedWriter { inner, name }) as Box<dyn Write>\n\n}\n\n\n", "file_path": 
"src/uu/tee/src/tee.rs", "rank": 40, "score": 265529.4084726728 }, { "content": "/// Evaluate a stack of Symbols, returning the result of the evaluation or\n\n/// an error message if evaluation failed.\n\nfn eval(stack: &mut Vec<Symbol>) -> Result<bool, String> {\n\n macro_rules! pop_literal {\n\n () => {\n\n match stack.pop() {\n\n Some(Symbol::Literal(s)) => s,\n\n _ => panic!(),\n\n }\n\n };\n\n }\n\n\n\n let s = stack.pop();\n\n\n\n match s {\n\n Some(Symbol::Bang) => {\n\n let result = eval(stack)?;\n\n\n\n Ok(!result)\n\n }\n\n Some(Symbol::StringOp(op)) => {\n\n let b = stack.pop();\n", "file_path": "src/uu/test/src/test.rs", "rank": 41, "score": 265237.94354595913 }, { "content": "pub fn factor_chunk(n_s: &mut [u64; CHUNK_SIZE], f_s: &mut [Factors; CHUNK_SIZE]) {\n\n for &(prime, inv, ceil) in P_INVS_U64 {\n\n if n_s[0] == 1 && n_s[1] == 1 && n_s[2] == 1 && n_s[3] == 1 {\n\n break;\n\n }\n\n\n\n for (num, factors) in n_s.iter_mut().zip(f_s.iter_mut()) {\n\n if *num == 1 {\n\n continue;\n\n }\n\n let mut k = 0;\n\n loop {\n\n let x = num.wrapping_mul(inv);\n\n\n\n // While prime divides num\n\n if x <= ceil {\n\n *num = x;\n\n k += 1;\n\n } else {\n\n if k > 0 {\n", "file_path": "src/uu/factor/src/table.rs", "rank": 42, "score": 264883.9686696038 }, { "content": "pub fn uumain(mut args: impl uucore::Args) -> i32 {\n\n // if there is no program name for some reason, default to \"hashsum\"\n\n let program = args.next().unwrap_or_else(|| OsString::from(NAME));\n\n let binary_name = Path::new(&program)\n\n .file_name()\n\n .unwrap_or_else(|| OsStr::new(NAME))\n\n .to_string_lossy();\n\n\n\n let args = iter::once(program.clone()).chain(args);\n\n\n\n // Default binary in Windows, text mode otherwise\n\n let binary_flag_default = cfg!(windows);\n\n\n\n let binary_help = format!(\n\n \"read in binary mode{}\",\n\n if binary_flag_default {\n\n \" (default)\"\n\n } else {\n\n \"\"\n\n }\n", "file_path": "src/uu/hashsum/src/hashsum.rs", "rank": 43, "score": 
264717.3667321015 }, { "content": "#[inline]\n\nfn print_byte<T: Write>(stdout: &mut T, ch: u8) {\n\n if let Err(err) = stdout.write(&[ch]) {\n\n crash!(1, \"{}\", err);\n\n }\n\n}\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 44, "score": 261551.45372886042 }, { "content": "pub fn unsigned_to_arrnum(src: u16) -> Vec<u8> {\n\n let mut result: Vec<u8> = Vec::new();\n\n let mut src_tmp: u16 = src;\n\n while src_tmp > 0 {\n\n result.push((src_tmp % 10) as u8);\n\n src_tmp /= 10;\n\n }\n\n result.reverse();\n\n result\n\n}\n\n\n", "file_path": "src/uu/printf/src/tokenize/num_format/formatters/base_conv/mod.rs", "rank": 45, "score": 260712.71930236582 }, { "content": "pub fn wrap_write<W: Write>(mut writer: W, line_wrap: usize, res: String) -> io::Result<()> {\n\n use std::cmp::min;\n\n\n\n if line_wrap == 0 {\n\n return write!(writer, \"{}\", res);\n\n }\n\n\n\n let mut start = 0;\n\n while start < res.len() {\n\n let end = min(start + line_wrap, res.len());\n\n writeln!(writer, \"{}\", &res[start..end])?;\n\n start = end;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/uucore/src/lib/features/encoding.rs", "rank": 46, "score": 260063.3303744537 }, { "content": "#[inline(always)]\n\npub fn is_signal(num: usize) -> bool {\n\n // Named signals start at 1\n\n num <= ALL_SIGNALS.len()\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/signals.rs", "rank": 47, "score": 258953.36020443018 }, { "content": "/// Read file system list.\n\npub fn read_fs_list() -> Vec<MountInfo> {\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n let (file_name, fobj) = File::open(LINUX_MOUNTINFO)\n\n .map(|f| (LINUX_MOUNTINFO, f))\n\n .or_else(|_| File::open(LINUX_MTAB).map(|f| (LINUX_MTAB, f)))\n\n .expect(\"failed to find mount list files\");\n\n let reader = BufReader::new(fobj);\n\n reader\n\n .lines()\n\n .filter_map(|line| line.ok())\n\n .filter_map(|line| {\n\n let raw_data = line.split_whitespace().collect::<Vec<&str>>();\n\n MountInfo::new(file_name, raw_data)\n\n })\n\n 
.collect::<Vec<_>>()\n\n }\n\n #[cfg(any(target_os = \"freebsd\", target_vendor = \"apple\"))]\n\n {\n\n let mut mptr: *mut Sstatfs = ptr::null_mut();\n", "file_path": "src/uucore/src/lib/features/fsext.rs", "rank": 48, "score": 257348.3778263182 }, { "content": "// args() ...\n\npub fn args() -> impl Iterator<Item = String> {\n\n wild::args()\n\n}\n\n\n", "file_path": "src/uucore/src/lib/lib.rs", "rank": 49, "score": 253006.54277331964 }, { "content": "/// Test if the program is running under CI\n\npub fn is_ci() -> bool {\n\n std::env::var(\"CI\")\n\n .unwrap_or(String::from(\"false\"))\n\n .eq_ignore_ascii_case(\"true\")\n\n}\n\n\n", "file_path": "tests/common/util.rs", "rank": 50, "score": 252662.33254956067 }, { "content": "/// Create an iterator over all but the last `n` elements of `iter`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// let data = [1, 2, 3, 4, 5];\n\n/// let n = 2;\n\n/// let mut iter = take_all_but(data.iter(), n);\n\n/// assert_eq!(Some(4), iter.next());\n\n/// assert_eq!(Some(5), iter.next());\n\n/// assert_eq!(None, iter.next());\n\n/// ```\n\npub fn take_all_but<I: Iterator>(iter: I, n: usize) -> TakeAllBut<I> {\n\n TakeAllBut::new(iter, n)\n\n}\n\n\n\n/// An iterator that only iterates over the last elements of another iterator.\n\npub struct TakeAllBut<I: Iterator> {\n\n iter: I,\n\n buf: RingBuffer<<I as Iterator>::Item>,\n\n}\n\n\n\nimpl<I: Iterator> TakeAllBut<I> {\n\n pub fn new(mut iter: I, n: usize) -> TakeAllBut<I> {\n\n // Create a new ring buffer and fill it up.\n\n //\n\n // If there are fewer than `n` elements in `iter`, then we\n\n // exhaust the iterator so that whenever `TakeAllBut::next()` is\n\n // called, it will return `None`, as expected.\n\n let mut buf = RingBuffer::new(n);\n\n for _ in 0..n {\n\n let value = match iter.next() {\n", "file_path": "src/uu/head/src/take.rs", "rank": 51, "score": 249276.01167307707 }, { "content": "fn fold(filenames: Vec<String>, bytes: bool, spaces: bool, width: usize) 
{\n\n for filename in &filenames {\n\n let filename: &str = &filename;\n\n let mut stdin_buf;\n\n let mut file_buf;\n\n let buffer = BufReader::new(if filename == \"-\" {\n\n stdin_buf = stdin();\n\n &mut stdin_buf as &mut dyn Read\n\n } else {\n\n file_buf = safe_unwrap!(File::open(Path::new(filename)));\n\n &mut file_buf as &mut dyn Read\n\n });\n\n\n\n if bytes {\n\n fold_file_bytewise(buffer, spaces, width);\n\n } else {\n\n fold_file(buffer, spaces, width);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uu/fold/src/fold.rs", "rank": 52, "score": 246391.41816824436 }, { "content": "fn sort_by<'a>(unsorted: &mut Vec<Line<'a>>, settings: &GlobalSettings) {\n\n if settings.stable || settings.unique {\n\n unsorted.par_sort_by(|a, b| compare_by(a, b, &settings))\n\n } else {\n\n unsorted.par_sort_unstable_by(|a, b| compare_by(a, b, &settings))\n\n }\n\n}\n\n\n", "file_path": "src/uu/sort/src/sort.rs", "rank": 53, "score": 244938.6333339042 }, { "content": "pub fn args_os() -> impl Iterator<Item = OsString> {\n\n wild::args_os()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::ffi::OsStr;\n\n\n\n fn make_os_vec(os_str: &OsStr) -> Vec<OsString> {\n\n vec![\n\n OsString::from(\"test\"),\n\n OsString::from(\"สวัสดี\"),\n\n os_str.to_os_string(),\n\n ]\n\n }\n\n\n\n fn collect_os_str(vec: Vec<OsString>, handling: InvalidEncodingHandling) -> ConversionResult {\n\n vec.into_iter().collect_str(handling)\n\n }\n", "file_path": "src/uucore/src/lib/lib.rs", "rank": 54, "score": 244348.71516366227 }, { "content": "// TODO: implement one-file-system (this may get partially implemented in walkdir)\n\nfn remove(files: Vec<String>, options: Options) -> bool {\n\n let mut had_err = false;\n\n\n\n for filename in &files {\n\n let file = Path::new(filename);\n\n had_err = match file.symlink_metadata() {\n\n Ok(metadata) => {\n\n if metadata.is_dir() {\n\n handle_dir(file, &options)\n\n } else if is_symlink_dir(&metadata) {\n\n remove_dir(file, &options)\n\n } else 
{\n\n remove_file(file, &options)\n\n }\n\n }\n\n Err(_e) => {\n\n // TODO: actually print out the specific error\n\n // TODO: When the error is not about missing files\n\n // (e.g., permission), even rm -f should fail with\n\n // outputting the error, but there's no easy eay.\n", "file_path": "src/uu/rm/src/rm.rs", "rank": 55, "score": 243588.7651475931 }, { "content": "/// Returns an iterator over the lines of the given reader.\n\n///\n\n/// The iterator returned from this function will yield instances of\n\n/// [`io::Result`]<[`Vec`]<[`u8`]>>, representing the bytes of the line\n\n/// *including* the null character (with the possible exception of the\n\n/// last line, which may not have one).\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use std::io::Cursor;\n\n///\n\n/// let cursor = Cursor::new(b\"x\\0y\\0z\\0\");\n\n/// let mut iter = zlines(cursor).map(|l| l.unwrap());\n\n/// assert_eq!(iter.next(), Some(b\"x\\0\".to_vec()));\n\n/// assert_eq!(iter.next(), Some(b\"y\\0\".to_vec()));\n\n/// assert_eq!(iter.next(), Some(b\"z\\0\".to_vec()));\n\n/// assert_eq!(iter.next(), None);\n\n/// ```\n\npub fn zlines<B>(buf: B) -> ZLines<B> {\n\n ZLines { buf }\n\n}\n\n\n\n/// An iterator over the zero-terminated lines of an instance of `BufRead`.\n\npub struct ZLines<B> {\n\n buf: B,\n\n}\n\n\n\nimpl<B: BufRead> Iterator for ZLines<B> {\n\n type Item = std::io::Result<Vec<u8>>;\n\n\n\n fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> {\n\n let mut buf = Vec::new();\n\n match self.buf.read_until(ZERO, &mut buf) {\n\n Ok(0) => None,\n\n Ok(_) => Some(Ok(buf)),\n\n Err(e) => Some(Err(e)),\n\n }\n\n }\n", "file_path": "src/uu/head/src/lines.rs", "rank": 56, "score": 243173.71852653316 }, { "content": "/// Test if at least one of the given Ranges contain the supplied value.\n\n///\n\n/// Examples:\n\n///\n\n/// ```\n\n/// let ranges = uucore::ranges::Range::from_list(\"11,2,6-8\").unwrap();\n\n///\n\n/// assert!(!uucore::ranges::contain(&ranges, 
0));\n\n/// assert!(!uucore::ranges::contain(&ranges, 1));\n\n/// assert!(!uucore::ranges::contain(&ranges, 5));\n\n/// assert!(!uucore::ranges::contain(&ranges, 10));\n\n///\n\n/// assert!(uucore::ranges::contain(&ranges, 2));\n\n/// assert!(uucore::ranges::contain(&ranges, 6));\n\n/// assert!(uucore::ranges::contain(&ranges, 7));\n\n/// assert!(uucore::ranges::contain(&ranges, 8));\n\n/// assert!(uucore::ranges::contain(&ranges, 11));\n\n/// ```\n\npub fn contain(ranges: &[Range], n: usize) -> bool {\n\n for range in ranges {\n\n if n >= range.low && n <= range.high {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n", "file_path": "src/uucore/src/lib/mods/ranges.rs", "rank": 57, "score": 243157.4128485988 }, { "content": "fn cmp_chars(a: char, b: char, ignore_case: bool) -> Ordering {\n\n if ignore_case {\n\n a.to_ascii_uppercase().cmp(&b.to_ascii_uppercase())\n\n } else {\n\n a.cmp(&b)\n\n }\n\n}\n\n\n", "file_path": "src/uu/sort/src/custom_str_cmp.rs", "rank": 58, "score": 241564.03905647108 }, { "content": "#[cfg(any(windows, target_os = \"fuchsia\"))]\n\n#[inline(always)]\n\nfn reset_term(_: &mut usize) {}\n\n\n", "file_path": "src/uu/more/src/more.rs", "rank": 59, "score": 241156.41944664618 }, { "content": "fn set_buffer(stream: *mut FILE, value: &str) {\n\n let (mode, size): (c_int, size_t) = match value {\n\n \"0\" => (_IONBF, 0_usize),\n\n \"L\" => (_IOLBF, 0_usize),\n\n input => {\n\n let buff_size: usize = match input.parse() {\n\n Ok(num) => num,\n\n Err(e) => crash!(1, \"incorrect size of buffer!: {}\", e),\n\n };\n\n (_IOFBF, buff_size as size_t)\n\n }\n\n };\n\n let res: c_int;\n\n unsafe {\n\n let buffer: *mut c_char = ptr::null_mut();\n\n assert!(buffer.is_null());\n\n res = libc::setvbuf(stream, buffer, mode, size);\n\n }\n\n if res != 0 {\n\n crash!(res, \"error while calling setvbuf!\");\n", "file_path": "src/uu/stdbuf/src/libstdbuf/src/libstdbuf.rs", "rank": 60, "score": 240929.85852213373 }, { "content": "fn output_sorted_lines<'a>(iter: 
impl Iterator<Item = &'a Line<'a>>, settings: &GlobalSettings) {\n\n if settings.unique {\n\n print_sorted(\n\n iter.dedup_by(|a, b| compare_by(a, b, &settings) == Ordering::Equal),\n\n &settings,\n\n );\n\n } else {\n\n print_sorted(iter, &settings);\n\n }\n\n}\n\n\n", "file_path": "src/uu/sort/src/sort.rs", "rank": 61, "score": 240879.73595619644 }, { "content": "pub fn is_wsl_2() -> bool {\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n if let Ok(b) = std::fs::read(\"/proc/sys/kernel/osrelease\") {\n\n if let Ok(s) = std::str::from_utf8(&b) {\n\n let a = s.to_ascii_lowercase();\n\n return a.contains(\"wsl2\");\n\n }\n\n }\n\n }\n\n false\n\n}\n", "file_path": "src/uucore/src/lib/mods/os.rs", "rank": 62, "score": 240020.55878833716 }, { "content": "/// Test if the program is running under WSL\n\n// ref: <https://github.com/microsoft/WSL/issues/4555> @@ <https://archive.is/dP0bz>\n\npub fn is_wsl_1() -> bool {\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n if is_wsl_2() {\n\n return false;\n\n }\n\n if let Ok(b) = std::fs::read(\"/proc/sys/kernel/osrelease\") {\n\n if let Ok(s) = std::str::from_utf8(&b) {\n\n let a = s.to_ascii_lowercase();\n\n return a.contains(\"microsoft\") || a.contains(\"wsl\");\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/uucore/src/lib/mods/os.rs", "rank": 63, "score": 240020.55878833716 }, { "content": "pub fn uumain(_: impl uucore::Args) -> i32 {\n\n 0\n\n}\n", "file_path": "src/uu/true/src/true.rs", "rank": 64, "score": 239074.29360012486 }, { "content": "pub fn uumain(_: impl uucore::Args) -> i32 {\n\n 1\n\n}\n", "file_path": "src/uu/false/src/false.rs", "rank": 65, "score": 239074.29360012486 }, { "content": "fn open_input_file(in_file_name: String) -> BufReader<Box<dyn Read + 'static>> {\n\n let in_file = if in_file_name == \"-\" {\n\n Box::new(stdin()) as Box<dyn Read>\n\n } else {\n\n let path = Path::new(&in_file_name[..]);\n\n let in_file = File::open(&path);\n\n let r = crash_if_err!(1, in_file);\n\n Box::new(r) as Box<dyn 
Read>\n\n };\n\n BufReader::new(in_file)\n\n}\n\n\n", "file_path": "src/uu/uniq/src/uniq.rs", "rank": 66, "score": 238973.95949195442 }, { "content": "fn init_byte_array() -> Vec<u8> {\n\n vec![0; 1024 * 1024]\n\n}\n\n\n", "file_path": "src/uu/cksum/src/cksum.rs", "rank": 67, "score": 237209.6489979633 }, { "content": "#[cfg(target_os = \"redox\")]\n\npub fn is_stdin_interactive() -> bool {\n\n termion::is_tty(&io::stdin())\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 68, "score": 236301.12654584862 }, { "content": "#[cfg(target_os = \"redox\")]\n\npub fn is_stderr_interactive() -> bool {\n\n termion::is_tty(&io::stderr())\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 69, "score": 236301.12654584862 }, { "content": "#[cfg(target_os = \"redox\")]\n\npub fn is_stdout_interactive() -> bool {\n\n termion::is_tty(&io::stdout())\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 70, "score": 236301.12654584862 }, { "content": "pub fn exec(bytes: &[u8]) {\n\n let mut stdout_raw = io::stdout();\n\n let mut writer = ZeroCopyWriter::with_default(&mut stdout_raw, |stdout| stdout.lock());\n\n loop {\n\n // TODO: needs to check if pipe fails\n\n writer.write_all(bytes).unwrap();\n\n }\n\n}\n", "file_path": "src/uu/yes/src/yes.rs", "rank": 71, "score": 232363.08328408352 }, { "content": "fn open(path: String) -> BufReader<Box<dyn Read + 'static>> {\n\n let file_buf;\n\n if path == \"-\" {\n\n BufReader::new(Box::new(stdin()) as Box<dyn Read>)\n\n } else {\n\n file_buf = match File::open(&path[..]) {\n\n Ok(a) => a,\n\n Err(e) => crash!(1, \"{}: {}\\n\", &path[..], e),\n\n };\n\n BufReader::new(Box::new(file_buf) as Box<dyn Read>)\n\n }\n\n}\n\n\n", "file_path": "src/uu/expand/src/expand.rs", "rank": 72, "score": 231120.5397638608 }, { "content": "fn open(path: String) -> BufReader<Box<dyn Read + 'static>> {\n\n let file_buf;\n\n if path == \"-\" {\n\n BufReader::new(Box::new(stdin()) as Box<dyn Read>)\n\n } 
else {\n\n file_buf = match File::open(&path[..]) {\n\n Ok(a) => a,\n\n Err(e) => crash!(1, \"{}: {}\", &path[..], e),\n\n };\n\n BufReader::new(Box::new(file_buf) as Box<dyn Read>)\n\n }\n\n}\n\n\n", "file_path": "src/uu/unexpand/src/unexpand.rs", "rank": 73, "score": 231120.53976386075 }, { "content": "fn next_char_info(uflag: bool, buf: &[u8], byte: usize) -> (CharType, usize, usize) {\n\n let (ctype, cwidth, nbytes) = if uflag {\n\n let nbytes = char::from(buf[byte]).len_utf8();\n\n\n\n if byte + nbytes > buf.len() {\n\n // make sure we don't overrun the buffer because of invalid UTF-8\n\n (CharType::Other, 1, 1)\n\n } else if let Ok(t) = from_utf8(&buf[byte..byte + nbytes]) {\n\n // Now that we think it's UTF-8, figure out what kind of char it is\n\n match t.chars().next() {\n\n Some(' ') => (CharType::Space, 0, 1),\n\n Some('\\t') => (CharType::Tab, 0, 1),\n\n Some('\\x08') => (CharType::Backspace, 0, 1),\n\n Some(c) => (\n\n CharType::Other,\n\n UnicodeWidthChar::width(c).unwrap_or(0),\n\n nbytes,\n\n ),\n\n None => {\n\n // invalid char snuck past the utf8_validation_iterator somehow???\n", "file_path": "src/uu/unexpand/src/unexpand.rs", "rank": 74, "score": 231100.4699643473 }, { "content": "fn cut_files(mut filenames: Vec<String>, mode: Mode) -> i32 {\n\n let mut stdin_read = false;\n\n let mut exit_code = 0;\n\n\n\n if filenames.is_empty() {\n\n filenames.push(\"-\".to_owned());\n\n }\n\n\n\n for filename in &filenames {\n\n if filename == \"-\" {\n\n if stdin_read {\n\n continue;\n\n }\n\n\n\n exit_code |= match mode {\n\n Mode::Bytes(ref ranges, ref opts) => cut_bytes(stdin(), ranges, opts),\n\n Mode::Characters(ref ranges, ref opts) => cut_bytes(stdin(), ranges, opts),\n\n Mode::Fields(ref ranges, ref opts) => cut_fields(stdin(), ranges, opts),\n\n };\n\n\n", "file_path": "src/uu/cut/src/cut.rs", "rank": 75, "score": 230713.4701968232 }, { "content": "pub fn flush_bytes(bslice: &[u8]) {\n\n let _ = stdout().write(bslice);\n\n let _ = 
stdout().flush();\n\n}\n", "file_path": "src/uu/printf/src/cli.rs", "rank": 76, "score": 228641.26876475906 }, { "content": "fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {\n\n let delim: Vec<String> = (0..4).map(|col| mkdelim(col, opts)).collect();\n\n\n\n let ra = &mut String::new();\n\n let mut na = a.read_line(ra);\n\n let rb = &mut String::new();\n\n let mut nb = b.read_line(rb);\n\n\n\n while na.is_ok() || nb.is_ok() {\n\n let ord = match (na.is_ok(), nb.is_ok()) {\n\n (false, true) => Ordering::Greater,\n\n (true, false) => Ordering::Less,\n\n (true, true) => match (&na, &nb) {\n\n (&Ok(0), &Ok(0)) => break,\n\n (&Ok(0), _) => Ordering::Greater,\n\n (_, &Ok(0)) => Ordering::Less,\n\n _ => ra.cmp(&rb),\n\n },\n\n _ => unreachable!(),\n\n };\n", "file_path": "src/uu/comm/src/comm.rs", "rank": 77, "score": 228346.52625211375 }, { "content": "/// Parses an -c or -n argument,\n\n/// the bool specifies whether to read from the end\n\npub fn parse_num(src: &str) -> Result<(usize, bool), ParseError> {\n\n let mut num_start = 0;\n\n let mut chars = src.char_indices();\n\n let (mut chars, all_but_last) = match chars.next() {\n\n Some((_, c)) => {\n\n if c == '-' {\n\n num_start += 1;\n\n (chars, true)\n\n } else {\n\n (src.char_indices(), false)\n\n }\n\n }\n\n None => return Err(ParseError::Syntax),\n\n };\n\n let mut num_end = 0usize;\n\n let mut last_char = 0 as char;\n\n let mut num_count = 0usize;\n\n while let Some((n, c)) = chars.next() {\n\n if c.is_numeric() {\n\n num_end = n;\n", "file_path": "src/uu/head/src/parse.rs", "rank": 78, "score": 226438.87175239186 }, { "content": "pub fn handle_input<R: Read>(\n\n input: &mut R,\n\n format: Format,\n\n line_wrap: Option<usize>,\n\n ignore_garbage: bool,\n\n decode: bool,\n\n name: &str,\n\n) {\n\n let mut data = Data::new(input, format).ignore_garbage(ignore_garbage);\n\n if let Some(wrap) = line_wrap {\n\n data = data.line_wrap(wrap);\n\n }\n\n\n\n if !decode {\n\n let encoded = 
data.encode();\n\n wrap_print(&data, encoded);\n\n } else {\n\n match data.decode() {\n\n Ok(s) => {\n\n if stdout().write_all(&s).is_err() {\n", "file_path": "src/uu/base32/src/base_common.rs", "rank": 79, "score": 225134.61999305856 }, { "content": "/// Read a test scenario fixture, returning its bytes\n\nfn read_scenario_fixture<S: AsRef<OsStr>>(tmpd: &Option<Rc<TempDir>>, file_rel_path: S) -> Vec<u8> {\n\n let tmpdir_path = tmpd.as_ref().unwrap().as_ref().path();\n\n AtPath::new(tmpdir_path).read_bytes(file_rel_path.as_ref().to_str().unwrap())\n\n}\n\n\n\n/// A command result is the outputs of a command (streams and status code)\n\n/// within a struct which has convenience assertion functions about those outputs\n\n#[derive(Debug, Clone)]\n\npub struct CmdResult {\n\n //tmpd is used for convenience functions for asserts against fixtures\n\n tmpd: Option<Rc<TempDir>>,\n\n /// exit status for command (if there is one)\n\n code: Option<i32>,\n\n /// zero-exit from running the Command?\n\n /// see [`success`]\n\n success: bool,\n\n /// captured standard output after running the Command\n\n stdout: String,\n\n /// captured standard error after running the Command\n\n stderr: String,\n", "file_path": "tests/common/util.rs", "rank": 80, "score": 224883.0166005668 }, { "content": "pub fn str_to_arrnum(src: &str, radix_def_src: &dyn RadixDef) -> Vec<u8> {\n\n let mut intermed_in: Vec<u8> = Vec::new();\n\n for c in src.chars() {\n\n #[allow(clippy::single_match)]\n\n match radix_def_src.from_char(c) {\n\n Some(u) => {\n\n intermed_in.push(u);\n\n }\n\n None => {} //todo err msg on incorrect\n\n }\n\n }\n\n intermed_in\n\n}\n\n\n", "file_path": "src/uu/printf/src/tokenize/num_format/formatters/base_conv/mod.rs", "rank": 81, "score": 224339.9546159979 }, { "content": "#[cfg(unix)]\n\npub fn display_permissions_unix(mode: mode_t, display_file_type: bool) -> String {\n\n let mut result;\n\n if display_file_type {\n\n result = String::with_capacity(10);\n\n result.push(match 
mode & S_IFMT {\n\n S_IFDIR => 'd',\n\n S_IFCHR => 'c',\n\n S_IFBLK => 'b',\n\n S_IFREG => '-',\n\n S_IFIFO => 'p',\n\n S_IFLNK => 'l',\n\n S_IFSOCK => 's',\n\n // TODO: Other file types\n\n _ => '?',\n\n });\n\n } else {\n\n result = String::with_capacity(9);\n\n }\n\n\n\n result.push(if has!(mode, S_IRUSR) { 'r' } else { '-' });\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 82, "score": 224062.6494076434 }, { "content": "fn print_factors_str(num_str: &str, w: &mut impl io::Write) -> Result<(), Box<dyn Error>> {\n\n num_str\n\n .parse::<u64>()\n\n .map_err(|e| e.into())\n\n .and_then(|x| writeln!(w, \"{}:{}\", x, factor(x)).map_err(|e| e.into()))\n\n}\n\n\n", "file_path": "src/uu/factor/src/cli.rs", "rank": 83, "score": 223905.04305882426 }, { "content": "/// Writes handle to stdout with no configuration. This allows a\n\n/// simple memory copy.\n\nfn write_fast<R: Read>(handle: &mut InputHandle<R>) -> CatResult<()> {\n\n let stdout = io::stdout();\n\n let mut stdout_lock = stdout.lock();\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n\n {\n\n // If we're on Linux or Android, try to use the splice() system call\n\n // for faster writing. If it works, we're done.\n\n if !splice::write_fast_using_splice(handle, stdout_lock.as_raw_fd())? 
{\n\n return Ok(());\n\n }\n\n }\n\n // If we're not on Linux or Android, or the splice() call failed,\n\n // fall back on slower writing.\n\n let mut buf = [0; 1024 * 64];\n\n while let Ok(n) = handle.reader.read(&mut buf) {\n\n if n == 0 {\n\n break;\n\n }\n\n stdout_lock.write_all(&buf[..n])?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 84, "score": 223558.0585899342 }, { "content": "pub fn factor(num: &mut u64, factors: &mut Factors) {\n\n for &(prime, inv, ceil) in P_INVS_U64 {\n\n if *num == 1 {\n\n break;\n\n }\n\n\n\n // inv = prime^-1 mod 2^64\n\n // ceil = floor((2^64-1) / prime)\n\n // if (num * inv) mod 2^64 <= ceil, then prime divides num\n\n // See https://math.stackexchange.com/questions/1251327/\n\n // for a nice explanation.\n\n let mut k = 0;\n\n loop {\n\n let x = num.wrapping_mul(inv);\n\n\n\n // While prime divides num\n\n if x <= ceil {\n\n *num = x;\n\n k += 1;\n\n #[cfg(feature = \"coz\")]\n", "file_path": "src/uu/factor/src/table.rs", "rank": 85, "score": 223299.36537669305 }, { "content": "struct TransformContainer<'a, A: Write + AsRawObject + Sized, B: Write + Sized> {\n\n /// This field is never used and probably could be converted into PhantomData, but might be\n\n /// useful for restructuring later (at the moment it's basically left over from an earlier\n\n /// design)\n\n #[allow(dead_code)]\n\n original: Option<&'a mut A>,\n\n\n\n transformed: Option<B>,\n\n}\n\n\n\nimpl<'a, A: Write + AsRawObject + Sized, B: Write + Sized> Write for TransformContainer<'a, A, B> {\n\n fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {\n\n self.transformed.as_mut().unwrap().write(bytes)\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.transformed.as_mut().unwrap().flush()\n\n }\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/zero_copy.rs", "rank": 86, "score": 222295.67154563736 }, { "content": "pub fn strings_to_tokens(strings: &[String]) -> Result<Vec<(usize, Token)>, String> {\n\n let mut 
tokens_acc = Vec::with_capacity(strings.len());\n\n let mut tok_idx = 1;\n\n\n\n for s in strings {\n\n let token_if_not_escaped = match s.as_ref() {\n\n \"(\" => Token::ParOpen,\n\n \")\" => Token::ParClose,\n\n\n\n \"^\" => Token::new_infix_op(&s, false, 7),\n\n\n\n \":\" => Token::new_infix_op(&s, true, 6),\n\n\n\n \"*\" => Token::new_infix_op(&s, true, 5),\n\n \"/\" => Token::new_infix_op(&s, true, 5),\n\n \"%\" => Token::new_infix_op(&s, true, 5),\n\n\n\n \"+\" => Token::new_infix_op(&s, true, 4),\n\n \"-\" => Token::new_infix_op(&s, true, 4),\n\n\n", "file_path": "src/uu/expr/src/tokens.rs", "rank": 87, "score": 222288.39389055443 }, { "content": "fn pass_none(_: &str, _: &regex::Regex) -> bool {\n\n false\n\n}\n\n\n", "file_path": "src/uu/nl/src/nl.rs", "rank": 88, "score": 220800.3620130473 }, { "content": "#[cfg(unix)]\n\npub fn display_permissions(metadata: &fs::Metadata, display_file_type: bool) -> String {\n\n let mode: mode_t = metadata.mode() as mode_t;\n\n display_permissions_unix(mode, display_file_type)\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 89, "score": 219313.811835472 }, { "content": "pub fn gcd(mut u: u64, mut v: u64) -> u64 {\n\n // Stein's binary GCD algorithm\n\n // Base cases: gcd(n, 0) = gcd(0, n) = n\n\n if u == 0 {\n\n return v;\n\n } else if v == 0 {\n\n return u;\n\n }\n\n\n\n // gcd(2ⁱ u, 2ʲ v) = 2ᵏ gcd(u, v) with u, v odd and k = min(i, j)\n\n // 2ᵏ is the greatest power of two that divides both u and v\n\n let k = {\n\n let i = u.trailing_zeros();\n\n let j = v.trailing_zeros();\n\n u >>= i;\n\n v >>= j;\n\n min(i, j)\n\n };\n\n\n\n loop {\n", "file_path": "src/uu/factor/src/numeric/gcd.rs", "rank": 90, "score": 218963.25886773836 }, { "content": "pub fn paths_refer_to_same_file(p1: &Path, p2: &Path) -> io::Result<bool> {\n\n // We have to take symlinks and relative paths into account.\n\n let pathbuf1 = canonicalize(p1, CanonicalizeMode::Normal)?;\n\n let pathbuf2 = canonicalize(p2, 
CanonicalizeMode::Normal)?;\n\n\n\n Ok(pathbuf1 == pathbuf2)\n\n}\n\n\n", "file_path": "src/uu/cp/src/cp.rs", "rank": 91, "score": 218471.91149932903 }, { "content": "#[cfg(not(windows))]\n\nfn build_command<'a, 'b>(args: &'a mut Vec<&'b str>) -> (Cow<'b, str>, &'a [&'b str]) {\n\n let progname = Cow::from(args[0]);\n\n (progname, &args[1..])\n\n}\n\n\n", "file_path": "src/uu/env/src/env.rs", "rank": 92, "score": 218116.13592771027 }, { "content": "// * This file is part of the uutils coreutils package.\n\n// *\n\n// * (c) Michael Debertol <michael.debertol..AT..gmail.com>\n\n// *\n\n// * For the full copyright and license information, please view the LICENSE\n\n// * file that was distributed with this source code.\n\n\n\n//! Check if a file is ordered\n\n\n\nuse crate::{\n\n chunks::{self, Chunk},\n\n compare_by, open, GlobalSettings,\n\n};\n\nuse itertools::Itertools;\n\nuse std::{\n\n cmp::Ordering,\n\n io::Read,\n\n iter,\n\n sync::mpsc::{sync_channel, Receiver, SyncSender},\n\n thread,\n\n};\n\n\n\n/// Check if the file at `path` is ordered.\n\n///\n\n/// # Returns\n\n///\n\n/// The code we should exit with.\n", "file_path": "src/uu/sort/src/check.rs", "rank": 94, "score": 46.6058445908067 }, { "content": "/// Wrapper for `std::io::Read` allowing to peek into the data to be read.\n\npub struct PeekReader<R> {\n\n inner: R,\n\n temp_buffer: Vec<u8>,\n\n}\n\n\n\nimpl<R> PeekReader<R> {\n\n /// Create a new `PeekReader` wrapping `inner`\n\n pub fn new(inner: R) -> Self {\n\n PeekReader {\n\n inner,\n\n temp_buffer: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<R: Read> PeekReader<R> {\n\n fn read_from_tempbuffer(&mut self, mut out: &mut [u8]) -> usize {\n\n match out.write(self.temp_buffer.as_mut_slice()) {\n\n Ok(n) => {\n", "file_path": "src/uu/od/src/peekreader.rs", "rank": 96, "score": 45.417986220174306 }, { "content": " &mut carry_over,\n\n &mut file,\n\n &mut files,\n\n separator,\n\n Vec::new(),\n\n &settings,\n\n );\n\n if sender_option.is_none() {\n\n // We 
have already read the whole input. Since we are in our first two reads,\n\n // this means that we can fit the whole input into memory. Bypass writing below and\n\n // handle this case in a more straightforward way.\n\n return if let Ok(first_chunk) = receiver.recv() {\n\n if let Ok(second_chunk) = receiver.recv() {\n\n ReadResult::SortedTwoChunks([first_chunk, second_chunk])\n\n } else {\n\n ReadResult::SortedSingleChunk(first_chunk)\n\n }\n\n } else {\n\n ReadResult::EmptyInput\n\n };\n", "file_path": "src/uu/sort/src/ext_sort.rs", "rank": 98, "score": 40.22612088855604 }, { "content": "\n\nimpl<R> PartialReader<R> {\n\n /// Create a new `PartialReader` wrapping `inner`, which will skip\n\n /// `skip` bytes, and limits the output to `limit` bytes. Set `limit`\n\n /// to `None` if there should be no limit.\n\n pub fn new(inner: R, skip: usize, limit: Option<usize>) -> Self {\n\n PartialReader { inner, skip, limit }\n\n }\n\n}\n\n\n\nimpl<R: Read> Read for PartialReader<R> {\n\n fn read(&mut self, out: &mut [u8]) -> io::Result<usize> {\n\n if self.skip > 0 {\n\n let mut bytes = [0; MAX_SKIP_BUFFER];\n\n\n\n while self.skip > 0 {\n\n let skip_count = cmp::min(self.skip, MAX_SKIP_BUFFER);\n\n\n\n match self.inner.read(&mut bytes[..skip_count]) {\n\n Ok(0) => {\n", "file_path": "src/uu/od/src/partialreader.rs", "rank": 99, "score": 39.181165189015516 } ]
Rust
router/src/dml_handlers/partitioner.rs
r4ntix/influxdb_iox
5ff874925101e2afbafa6853385260a2ba044394
use super::DmlHandler; use async_trait::async_trait; use data_types::{DatabaseName, DeletePredicate, PartitionTemplate}; use hashbrown::HashMap; use mutable_batch::{MutableBatch, PartitionWrite, WritePayload}; use observability_deps::tracing::*; use thiserror::Error; use trace::ctx::SpanContext; #[derive(Debug, Error)] pub enum PartitionError { #[error("error batching into partitioned write: {0}")] BatchWrite(#[from] mutable_batch::Error), } #[derive(Debug, PartialEq, Clone)] pub struct Partitioned<T> { key: String, payload: T, } impl<T> Partitioned<T> { pub fn new(key: String, payload: T) -> Self { Self { key, payload } } pub fn payload(&self) -> &T { &self.payload } pub fn into_parts(self) -> (String, T) { (self.key, self.payload) } } #[derive(Debug)] pub struct Partitioner { partition_template: PartitionTemplate, } impl Partitioner { pub fn new(partition_template: PartitionTemplate) -> Self { Self { partition_template } } } #[async_trait] impl DmlHandler for Partitioner { type WriteError = PartitionError; type DeleteError = PartitionError; type WriteInput = HashMap<String, MutableBatch>; type WriteOutput = Vec<Partitioned<Self::WriteInput>>; async fn write( &self, _namespace: &DatabaseName<'static>, batch: Self::WriteInput, _span_ctx: Option<SpanContext>, ) -> Result<Self::WriteOutput, Self::WriteError> { let mut partitions: HashMap<_, HashMap<_, MutableBatch>> = HashMap::default(); for (table_name, batch) in batch { for (partition_key, partition_payload) in PartitionWrite::partition(&table_name, &batch, &self.partition_template) { let partition = partitions.entry(partition_key).or_default(); let table_batch = partition .raw_entry_mut() .from_key(&table_name) .or_insert_with(|| (table_name.to_owned(), MutableBatch::default())); partition_payload.write_to_batch(table_batch.1)?; } } Ok(partitions .into_iter() .map(|(key, batch)| Partitioned { key, payload: batch, }) .collect::<Vec<_>>()) } async fn delete( &self, _namespace: &DatabaseName<'static>, _table_name: 
&str, _predicate: &DeletePredicate, _span_ctx: Option<SpanContext>, ) -> Result<(), Self::DeleteError> { Ok(()) } } #[cfg(test)] mod tests { use super::*; use assert_matches::assert_matches; use data_types::TemplatePart; const DEFAULT_TIMESTAMP_NANOS: i64 = 42000000000000000; macro_rules! test_write { ( $name:ident, lp = $lp:expr, want_writes = [$($want_writes:tt)*], want_handler_ret = $($want_handler_ret:tt)+ ) => { paste::paste! { #[tokio::test] async fn [<test_write_ $name>]() { let partition_template = PartitionTemplate { parts: vec![TemplatePart::TimeFormat("%Y-%m-%d".to_owned())], }; let partitioner = Partitioner::new(partition_template); let ns = DatabaseName::new("bananas").expect("valid db name"); let (writes, _) = mutable_batch_lp::lines_to_batches_stats($lp, DEFAULT_TIMESTAMP_NANOS).expect("failed to parse test LP"); let handler_ret = partitioner.write(&ns, writes, None).await; assert_matches!(handler_ret, $($want_handler_ret)+); let got = handler_ret.unwrap_or_default() .into_iter() .map(|partition| { let mut tables = partition .payload .keys() .cloned() .collect::<Vec<String>>(); tables.sort(); (partition.key.clone(), tables) }) .collect::<HashMap<_, _>>(); test_write!(@assert_writes, got, $($want_writes)*); } } }; (@assert_writes, $got:ident, unchecked) => { let _x = $got; }; (@assert_writes, $got:ident, $($partition_key:expr => $want_tables:expr, )*) => { #[allow(unused_mut)] let mut want_writes: HashMap<String, _> = Default::default(); $( let mut want: Vec<String> = $want_tables.into_iter().map(|t| t.to_string()).collect(); want.sort(); want_writes.insert($partition_key.to_string(), want); )* pretty_assertions::assert_eq!(want_writes, $got); }; } test_write!( single_partition, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 2\n\ another,tag1=A,tag2=B value=42i 3\n\ bananas,tag1=A,tag2=B val=42i 2\n\ table,tag1=A,tag2=B val=42i 1\n\ ", want_writes = [ "1970-01-01" => ["bananas", "platanos", "another", "table"], ], 
want_handler_ret = Ok(_) ); test_write!( multiple_partitions, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 1465839830100400200\n\ another,tag1=A,tag2=B value=42i 1465839830100400200\n\ bananas,tag1=A,tag2=B val=42i 2\n\ table,tag1=A,tag2=B val=42i 1644347270670952000\n\ ", want_writes = [ "1970-01-01" => ["bananas"], "2016-06-13" => ["platanos", "another"], "2022-02-08" => ["table"], ], want_handler_ret = Ok(_) ); test_write!( multiple_partitions_upserted, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 1465839830100400200\n\ platanos,tag1=A,tag2=B value=42i 1\n\ bananas,tag1=A,tag2=B value=42i 1465839830100400200\n\ bananas,tag1=A,tag2=B value=42i 1465839830100400200\n\ ", want_writes = [ "1970-01-01" => ["bananas", "platanos"], "2016-06-13" => ["bananas", "platanos"], ], want_handler_ret = Ok(_) ); }
use super::DmlHandler; use async_trait::async_trait; use data_types::{DatabaseName, DeletePredicate, PartitionTemplate}; use hashbrown::HashMap; use mutable_batch::{MutableBatch, PartitionWrite, WritePayload}; use observability_deps::tracing::*; use thiserror::Error; use trace::ctx::SpanContext; #[derive(Debug, Error)] pub enum PartitionError { #[error("error batching into partitioned write: {0}")] BatchWrite(#[from] mutable_batch::Error), } #[derive(Debug, PartialEq, Clone)] pub struct Partitioned<T> { key: String, payload: T, } impl<T> Partitioned<T> { pub fn new(key: String, payload: T) -> Self { Self { key, payload } } pub fn payload(&self) -> &T { &self.payload } pub fn into_parts(self) -> (String, T) { (self.key, self.payload) } } #[derive(Debug)] pub struct Partitioner { partition_template: PartitionTemplate, } impl Partitioner { pub fn new(partition_template: PartitionTemplate) -> Self { Self { partition_template } } } #[async_trait] impl DmlHandler for Partitioner { type WriteError = PartitionError; type DeleteError = PartitionError; type WriteInput = HashMap<String, MutableBatch>; type WriteOutput = Vec<Partitioned<Self::WriteInput>>; async fn write( &self, _namespace: &DatabaseName<'static>, batch: Self::WriteInput, _span_ctx: Option<SpanContext>, ) -> Result<Self::WriteOutput, Self::WriteError> { let mut partitions: HashMap<_, HashMap<_, MutableBatch>> = Hash
; let table_batch = partition .raw_entry_mut() .from_key(&table_name) .or_insert_with(|| (table_name.to_owned(), MutableBatch::default())); partition_payload.write_to_batch(table_batch.1)?; } } Ok(partitions .into_iter() .map(|(key, batch)| Partitioned { key, payload: batch, }) .collect::<Vec<_>>()) } async fn delete( &self, _namespace: &DatabaseName<'static>, _table_name: &str, _predicate: &DeletePredicate, _span_ctx: Option<SpanContext>, ) -> Result<(), Self::DeleteError> { Ok(()) } } #[cfg(test)] mod tests { use super::*; use assert_matches::assert_matches; use data_types::TemplatePart; const DEFAULT_TIMESTAMP_NANOS: i64 = 42000000000000000; macro_rules! test_write { ( $name:ident, lp = $lp:expr, want_writes = [$($want_writes:tt)*], want_handler_ret = $($want_handler_ret:tt)+ ) => { paste::paste! { #[tokio::test] async fn [<test_write_ $name>]() { let partition_template = PartitionTemplate { parts: vec![TemplatePart::TimeFormat("%Y-%m-%d".to_owned())], }; let partitioner = Partitioner::new(partition_template); let ns = DatabaseName::new("bananas").expect("valid db name"); let (writes, _) = mutable_batch_lp::lines_to_batches_stats($lp, DEFAULT_TIMESTAMP_NANOS).expect("failed to parse test LP"); let handler_ret = partitioner.write(&ns, writes, None).await; assert_matches!(handler_ret, $($want_handler_ret)+); let got = handler_ret.unwrap_or_default() .into_iter() .map(|partition| { let mut tables = partition .payload .keys() .cloned() .collect::<Vec<String>>(); tables.sort(); (partition.key.clone(), tables) }) .collect::<HashMap<_, _>>(); test_write!(@assert_writes, got, $($want_writes)*); } } }; (@assert_writes, $got:ident, unchecked) => { let _x = $got; }; (@assert_writes, $got:ident, $($partition_key:expr => $want_tables:expr, )*) => { #[allow(unused_mut)] let mut want_writes: HashMap<String, _> = Default::default(); $( let mut want: Vec<String> = $want_tables.into_iter().map(|t| t.to_string()).collect(); want.sort(); 
want_writes.insert($partition_key.to_string(), want); )* pretty_assertions::assert_eq!(want_writes, $got); }; } test_write!( single_partition, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 2\n\ another,tag1=A,tag2=B value=42i 3\n\ bananas,tag1=A,tag2=B val=42i 2\n\ table,tag1=A,tag2=B val=42i 1\n\ ", want_writes = [ "1970-01-01" => ["bananas", "platanos", "another", "table"], ], want_handler_ret = Ok(_) ); test_write!( multiple_partitions, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 1465839830100400200\n\ another,tag1=A,tag2=B value=42i 1465839830100400200\n\ bananas,tag1=A,tag2=B val=42i 2\n\ table,tag1=A,tag2=B val=42i 1644347270670952000\n\ ", want_writes = [ "1970-01-01" => ["bananas"], "2016-06-13" => ["platanos", "another"], "2022-02-08" => ["table"], ], want_handler_ret = Ok(_) ); test_write!( multiple_partitions_upserted, lp = "\ bananas,tag1=A,tag2=B val=42i 1\n\ platanos,tag1=A,tag2=B value=42i 1465839830100400200\n\ platanos,tag1=A,tag2=B value=42i 1\n\ bananas,tag1=A,tag2=B value=42i 1465839830100400200\n\ bananas,tag1=A,tag2=B value=42i 1465839830100400200\n\ ", want_writes = [ "1970-01-01" => ["bananas", "platanos"], "2016-06-13" => ["bananas", "platanos"], ], want_handler_ret = Ok(_) ); }
Map::default(); for (table_name, batch) in batch { for (partition_key, partition_payload) in PartitionWrite::partition(&table_name, &batch, &self.partition_template) { let partition = partitions.entry(partition_key).or_default()
function_block-random_span
[]
Rust
07-rust/stm32f446/stm32f446_pac/src/rcc/cr.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register CR"] pub type R = crate::R<u32, super::CR>; #[doc = "Writer for register CR"] pub type W = crate::W<u32, super::CR>; #[doc = "Register CR `reset()`'s with value 0x83"] impl crate::ResetValue for super::CR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x83 } } #[doc = "Reader of field `PLLI2SRDY`"] pub type PLLI2SRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `PLLI2SON`"] pub type PLLI2SON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLLI2SON`"] pub struct PLLI2SON_W<'a> { w: &'a mut W, } impl<'a> PLLI2SON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Reader of field `PLLRDY`"] pub type PLLRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `PLLON`"] pub type PLLON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLLON`"] pub struct PLLON_W<'a> { w: &'a mut W, } impl<'a> PLLON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `CSSON`"] pub type CSSON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CSSON`"] pub struct CSSON_W<'a> { w: &'a mut W, } impl<'a> CSSON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn 
clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } #[doc = "Reader of field `HSEBYP`"] pub type HSEBYP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSEBYP`"] pub struct HSEBYP_W<'a> { w: &'a mut W, } impl<'a> HSEBYP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Reader of field `HSERDY`"] pub type HSERDY_R = crate::R<bool, bool>; #[doc = "Reader of field `HSEON`"] pub type HSEON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSEON`"] pub struct HSEON_W<'a> { w: &'a mut W, } impl<'a> HSEON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `HSICAL`"] pub type HSICAL_R = crate::R<u8, u8>; #[doc = "Reader of field `HSITRIM`"] pub type HSITRIM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `HSITRIM`"] pub struct HSITRIM_W<'a> { w: &'a mut W, } impl<'a> HSITRIM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 3)) | (((value as u32) & 0x1f) << 3); self.w } } #[doc = "Reader of field `HSIRDY`"] pub type 
HSIRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `HSION`"] pub type HSION_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSION`"] pub struct HSION_W<'a> { w: &'a mut W, } impl<'a> HSION_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 27 - PLLI2S clock ready flag"] #[inline(always)] pub fn plli2srdy(&self) -> PLLI2SRDY_R { PLLI2SRDY_R::new(((self.bits >> 27) & 0x01) != 0) } #[doc = "Bit 26 - PLLI2S enable"] #[inline(always)] pub fn plli2son(&self) -> PLLI2SON_R { PLLI2SON_R::new(((self.bits >> 26) & 0x01) != 0) } #[doc = "Bit 25 - Main PLL (PLL) clock ready flag"] #[inline(always)] pub fn pllrdy(&self) -> PLLRDY_R { PLLRDY_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bit 24 - Main PLL (PLL) enable"] #[inline(always)] pub fn pllon(&self) -> PLLON_R { PLLON_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 19 - Clock security system enable"] #[inline(always)] pub fn csson(&self) -> CSSON_R { CSSON_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 18 - HSE clock bypass"] #[inline(always)] pub fn hsebyp(&self) -> HSEBYP_R { HSEBYP_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 17 - HSE clock ready flag"] #[inline(always)] pub fn hserdy(&self) -> HSERDY_R { HSERDY_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 16 - HSE clock enable"] #[inline(always)] pub fn hseon(&self) -> HSEON_R { HSEON_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bits 8:15 - Internal high-speed clock calibration"] #[inline(always)] pub fn hsical(&self) -> HSICAL_R { HSICAL_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 3:7 - Internal high-speed clock trimming"] 
#[inline(always)] pub fn hsitrim(&self) -> HSITRIM_R { HSITRIM_R::new(((self.bits >> 3) & 0x1f) as u8) } #[doc = "Bit 1 - Internal high-speed clock ready flag"] #[inline(always)] pub fn hsirdy(&self) -> HSIRDY_R { HSIRDY_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - Internal high-speed clock enable"] #[inline(always)] pub fn hsion(&self) -> HSION_R { HSION_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 26 - PLLI2S enable"] #[inline(always)] pub fn plli2son(&mut self) -> PLLI2SON_W { PLLI2SON_W { w: self } } #[doc = "Bit 24 - Main PLL (PLL) enable"] #[inline(always)] pub fn pllon(&mut self) -> PLLON_W { PLLON_W { w: self } } #[doc = "Bit 19 - Clock security system enable"] #[inline(always)] pub fn csson(&mut self) -> CSSON_W { CSSON_W { w: self } } #[doc = "Bit 18 - HSE clock bypass"] #[inline(always)] pub fn hsebyp(&mut self) -> HSEBYP_W { HSEBYP_W { w: self } } #[doc = "Bit 16 - HSE clock enable"] #[inline(always)] pub fn hseon(&mut self) -> HSEON_W { HSEON_W { w: self } } #[doc = "Bits 3:7 - Internal high-speed clock trimming"] #[inline(always)] pub fn hsitrim(&mut self) -> HSITRIM_W { HSITRIM_W { w: self } } #[doc = "Bit 0 - Internal high-speed clock enable"] #[inline(always)] pub fn hsion(&mut self) -> HSION_W { HSION_W { w: self } } }
#[doc = "Reader of register CR"] pub type R = crate::R<u32, super::CR>; #[doc = "Writer for register CR"] pub type W = crate::W<u32, super::CR>; #[doc = "Register CR `reset()`'s with value 0x83"] impl crate::ResetValue for super::CR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x83 } } #[doc = "Reader of field `PLLI2SRDY`"] pub type PLLI2SRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `PLLI2SON`"] pub type PLLI2SON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLLI2SON`"] pub struct PLLI2SON_W<'a> { w: &'a mut W, } impl<'a> PLLI2SON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Reader of field `PLLRDY`"] pub type PLLRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `PLLON`"] pub type PLLON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PLLON`"] pub struct PLLON_W<'a> { w: &'a mut W, } impl<'a> PLLON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `CSSON`"] pub type CSSON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CSSON`"] pub struct CSSON_W<'a> { w: &'a mut W, } impl<'a> CSSON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn 
clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } #[doc = "Reader of field `HSEBYP`"] pub type HSEBYP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSEBYP`"] pub struct HSEBYP_W<'a> { w: &'a mut W, } impl<'a> HSEBYP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Reader of field `HSERDY`"] pub type HSERDY_R = crate::R<bool, bool>; #[doc = "Reader of field `HSEON`"] pub type HSEON_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSEON`"] pub struct HSEON_W<'a> { w: &'a mut W, } impl<'a> HSEON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `HSICAL`"] pub type HSICAL_R = crate::R<u8, u8>; #[doc = "Reader of field `HSITRIM`"] pub type HSITRIM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `HSITRIM`"] pub struct HSITRIM_W<'a> { w: &'a mut W, } impl<'a> HSITRIM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 3)) | (((value as u32) & 0x1f) << 3); self.w
18) & 0x01) != 0) } #[doc = "Bit 17 - HSE clock ready flag"] #[inline(always)] pub fn hserdy(&self) -> HSERDY_R { HSERDY_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 16 - HSE clock enable"] #[inline(always)] pub fn hseon(&self) -> HSEON_R { HSEON_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bits 8:15 - Internal high-speed clock calibration"] #[inline(always)] pub fn hsical(&self) -> HSICAL_R { HSICAL_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 3:7 - Internal high-speed clock trimming"] #[inline(always)] pub fn hsitrim(&self) -> HSITRIM_R { HSITRIM_R::new(((self.bits >> 3) & 0x1f) as u8) } #[doc = "Bit 1 - Internal high-speed clock ready flag"] #[inline(always)] pub fn hsirdy(&self) -> HSIRDY_R { HSIRDY_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - Internal high-speed clock enable"] #[inline(always)] pub fn hsion(&self) -> HSION_R { HSION_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 26 - PLLI2S enable"] #[inline(always)] pub fn plli2son(&mut self) -> PLLI2SON_W { PLLI2SON_W { w: self } } #[doc = "Bit 24 - Main PLL (PLL) enable"] #[inline(always)] pub fn pllon(&mut self) -> PLLON_W { PLLON_W { w: self } } #[doc = "Bit 19 - Clock security system enable"] #[inline(always)] pub fn csson(&mut self) -> CSSON_W { CSSON_W { w: self } } #[doc = "Bit 18 - HSE clock bypass"] #[inline(always)] pub fn hsebyp(&mut self) -> HSEBYP_W { HSEBYP_W { w: self } } #[doc = "Bit 16 - HSE clock enable"] #[inline(always)] pub fn hseon(&mut self) -> HSEON_W { HSEON_W { w: self } } #[doc = "Bits 3:7 - Internal high-speed clock trimming"] #[inline(always)] pub fn hsitrim(&mut self) -> HSITRIM_W { HSITRIM_W { w: self } } #[doc = "Bit 0 - Internal high-speed clock enable"] #[inline(always)] pub fn hsion(&mut self) -> HSION_W { HSION_W { w: self } } }
} } #[doc = "Reader of field `HSIRDY`"] pub type HSIRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `HSION`"] pub type HSION_R = crate::R<bool, bool>; #[doc = "Write proxy for field `HSION`"] pub struct HSION_W<'a> { w: &'a mut W, } impl<'a> HSION_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 27 - PLLI2S clock ready flag"] #[inline(always)] pub fn plli2srdy(&self) -> PLLI2SRDY_R { PLLI2SRDY_R::new(((self.bits >> 27) & 0x01) != 0) } #[doc = "Bit 26 - PLLI2S enable"] #[inline(always)] pub fn plli2son(&self) -> PLLI2SON_R { PLLI2SON_R::new(((self.bits >> 26) & 0x01) != 0) } #[doc = "Bit 25 - Main PLL (PLL) clock ready flag"] #[inline(always)] pub fn pllrdy(&self) -> PLLRDY_R { PLLRDY_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bit 24 - Main PLL (PLL) enable"] #[inline(always)] pub fn pllon(&self) -> PLLON_R { PLLON_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 19 - Clock security system enable"] #[inline(always)] pub fn csson(&self) -> CSSON_R { CSSON_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 18 - HSE clock bypass"] #[inline(always)] pub fn hsebyp(&self) -> HSEBYP_R { HSEBYP_R::new(((self.bits >>
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 0, "score": 192988.70578231275 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 1, "score": 192988.70578231275 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` 
method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 2, "score": 192988.70578231278 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32F0 by default uses the 8MHz HSI on boot\n\n // (See section 6.2 of the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32f0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahbenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/src/main.rs", "rank": 3, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! 
means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~1s period; STM32F4 by default uses the 16MHz HSI on boot\n\n // (See section 6.2.2 in the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin A5 as push-pull output\n\n let p = stm32f446::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahb1enr.write(|w| w.gpioaen().set_bit());\n\n\n\n // Set moder on fifth pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/src/main.rs", "rank": 4, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32L0 boots to a ~2.1MHz internal oscillator\n\n // (See Section 7.2 of the STM32L0x1 reference manual)\n\n syst.set_reload(4_200_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32l0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.iopenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/src/main.rs", "rank": 5, "score": 134178.0685062561 }, { "content": "TickType_t uxTaskResetEventItemValue( void )\n\n{\n\nTickType_t 
uxReturn;\n\n\n\n\tuxReturn = listGET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ) );\n\n\n\n\t/* Reset the event list item to its normal value - so it can be used with\n\n\tqueues and semaphores. */\n\n\tlistSET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ), ( ( TickType_t ) configMAX_PRIORITIES - ( TickType_t ) pxCurrentTCB->uxPriority ) ); /*lint !e961 MISRA exception as the casts are only redundant for some ports. */\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 6, "score": 104903.10307163426 }, { "content": "#define portMAX_8_BIT_VALUE\t\t\t\t\t( ( uint8_t ) 0xff )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 7, "score": 100509.13876308527 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/build.rs", "rank": 8, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/build.rs", "rank": 9, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n 
File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/build.rs", "rank": 10, "score": 88441.66588380146 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/build.rs", "rank": 11, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. 
By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/build.rs", "rank": 12, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/build.rs", "rank": 13, "score": 86885.72247686045 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 14, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 15, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 16, "score": 
79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 17, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 18, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 19, "score": 79431.945204443 }, { "content": "#[doc = \"Write proxy for field `HSITRIM`\"]\n\npub struct HSITRIM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HSITRIM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x1f << 3)) | (((value as u32) & 0x1f) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HSICAL`\"]\n\npub type HSICAL_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `HSEON`\"]\n\npub type HSEON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HSEON`\"]\n\npub struct HSEON_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 20, "score": 60354.77446648838 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x0300\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> 
Self::Type {\n\n 0x0300\n\n }\n\n}\n\n#[doc = \"Reader of field `PLLRDY`\"]\n\npub type PLLRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `PLLON`\"]\n\npub type PLLON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PLLON`\"]\n\npub struct PLLON_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rcc/cr.rs", "rank": 22, "score": 60353.90354753722 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CTPH`\"]\n\npub type CTPH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CTPH`\"]\n\npub struct CTPH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CTPH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/cr.rs", "rank": 23, "score": 60350.606008150055 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `PRESCALER`\"]\n\npub type PRESCALER_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PRESCALER`\"]\n\npub struct PRESCALER_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PRESCALER_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/quadspi/cr.rs", "rank": 24, "score": 60350.606008150055 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register 
CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x2000\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x2000\n\n }\n\n}\n\n#[doc = \"Reader of field `TRIM`\"]\n\npub type TRIM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TRIM`\"]\n\npub struct TRIM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRIM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/cr.rs", "rank": 25, "score": 60349.71151087441 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESET`\"]\n\npub type RESET_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RESET`\"]\n\npub struct RESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESET_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crc/cr.rs", "rank": 26, "score": 60346.74330077801 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXEOM`\"]\n\npub type TXEOM_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXEOM`\"]\n\npub struct TXEOM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXEOM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/cr.rs", 
"rank": 27, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COE`\"]\n\npub type COE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `COE`\"]\n\npub struct COE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/cr.rs", "rank": 28, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `REV_OUT`\"]\n\npub type REV_OUT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `REV_OUT`\"]\n\npub struct REV_OUT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REV_OUT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/crc/cr.rs", "rank": 29, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DMAOUTEN`\"]\n\npub type DMAOUTEN_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DMAOUTEN`\"]\n\npub struct 
DMAOUTEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMAOUTEN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/aes/cr.rs", "rank": 30, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COE`\"]\n\npub type COE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `COE`\"]\n\npub struct COE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/cr.rs", "rank": 31, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DBP`\"]\n\npub type DBP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DBP`\"]\n\npub struct DBP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/pwr/cr.rs", "rank": 32, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc 
= \"Reader of field `ADEN`\"]\n\npub type ADEN_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ADEN`\"]\n\npub struct ADEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADEN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/adc/cr.rs", "rank": 33, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DMAUDRIE2`\"]\n\npub type DMAUDRIE2_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DMAUDRIE2`\"]\n\npub struct DMAUDRIE2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMAUDRIE2_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dac/cr.rs", "rank": 34, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CNTSTRT`\"]\n\npub type CNTSTRT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CNTSTRT`\"]\n\npub struct CNTSTRT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CNTSTRT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/cr.rs", "rank": 35, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR 
`reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADCAL`\"]\n\npub type ADCAL_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ADCAL`\"]\n\npub struct ADCAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADCAL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/cr.rs", "rank": 36, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EN1`\"]\n\npub type EN1_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EN1`\"]\n\npub struct EN1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EN1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 37, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEDGE`\"]\n\npub type TSEDGE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSEDGE`\"]\n\npub struct TSEDGE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEDGE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/cr.rs", "rank": 38, "score": 60345.09627611665 }, { "content": "#[doc = \"Reader of register 
CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x7f\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f\n\n }\n\n}\n\n#[doc = \"Reader of field `WDGA`\"]\n\npub type WDGA_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WDGA`\"]\n\npub struct WDGA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WDGA_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/wwdg/cr.rs", "rank": 39, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x8000_0000\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x8000_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `PG`\"]\n\npub type PG_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PG`\"]\n\npub struct PG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PG_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/flash/cr.rs", "rank": 40, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x7f\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f\n\n }\n\n}\n\n#[doc = \"Reader of field `WDGA`\"]\n\npub type WDGA_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WDGA`\"]\n\npub struct WDGA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WDGA_W<'a> {\n\n #[doc = r\"Sets the field 
bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/wwdg/cr.rs", "rank": 41, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x1000\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x1000\n\n }\n\n}\n\n#[doc = \"Reader of field `LPDS`\"]\n\npub type LPDS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `LPDS`\"]\n\npub struct LPDS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LPDS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/pwr/cr.rs", "rank": 42, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x83\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x83\n\n }\n\n}\n\n#[doc = \"Reader of field `HSION`\"]\n\npub type HSION_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HSION`\"]\n\npub struct HSION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HSION_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 43, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0xc000\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xc000\n\n }\n\n}\n\n#[doc = \"Reader of field `LPDS`\"]\n\npub type LPDS_R = 
crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `LPDS`\"]\n\npub struct LPDS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LPDS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/pwr/cr.rs", "rank": 44, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x7f\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f\n\n }\n\n}\n\n#[doc = \"Reader of field `WDGA`\"]\n\npub type WDGA_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WDGA`\"]\n\npub struct WDGA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WDGA_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/wwdg/cr.rs", "rank": 45, "score": 60344.25154298166 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DBG_STOP`\"]\n\npub type DBG_STOP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DBG_STOP`\"]\n\npub struct DBG_STOP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBG_STOP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dbgmcu/cr.rs", "rank": 46, "score": 60343.014928722194 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DBG_STOP`\"]\n\npub type DBG_STOP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DBG_STOP`\"]\n\npub struct DBG_STOP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBG_STOP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/dbg/cr.rs", "rank": 47, "score": 60343.014928722194 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0x80\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x80\n\n }\n\n}\n\n#[doc = \"Reader of field `FORCE_OPTLOAD`\"]\n\npub type FORCE_OPTLOAD_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `FORCE_OPTLOAD`\"]\n\npub struct FORCE_OPTLOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FORCE_OPTLOAD_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/flash/cr.rs", "rank": 48, "score": 60342.21009745823 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CAPTURE`\"]\n\npub type CAPTURE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CM`\"]\n\npub type CM_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CM`\"]\n\npub struct CM_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/cr.rs", "rank": 49, "score": 60341.308299926735 }, { "content": " self.w.bits 
= (self.w.bits & !(0x03 << 3)) | (((value as u32) & 0x03) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DATATYPE`\"]\n\npub type DATATYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DATATYPE`\"]\n\npub struct DATATYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATATYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 1)) | (((value as u32) & 0x03) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `EN`\"]\n\npub type EN_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/aes/cr.rs", "rank": 51, "score": 60333.89587353308 }, { "content": "#[doc = \"Reader of field `SSD`\"]\n\npub type SSD_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SSD`\"]\n\npub struct SSD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SSD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x7f << 17)) | (((value as u32) & 0x7f) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SSE`\"]\n\npub type SSE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SSE`\"]\n\npub struct SSE_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/cr.rs", "rank": 52, "score": 60333.885865199045 }, { "content": "#[doc = \"Reader of field `OSEL`\"]\n\npub type OSEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `OSEL`\"]\n\npub struct OSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 21)) | (((value as u32) & 0x03) << 21);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `COE`\"]\n\npub type COE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field 
`COE`\"]\n\npub struct COE_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/cr.rs", "rank": 53, "score": 60333.885865199045 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `EDM`\"]\n\npub type EDM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EDM`\"]\n\npub struct EDM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EDM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENABLE`\"]\n\npub type ENABLE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENABLE`\"]\n\npub struct ENABLE_W<'a> {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/cr.rs", "rank": 54, "score": 60333.616938410465 }, { "content": "#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CR`\"]\n\npub struct CR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/crc/cr.rs", "rank": 55, "score": 60333.02835381125 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `UDEN`\"]\n\npub type UDEN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `UDEN`\"]\n\npub struct UDEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UDEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the 
field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 18)) | (((value as u32) & 0x03) << 18);\n\n self.w\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/pwr/cr.rs", "rank": 56, "score": 60332.97207230133 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VOS`\"]\n\npub type VOS_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `VOS`\"]\n\npub struct VOS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VOS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u32) & 0x03) << 14);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/pwr/cr.rs", "rank": 58, "score": 60331.973891505484 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEL2`\"]\n\npub type TSEL2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEL2`\"]\n\npub struct TSEL2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEL2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 19)) | (((value as u32) & 0x07) << 19);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 59, "score": 60331.973891505484 }, { "content": "#[doc = \"Write proxy for field `RTCPRE`\"]\n\npub struct RTCPRE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RTCPRE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n 
self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CSSLSEON`\"]\n\npub type CSSLSEON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CSSLSEON`\"]\n\npub struct CSSLSEON_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CSSLSEON_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rcc/cr.rs", "rank": 60, "score": 60331.683660339535 }, { "content": "#[doc = \"Write proxy for field `PSIZE`\"]\n\npub struct PSIZE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PSIZE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `STRT`\"]\n\npub type STRT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `STRT`\"]\n\npub struct STRT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STRT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/flash/cr.rs", "rank": 61, "score": 60331.683660339535 }, { "content": "impl<'a> WAVE2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEL2`\"]\n\npub type TSEL2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEL2`\"]\n\npub struct TSEL2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEL2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 19)) | (((value as u32) & 0x07) << 19);\n\n self.w\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dac/cr.rs", "rank": 62, "score": 60331.53716761787 }, { "content": " w: &'a 
mut W,\n\n}\n\nimpl<'a> REV_IN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `POLYSIZE`\"]\n\npub type POLYSIZE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `POLYSIZE`\"]\n\npub struct POLYSIZE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POLYSIZE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/crc/cr.rs", "rank": 63, "score": 60331.15767934246 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> WAVE1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEL1`\"]\n\npub type TSEL1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEL1`\"]\n\npub struct TSEL1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEL1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dac/cr.rs", "rank": 64, "score": 60331.15767934246 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CHMOD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MODE`\"]\n\npub struct MODE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MODE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub 
unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/aes/cr.rs", "rank": 65, "score": 60331.15767934246 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> POLYSIZE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 3)) | (((value as u32) & 0x03) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `REV_IN`\"]\n\npub type REV_IN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `REV_IN`\"]\n\npub struct REV_IN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REV_IN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crc/cr.rs", "rank": 66, "score": 60331.15767934246 }, { "content": "#[doc = \"Write proxy for field `WAVE1`\"]\n\npub struct WAVE1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WAVE1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MAMP1`\"]\n\npub type MAMP1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MAMP1`\"]\n\npub struct MAMP1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MAMP1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 67, "score": 60330.17507624711 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> OSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 21)) | (((value as u32) & 0x03) << 21);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `POL`\"]\n\npub type POL_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy 
for field `POL`\"]\n\npub struct POL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/cr.rs", "rank": 68, "score": 60329.957715032506 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> PLS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PVDE`\"]\n\npub type PVDE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PVDE`\"]\n\npub struct PVDE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PVDE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/pwr/cr.rs", "rank": 69, "score": 60329.957715032506 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> OSEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 21)) | (((value as u32) & 0x03) << 21);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `POL`\"]\n\npub type POL_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `POL`\"]\n\npub struct POL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/cr.rs", "rank": 70, "score": 60329.957715032506 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> MCV_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of 
field `IODEF`\"]\n\npub type IODEF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `IODEF`\"]\n\npub struct IODEF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IODEF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/cr.rs", "rank": 71, "score": 60329.957715032506 }, { "content": "#[doc = \"Reader of field `BSM`\"]\n\npub type BSM_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `OEBS`\"]\n\npub type OEBS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LSM`\"]\n\npub type LSM_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OELS`\"]\n\npub type OELS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `FCRC`\"]\n\npub type FCRC_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `FCRC`\"]\n\npub struct FCRC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FCRC_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);\n\n self.w\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/cr.rs", "rank": 72, "score": 60329.594887712294 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CTPL`\"]\n\npub type CTPL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CTPL`\"]\n\npub struct CTPL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CTPL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/cr.rs", "rank": 73, "score": 60329.57513254114 }, { "content": "impl<'a> 
MAMP2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DMAEN2`\"]\n\npub type DMAEN2_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DMAEN2`\"]\n\npub struct DMAEN2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMAEN2_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 74, "score": 60329.46026135658 }, { "content": "impl<'a> FTHRES_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x1f << 8)) | (((value as u32) & 0x1f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `FSEL`\"]\n\npub type FSEL_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `FSEL`\"]\n\npub struct FSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FSEL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/quadspi/cr.rs", "rank": 75, "score": 60329.46026135658 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PLLON`\"]\n\npub type PLLON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PLLON`\"]\n\npub struct PLLON_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PLLON_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": 
"07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 76, "score": 60329.11389968372 }, { "content": "#[doc = \"Reader of field `MAMP2`\"]\n\npub type MAMP2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MAMP2`\"]\n\npub struct MAMP2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MAMP2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `WAVE2`\"]\n\npub type WAVE2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `WAVE2`\"]\n\npub struct WAVE2_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dac/cr.rs", "rank": 78, "score": 60329.08966393905 }, { "content": "#[doc = \"Reader of field `WAVE2`\"]\n\npub type WAVE2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `WAVE2`\"]\n\npub struct WAVE2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WAVE2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MAMP2`\"]\n\npub type MAMP2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MAMP2`\"]\n\npub struct MAMP2_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 79, "score": 60329.08966393905 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEL1`\"]\n\npub type TSEL1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEL1`\"]\n\npub struct TSEL1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEL1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n 
self.w.bits = (self.w.bits & !(0x07 << 3)) | (((value as u32) & 0x07) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `WAVE1`\"]\n\npub type WAVE1_R = crate::R<u8, u8>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 80, "score": 60328.99381025143 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SNB`\"]\n\npub type SNB_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SNB`\"]\n\npub struct SNB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SNB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 3)) | (((value as u32) & 0x0f) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PSIZE`\"]\n\npub type PSIZE_R = crate::R<u8, u8>;\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/flash/cr.rs", "rank": 81, "score": 60328.99381025143 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `MAMP1`\"]\n\npub type MAMP1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MAMP1`\"]\n\npub struct MAMP1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MAMP1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `WAVE1`\"]\n\npub type WAVE1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `WAVE1`\"]\n\npub struct WAVE1_W<'a> {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dac/cr.rs", "rank": 82, "score": 60328.7505549622 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `PGPSC`\"]\n\npub type PGPSC_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PGPSC`\"]\n\npub struct PGPSC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PGPSC_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n 
#[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MCV`\"]\n\npub type MCV_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `MCV`\"]\n\npub struct MCV_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/cr.rs", "rank": 83, "score": 60328.7505549622 }, { "content": "#[doc = \"Write proxy for field `VOS`\"]\n\npub struct VOS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VOS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 11)) | (((value as u32) & 0x03) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DS_EE_KOFF`\"]\n\npub type DS_EE_KOFF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DS_EE_KOFF`\"]\n\npub struct DS_EE_KOFF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DS_EE_KOFF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/pwr/cr.rs", "rank": 85, "score": 60327.80907937279 }, { "content": "#[doc = \"Reader of field `HSERDY`\"]\n\npub type HSERDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `HSEON`\"]\n\npub type HSEON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HSEON`\"]\n\npub struct HSEON_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HSEON_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rcc/cr.rs", "rank": 86, "score": 60326.44402600813 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PLS`\"]\n\npub type PLS_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PLS`\"]\n\npub struct PLS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PLS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/pwr/cr.rs", "rank": 87, "score": 60325.806756911246 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PLS`\"]\n\npub type PLS_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PLS`\"]\n\npub struct PLS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PLS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/pwr/cr.rs", "rank": 88, "score": 60325.806756911246 }, { "content": "impl<'a> HSEON_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HSERDY`\"]\n\npub type HSERDY_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 89, "score": 
60324.871141616364 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HSIRDY`\"]\n\npub type HSIRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `HSITRIM`\"]\n\npub type HSITRIM_R = crate::R<u8, u8>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 91, "score": 60323.63678067189 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HSEBYP`\"]\n\npub type HSEBYP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HSEBYP`\"]\n\npub struct HSEBYP_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rcc/cr.rs", "rank": 92, "score": 60323.30954466493 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DMAEN1`\"]\n\npub type DMAEN1_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DMAEN1`\"]\n\npub struct DMAEN1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMAEN1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = 
r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/cr.rs", "rank": 93, "score": 60322.597569985744 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PMM`\"]\n\npub type PMM_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PMM`\"]\n\npub struct PMM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PMM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/quadspi/cr.rs", "rank": 94, "score": 60322.597569985744 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x3f << 8)) | (((value as u32) & 0x3f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SWSYNC`\"]\n\npub type SWSYNC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SWSYNC`\"]\n\npub struct SWSYNC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SWSYNC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/cr.rs", "rank": 95, "score": 60322.597569985744 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DBP`\"]\n\npub type DBP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DBP`\"]\n\npub struct DBP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub 
fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/pwr/cr.rs", "rank": 96, "score": 60322.597569985744 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DBP`\"]\n\npub type DBP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DBP`\"]\n\npub struct DBP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/pwr/cr.rs", "rank": 97, "score": 60322.597569985744 }, { "content": "#[doc = \"Reader of field `HSEBYP`\"]\n\npub type HSEBYP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HSEBYP`\"]\n\npub struct HSEBYP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HSEBYP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cr.rs", "rank": 99, "score": 60322.35904984796 } ]
Rust
06/chronal-manhattan/src/main.rs
mnem/advent-of-code-2018
f2f5821d29fcf111ef4c1ff963bc4636626781f6
extern crate regex; use std::fs; use std::cmp; use regex::Regex; fn main() { let input = read_input(); let result = process(&input); println!("Result: {}\n", result); } fn read_input() -> String { let input_filename = String::from("input.txt"); fs::read_to_string(input_filename) .expect("Failed to read file") } fn process(input: &str) -> i32 { let points = Point::from_lines(&input); let (max_x, max_y) = extent(&points); let mut scored_grids = Vec::new(); for point in points { let scored_grid = scored_grid_from(point, max_x, max_y); scored_grids.push(scored_grid); } let cell_count = (max_x * max_y) as usize; for cell_index in 0..cell_count { let mut min_score = i32::max_value(); for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; min_score = cmp::min(min_score, grid_cell_score); } let mut min_cells_seen = 0; for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; if min_score == grid_cell_score { min_cells_seen += 1; } } if min_cells_seen > 1 { for grid_index in 0..scored_grids.len() { scored_grids[grid_index][cell_index] = -1; } } else { for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; if grid_cell_score == min_score { continue; } scored_grids[grid_index][cell_index] = -1; } } } let mut areas = Vec::new(); for grid in &scored_grids { if grid_is_infinite(grid, max_x, max_y) { areas.push(0); continue; } let area = grid.iter().fold(0, |acc, value| { if *value >= 0 { return acc + 1; } else { return acc; } }); areas.push(area); } areas.sort(); return *areas.last().unwrap() as i32; } fn grid_is_infinite(grid: &Vec<i32>, extent_x: usize, extent_y: usize) -> bool { let max = extent_x * extent_y; let top_range = 0..extent_x; let bottom_range = (extent_x * (extent_y - 1))..max; for (top, bottom) in top_range.zip(bottom_range) { if grid[top] >= 0 || grid[bottom] >= 0 { return true; } } let left_range = (0 
..max).step_by(extent_x); let right_range = ((extent_x - 1)..max).step_by(extent_x); for (left, right) in left_range.zip(right_range) { if grid[left] >= 0 || grid[right] >= 0 { return true; } } return false; } fn scored_grid_from(point: Point, extent_x: usize, extend_y: usize) -> Vec<i32> { let mut scores = Vec::new(); for y in 0..extend_y { for x in 0..extent_x { let score = ((y as i32) - point.y).abs() + ((x as i32) - point.x).abs(); scores.push(score); } } return scores; } fn extent(points: &Vec<Point>) -> (usize, usize) { let mut max_x = 0; let mut max_y = 0; for point in points { max_x = cmp::max(max_x, point.x); max_y = cmp::max(max_y, point.y); } max_x += 1; max_y += 1; (max_x as usize, max_y as usize) } #[derive(Debug,PartialEq)] struct Point { x: i32, y: i32, } impl Point { fn from_string(string: &str) -> Point { let re = Regex::new(r"(?P<x>\d*)\D*(?P<y>\d*)").unwrap(); let captures = re.captures(string).unwrap(); return Point { x: captures["x"].parse().unwrap(), y: captures["y"].parse().unwrap() }; } fn from_lines(lines: &str) -> Vec<Point> { let mut points = Vec::new(); for line in lines.lines() { let line = line.trim(); if line.len() == 0 { continue; } points.push(Point::from_string(line)); } return points; } } #[cfg(test)] mod tests { use super::*; #[test] fn test_point_from_string() { let subject = Point::from_string("1, 2"); assert_eq!(Point { x: 1, y: 2 }, subject); let subject = Point::from_string("100 , 25699"); assert_eq!(Point { x: 100, y: 25699 }, subject); } #[test] fn test_point_from_lines_trailing_newline() { let subject = Point::from_lines("1, 2\n3, 4\n5, 6\n"); let expected = vec![Point {x: 1, y: 2},Point {x: 3, y: 4},Point {x: 5, y: 6},]; assert_eq!(expected, subject); } #[test] fn test_point_from_lines_trailing() { let subject = Point::from_lines("1, 2\n3, 4\n5, 6"); let expected = vec![Point {x: 1, y: 2},Point {x: 3, y: 4},Point {x: 5, y: 6},]; assert_eq!(expected, subject); } #[test] fn test_extent() { let input = vec![Point {x: 0, y: 
10}, Point {x: 20, y: 0}, Point {x: 19, y: 10},]; let (max_x, max_y) = extent(&input); assert_eq!(21, max_x); assert_eq!(11, max_y); } #[test] fn test_scored_grid_1() { let in_point = Point { x: 1, y: 1}; let extent_x = 3; let extent_y = 3; let result = scored_grid_from(in_point, extent_x, extent_y); let expected = vec![ 2, 1, 2, 1, 0, 1, 2, 1, 2, ]; assert_eq!(expected, result); } #[test] fn test_scored_grid_2() { let in_point = Point { x: 1, y: 1}; let extent_x = 5; let extent_y = 7; let result = scored_grid_from(in_point, extent_x, extent_y); let expected = vec![ 2, 1, 2, 3, 4, 1, 0, 1, 2, 3, 2, 1, 2, 3, 4, 3, 2, 3, 4, 5, 4, 3, 4, 5, 6, 5, 4, 5, 6, 7, 6, 5, 6, 7, 8, ]; assert_eq!(expected, result); } #[test] fn test_grid_is_infinite_top() { let extent_x = 3; let extent_y = 3; let grid = vec![ 0, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, 0, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, 0, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_grid_is_infinite_centre() { let extent_x = 3; let extent_y = 3; let grid = vec![ -1, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(false, result); let grid = vec![ -1, -1, -1, -1, 0, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(false, result); } #[test] fn test_grid_is_infinite_bottom() { let extent_x = 3; let extent_y = 3; let grid = vec![ -1, -1, -1, -1, -1, -1, 0, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, -1, -1, -1, -1, 0, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, -1, -1, -1, -1, -1, 0, ]; let result = 
grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_grid_is_infinite_left() { let extent_x = 3; let extent_y = 3; let grid = vec![ 0, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, 0, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, 0, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_example() { let input = "1, 1\n1, 6\n8, 3\n3, 4\n5, 5\n8, 9"; let result = process(input); assert_eq!(17, result); } }
extern crate regex; use std::fs; use std::cmp; use regex::Regex; fn main() { let input = read_input(); let result = process(&input); println!("Result: {}\n", result); } fn read_input() -> String { let input_filename = String::from("input.txt"); fs::read_to_string(input_filename) .expect("Failed to read file") } fn process(input: &str) -> i32 { let points = Point::from_lines(&input); let (max_x, max_y) = extent(&points); let mut scored_grids = Vec::new(); for point in points { let scored_grid = scored_grid_from(point, max_x, max_y); scored_grids.push(scored_grid); } let cell_count = (max_x * max_y) as usize; for cell_index in 0..cell_count { let mut min_score = i32::max_value(); for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; min_score = cmp::min(min_score, grid_cell_score); } let mut min_cells_seen = 0; for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; if min_score == grid_cell_score { min_cells_seen += 1; } } if min_cells_seen > 1 { for grid_index in 0..scored_grids.len() { scored_grids[grid_index][cell_index] = -1; } } else { for grid_index in 0..scored_grids.len() { let grid_cell_score = scored_grids[grid_index][cell_index]; if grid_cell_score == min_score { continue; } scored_grids[grid_index][cell_index] = -1; } } } let mut areas = Vec::new(); for grid in &scored_grids { if grid_is_infinite(grid, max_x, max_y) { areas.push(0); continue; } let area = grid.iter().fold(0, |acc, value| { if *value >= 0 { return acc + 1; } else { return acc; } }); areas.push(area); } areas.sort(); return *areas.last().unwrap() as i32; } fn grid_is_infinite(grid: &Vec<i32>, extent_x: usize, extent_y: usize) -> bool { let max = extent_x * extent_y; let top_range = 0..extent_x; let bottom_range = (extent_x * (extent_y - 1))..max; for (top, bottom) in top_range.zip(bottom_range) { if grid[top] >= 0 || grid[bottom] >= 0 { return true; } } let left_range = (0 
..max).step_by(extent_x); let right_range = ((extent_x - 1)..max).step_by(extent_x); for (left, right) in left_range.zip(right_range) { if grid[left] >= 0 || grid[right] >= 0 { return true; } } return false; } fn scored_grid_from(point: Point, extent_x: usize, extend_y: usize) -> Vec<i32> { let mut scores = Vec::new(); for y in 0..extend_y { for x in 0..extent_x { let score = ((y as i32) - point.y).abs() + ((x as i32) - point.x).abs(); scores.push(score); } } return scores; } fn extent(points: &Vec<Point>) -> (usize, usize) { let mut max_x = 0; let mut max_y = 0; for point in points { max_x = cmp::max(max_x, point.x); max_y = cmp::max(max_y, point.y); } max_x += 1; max_y += 1; (max_x as usize, max_y as usize) } #[derive(Debug,PartialEq)] struct Point { x: i32, y: i32, } impl Point { fn from_string(string: &str) -> Point { let re = Regex::new(r"(?P<x>\d*)\D*(?P<y>\d*)").unwrap(); let captures = re.captures(string).unwrap(); return Point { x: captures["x"].parse().unwrap(), y: captures["y"].parse().unwrap() };
te(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, 0, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, 0, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_grid_is_infinite_centre() { let extent_x = 3; let extent_y = 3; let grid = vec![ -1, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(false, result); let grid = vec![ -1, -1, -1, -1, 0, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(false, result); } #[test] fn test_grid_is_infinite_bottom() { let extent_x = 3; let extent_y = 3; let grid = vec![ -1, -1, -1, -1, -1, -1, 0, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, -1, -1, -1, -1, 0, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, -1, -1, -1, -1, -1, 0, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_grid_is_infinite_left() { let extent_x = 3; let extent_y = 3; let grid = vec![ 0, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, 0, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); let grid = vec![ -1, -1, -1, 0, -1, -1, -1, -1, -1, ]; let result = grid_is_infinite(&grid, extent_x, extent_y); assert_eq!(true, result); } #[test] fn test_example() { let input = "1, 1\n1, 6\n8, 3\n3, 4\n5, 5\n8, 9"; let result = process(input); assert_eq!(17, result); } }
} fn from_lines(lines: &str) -> Vec<Point> { let mut points = Vec::new(); for line in lines.lines() { let line = line.trim(); if line.len() == 0 { continue; } points.push(Point::from_string(line)); } return points; } } #[cfg(test)] mod tests { use super::*; #[test] fn test_point_from_string() { let subject = Point::from_string("1, 2"); assert_eq!(Point { x: 1, y: 2 }, subject); let subject = Point::from_string("100 , 25699"); assert_eq!(Point { x: 100, y: 25699 }, subject); } #[test] fn test_point_from_lines_trailing_newline() { let subject = Point::from_lines("1, 2\n3, 4\n5, 6\n"); let expected = vec![Point {x: 1, y: 2},Point {x: 3, y: 4},Point {x: 5, y: 6},]; assert_eq!(expected, subject); } #[test] fn test_point_from_lines_trailing() { let subject = Point::from_lines("1, 2\n3, 4\n5, 6"); let expected = vec![Point {x: 1, y: 2},Point {x: 3, y: 4},Point {x: 5, y: 6},]; assert_eq!(expected, subject); } #[test] fn test_extent() { let input = vec![Point {x: 0, y: 10}, Point {x: 20, y: 0}, Point {x: 19, y: 10},]; let (max_x, max_y) = extent(&input); assert_eq!(21, max_x); assert_eq!(11, max_y); } #[test] fn test_scored_grid_1() { let in_point = Point { x: 1, y: 1}; let extent_x = 3; let extent_y = 3; let result = scored_grid_from(in_point, extent_x, extent_y); let expected = vec![ 2, 1, 2, 1, 0, 1, 2, 1, 2, ]; assert_eq!(expected, result); } #[test] fn test_scored_grid_2() { let in_point = Point { x: 1, y: 1}; let extent_x = 5; let extent_y = 7; let result = scored_grid_from(in_point, extent_x, extent_y); let expected = vec![ 2, 1, 2, 3, 4, 1, 0, 1, 2, 3, 2, 1, 2, 3, 4, 3, 2, 3, 4, 5, 4, 3, 4, 5, 6, 5, 4, 5, 6, 7, 6, 5, 6, 7, 8, ]; assert_eq!(expected, result); } #[test] fn test_grid_is_infinite_top() { let extent_x = 3; let extent_y = 3; let grid = vec![ 0, -1, -1, -1, -1, -1, -1, -1, -1, ]; let result = grid_is_infini
random
[ { "content": "fn scored_grid_from(point: Point, extent_x: usize, extend_y: usize) -> Vec<i32> {\n\n let mut scores = Vec::new();\n\n for y in 0..extend_y {\n\n for x in 0..extent_x {\n\n let score = ((y as i32) - point.y).abs() + ((x as i32) - point.x).abs();\n\n scores.push(score);\n\n }\n\n }\n\n return scores;\n\n}\n\n\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 1, "score": 234394.8312634665 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "08/root-value/src/main.rs", "rank": 3, "score": 223873.184000428 }, { "content": "fn process(input: &str, max_distance: i32) -> i32 {\n\n let points = Point::from_lines(&input);\n\n\n\n // Work out how large a grid we have to consider\n\n let (max_x, max_y) = extent(&points);\n\n\n\n // Score a grid for each of the points\n\n let mut scored_grids = Vec::new();\n\n for point in points {\n\n let scored_grid = scored_grid_from(point, max_x, max_y);\n\n scored_grids.push(scored_grid);\n\n }\n\n\n\n // Add the grids\n\n let mut total_grid = Vec::new();\n\n let cell_count = (max_x * max_y) as usize;\n\n for cell_index in 0..cell_count {\n\n let mut total = 0;\n\n for grid_index in 0..scored_grids.len() {\n\n let grid_cell_score = scored_grids[grid_index][cell_index];\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 4, "score": 223036.91948227154 }, { "content": "fn grid_is_infinite(grid: &Vec<i32>, extent_x: usize, extent_y: usize) -> bool {\n\n let max = extent_x * extent_y;\n\n\n\n let top_range = 0..extent_x;\n\n let bottom_range = (extent_x * (extent_y - 1))..max;\n\n for (top, bottom) in top_range.zip(bottom_range) {\n\n if grid[top] >= 0 || grid[bottom] >= 0 {\n\n return true;\n\n }\n\n }\n\n\n\n let left_range = (0 ..max).step_by(extent_x);\n\n let right_range = ((extent_x - 1)..max).step_by(extent_x);\n\n for (left, right) in 
left_range.zip(right_range) {\n\n if grid[left] >= 0 || grid[right] >= 0 {\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n}\n\n\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 5, "score": 222706.78547782698 }, { "content": "fn process(input: &str) -> i32 {\n\n let mut starfield = starfield_from(input);\n\n let mut last_area = i64::max_value();\n\n let mut second = 0;\n\n\n\n loop {\n\n starfield_step(&mut starfield, StepDirection::Forwards);\n\n let (top_left, bottom_right) = starfield_extent(&starfield);\n\n let area = (bottom_right.x - top_left.x) * (bottom_right.y - top_left.y);\n\n if area > last_area {\n\n break;\n\n }\n\n last_area = area;\n\n second += 1;\n\n }\n\n starfield_step(&mut starfield, StepDirection::Backwards);\n\n starfield_to_bmp(&starfield, \"message\");\n\n\n\n return second;\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", "rank": 6, "score": 209856.37423565268 }, { "content": "fn process_lines(input: String) -> i32 {\n\n let mut num_pairs = 0;\n\n let mut num_triplets = 0;\n\n\n\n for line in input.lines() {\n\n // Cleanup the input line\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue\n\n }\n\n\n\n let letter_counts = count_letters(line);\n\n let (pairs_adjustment, triplets_adjustment) = score_pairs_and_triplets(letter_counts);\n\n\n\n // Update the counts\n\n num_pairs += pairs_adjustment;\n\n num_triplets += triplets_adjustment;\n\n }\n\n\n\n checksum(num_pairs, num_triplets)\n\n}\n\n\n", "file_path": "02/ims/src/main.rs", "rank": 8, "score": 205320.18445128208 }, { "content": "fn process_lines(input: String) -> i32 {\n\n let mut claims = claims_from(input);\n\n let mut overlapping = HashMap::new();\n\n for claim in &claims {\n\n overlapping.insert(claim.claim_id, false);\n\n }\n\n\n\n for y in 0..1_000 {\n\n for x in 0..1_000 {\n\n let mut overlap_count = 0;\n\n let mut first_overlap = None;\n\n for claim in &mut claims {\n\n if claim.contains_point(x, y) {\n\n overlap_count += 1;\n\n if 
overlap_count == 1 {\n\n first_overlap = Some(claim.claim_id);\n\n } else {\n\n if overlap_count == 2 {\n\n let first_overlap_id = first_overlap.unwrap();\n\n overlapping.insert(first_overlap_id, true);\n", "file_path": "03/no-overlaps/src/main.rs", "rank": 9, "score": 205320.18445128205 }, { "content": "fn process_lines(input: String) -> i32 {\n\n let claims = claims_from(input);\n\n\n\n let mut overlap_squares = 0;\n\n for y in 0..1_000 {\n\n for x in 0..1_000 {\n\n let mut overlap_count = 0;\n\n for claim in &claims {\n\n if claim.contains_point(x, y) {\n\n overlap_count += 1;\n\n if overlap_count >= 2 {\n\n overlap_squares += 1;\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n return overlap_squares;\n\n}\n\n\n", "file_path": "03/overlaps/src/main.rs", "rank": 10, "score": 205320.18445128205 }, { "content": "fn process(input: &str) -> String {\n\n let instructions = create_instruction_graph(input);\n\n\n\n // Get all the possible start steps and use them as starting points\n\n let mut available = Vec::new();\n\n for ready_step in instructions.externals(Incoming) {\n\n available.push(ready_step);\n\n }\n\n\n\n // Work out the order to complete the steps in.\n\n let mut steps = String::new();\n\n let mut complete = HashSet::new();\n\n while available.len() > 0 {\n\n // Sort the available steps so we process them in order\n\n available.sort_by(|a, b| {\n\n let a_step = instructions.node_weight(*a).unwrap();\n\n let b_step = instructions.node_weight(*b).unwrap();\n\n a_step.cmp(b_step)\n\n });\n\n\n", "file_path": "07/some-assembly-required/src/main.rs", "rank": 11, "score": 204178.75350915125 }, { "content": "fn calibrate_from_lines(input: String) -> i32 {\n\n let mut seen = HashSet::new();\n\n let mut total_calibration = 0;\n\n seen.insert(total_calibration);\n\n\n\n let mut loop_count = 0;\n\n let max_loops = 10_000;\n\n loop {\n\n loop_count += 1;\n\n if loop_count >= max_loops {\n\n panic!(\"No repeat found after looping over all input {} times!\", 
max_loops);\n\n }\n\n\n\n for line in input.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n let calibration: i32 = line.parse().unwrap();\n", "file_path": "01/looping-calibrator/src/main.rs", "rank": 12, "score": 201413.8763121575 }, { "content": "fn process(input: &str, step_base_time: i32, num_workers: usize) -> i32 {\n\n let instructions = create_instruction_graph(input);\n\n\n\n // Get all the possible start steps and use them as starting points\n\n let mut available = Vec::new();\n\n for ready_step in instructions.externals(Incoming) {\n\n available.push(ready_step);\n\n }\n\n\n\n // Setup the timer and workers\n\n let mut timer = -1;\n\n let mut workers = vec![ Worker { node: None, time_remaining: 0 }; num_workers];\n\n\n\n // Follow the instructions\n\n let mut steps = String::new();\n\n let mut complete = HashSet::new();\n\n while available.len() > 0 || workers.iter().any( |w| {w.time_remaining > 0} ) {\n\n // Sort the available steps so we process them in order\n\n available.sort_by(|a, b| {\n\n let a_step = instructions.node_weight(*a).unwrap();\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 13, "score": 200436.45074738865 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "09/marbles/src/main.rs", "rank": 14, "score": 199460.72287345445 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "09/marbles-100/src/main.rs", "rank": 15, "score": 199460.72287345445 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "05/reducer/src/main.rs", "rank": 16, "score": 
199460.72287345445 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", "rank": 17, "score": 199460.72287345445 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "07/some-assembly-required/src/main.rs", "rank": 18, "score": 194942.41032094462 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "08/mem-man/src/main.rs", "rank": 20, "score": 194942.41032094462 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 21, "score": 194942.41032094456 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 22, "score": 190749.21047696559 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "05/good-better-bestest/src/main.rs", "rank": 23, "score": 190749.21047696559 }, { "content": "fn read_input() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 24, "score": 190749.21047696559 }, { "content": "fn read_input() -> String {\n\n 
let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 25, "score": 190749.21047696559 }, { "content": "fn process(input: &str) -> usize {\n\n let input = input.trim().to_string();\n\n let output = reduce(input);\n\n return output.len();\n\n}\n\n\n", "file_path": "05/reducer/src/main.rs", "rank": 26, "score": 184402.08958741475 }, { "content": "fn process(input: &str) -> usize {\n\n let setup = GameSetup::from(input);\n\n let mut game = GameState::new(setup);\n\n\n\n game.play();\n\n\n\n let (_, high_score) = game.highest_score();\n\n\n\n return high_score;\n\n}\n\n\n", "file_path": "09/marbles/src/main.rs", "rank": 27, "score": 184402.0895874147 }, { "content": "fn process(input: &str) -> usize {\n\n let setup = GameSetup::from(input);\n\n let mut game = GameState::new(setup);\n\n\n\n game.play();\n\n\n\n let (_, high_score) = game.highest_score();\n\n\n\n return high_score;\n\n}\n\n\n", "file_path": "09/marbles-100/src/main.rs", "rank": 28, "score": 184402.08958741475 }, { "content": "fn create_instruction_graph(input: &str) -> Graph<String, String, Directed, u32> {\n\n let raw_instructions = parse_raw_instructions(input);\n\n let mut instructions = Graph::<String, String>::new();\n\n let mut node_map = HashMap::new();\n\n for raw_instruction in &raw_instructions {\n\n let (step, depends_on) = raw_instruction;\n\n if !node_map.contains_key(step) {\n\n let node = instructions.add_node(step.clone());\n\n node_map.insert(step, node);\n\n }\n\n if !node_map.contains_key(depends_on) {\n\n let node = instructions.add_node(depends_on.clone());\n\n node_map.insert(depends_on, node);\n\n }\n\n }\n\n let mut edges = Vec::new();\n\n for raw_instruction in &raw_instructions {\n\n let (step, depends_on) = raw_instruction;\n\n let depends_on_node = node_map[depends_on];\n\n let step_node = node_map[step];\n", "file_path": 
"07/some-assembly-required/src/main.rs", "rank": 29, "score": 182378.2390262428 }, { "content": "fn process(input: &str) -> u32 {\n\n let mut data = input_to_vec(&input);\n\n let node = make_node(&mut data);\n\n\n\n return node.value();\n\n}\n\n\n", "file_path": "08/root-value/src/main.rs", "rank": 30, "score": 180283.0427000086 }, { "content": "fn create_instruction_graph(input: &str) -> Graph<String, String, Directed, u32> {\n\n let raw_instructions = parse_raw_instructions(input);\n\n let mut instructions = Graph::<String, String>::new();\n\n let mut node_map = HashMap::new();\n\n for raw_instruction in &raw_instructions {\n\n let (step, depends_on) = raw_instruction;\n\n if !node_map.contains_key(step) {\n\n let node = instructions.add_node(step.clone());\n\n node_map.insert(step, node);\n\n }\n\n if !node_map.contains_key(depends_on) {\n\n let node = instructions.add_node(depends_on.clone());\n\n node_map.insert(depends_on, node);\n\n }\n\n }\n\n let mut edges = Vec::new();\n\n for raw_instruction in &raw_instructions {\n\n let (step, depends_on) = raw_instruction;\n\n let depends_on_node = node_map[depends_on];\n\n let step_node = node_map[step];\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 31, "score": 179714.7071118175 }, { "content": "fn input_to_vec(input: &str) -> Vec<u32> {\n\n let mut vec: Vec<u32> = input.trim().split(\" \").map(|s| { s.parse().unwrap() }).collect();\n\n vec.reverse();\n\n vec\n\n}\n\n\n", "file_path": "08/root-value/src/main.rs", "rank": 32, "score": 177817.12118667478 }, { "content": "fn process(input: &str) -> usize {\n\n let input = input.trim().to_string();\n\n let units = unique_units(&input);\n\n\n\n let mut shortest = usize::max_value();\n\n for unit in units {\n\n let filtered = input.chars().filter(|u| { u.to_ascii_lowercase() != unit }).collect();\n\n let output = reduce(filtered);\n\n shortest = min(shortest, output.len());\n\n }\n\n\n\n return shortest;\n\n}\n\n\n", "file_path": 
"05/good-better-bestest/src/main.rs", "rank": 33, "score": 176283.08645293518 }, { "content": "fn process_lines(input: String) -> String {\n\n let mut box_ids: Vec<&str> = input.lines()\n\n .map(|id| {id.trim()})\n\n .filter(|id| {id.len() > 0})\n\n .collect();\n\n\n\n while let Some(id) = box_ids.pop() {\n\n for other_id in box_ids.iter().cloned() {\n\n let matching = matching_chars(id, other_id);\n\n if matching.len() == (id.len() - 1) {\n\n return matching;\n\n }\n\n }\n\n }\n\n return String::new();\n\n}\n\n\n", "file_path": "02/find/src/main.rs", "rank": 34, "score": 169328.534783157 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "02/find/src/main.rs", "rank": 35, "score": 165176.35290027552 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "03/overlaps/src/main.rs", "rank": 36, "score": 165176.35290027552 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "02/ims/src/main.rs", "rank": 37, "score": 165176.35290027552 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 38, "score": 165176.35290027552 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "03/no-overlaps/src/main.rs", "rank": 39, "score": 165176.35290027552 }, { "content": "fn make_node(input: &mut 
Vec<u32>) -> Node {\n\n let num_children = input.pop().unwrap();\n\n let num_metadatum = input.pop().unwrap();\n\n\n\n let mut children = Vec::new();\n\n for _ in 0..num_children {\n\n children.push(Box::new(make_node(input)));\n\n }\n\n\n\n let mut metadata = Vec::new();\n\n for _ in 0..num_metadatum {\n\n metadata.push(input.pop().unwrap());\n\n }\n\n\n\n Node {children, metadata}\n\n}\n\n\n", "file_path": "08/root-value/src/main.rs", "rank": 40, "score": 162725.2371923442 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n\n\n", "file_path": "01/looping-calibrator/src/main.rs", "rank": 41, "score": 160983.15305629652 }, { "content": "fn read_input_lines() -> String {\n\n let input_filename = String::from(\"input.txt\");\n\n fs::read_to_string(input_filename)\n\n .expect(\"Failed to read file\")\n\n}\n\n\n", "file_path": "04/sleepiest-minute/src/main.rs", "rank": 42, "score": 160983.15305629652 }, { "content": "fn read_initial_input_line(line: &str) -> VecDeque<usize> {\n\n let re = Regex::new(r\": (?P<state>.*)\").expect(\"Broken regex\");\n\n let captures = re.captures(line).expect(\"Malformed state line\");\n\n let state_str = &captures[\"state\"];\n\n\n\n let mut state = VecDeque::new();\n\n for c in state_str.chars() {\n\n if c == '#' {\n\n state.push_back(1);\n\n } else {\n\n state.push_back(0);\n\n }\n\n }\n\n return state;\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 43, "score": 158443.07834668725 }, { "content": "fn parse_raw_instruction(instruction: &str) -> (String, String) {\n\n let re = Regex::new(r\"Step (?P<previous>.).*step (?P<step>.)\").unwrap();\n\n let captures = re.captures(instruction).unwrap();\n\n return (captures[\"step\"].to_string(), captures[\"previous\"].to_string());\n\n}\n\n\n", "file_path": "07/some-assembly-required/src/main.rs", "rank": 44, "score": 156799.85674687574 
}, { "content": "fn matching_chars(id: &str, other_id: &str) -> String {\n\n id.chars().zip(other_id.chars())\n\n .filter_map(|(a, b)| {\n\n if a == b {\n\n Some(a)\n\n } else {\n\n None\n\n }\n\n }).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_example() {\n\n let input = String::from(\"abcde\\nfghij\\nklmno\\npqrst\\nfguij\\naxcye\\nwvxyz\\n\");\n\n let result = process_lines(input);\n\n assert_eq!(\"fgij\", result);\n\n }\n\n}\n", "file_path": "02/find/src/main.rs", "rank": 45, "score": 155791.58886432077 }, { "content": "fn read_initial_input_line(line: &str) -> VecDeque<usize> {\n\n let re = Regex::new(r\": (?P<state>.*)\").expect(\"Broken regex\");\n\n let captures = re.captures(line).expect(\"Malformed state line\");\n\n let state_str = &captures[\"state\"];\n\n\n\n let mut state = VecDeque::new();\n\n for c in state_str.chars() {\n\n if c == '#' {\n\n state.push_back(1);\n\n } else {\n\n state.push_back(0);\n\n }\n\n }\n\n return state;\n\n}\n\n\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 46, "score": 154924.62437119114 }, { "content": "fn parse_raw_instruction(instruction: &str) -> (String, String) {\n\n let re = Regex::new(r\"Step (?P<previous>.).*step (?P<step>.)\").unwrap();\n\n let captures = re.captures(instruction).unwrap();\n\n return (captures[\"step\"].to_string(), captures[\"previous\"].to_string());\n\n}\n\n\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 47, "score": 154414.44710501342 }, { "content": "fn process(input: &str) -> u32 {\n\n let mut data = input_to_vec(&input);\n\n let node = make_node(&mut data);\n\n\n\n return node.sum_metadata();\n\n}\n\n\n", "file_path": "08/mem-man/src/main.rs", "rank": 48, "score": 152393.11801907507 }, { "content": "fn process_lines(input: &str) -> u32 {\n\n let records = records_from_lines(input);\n\n let sleep_records = sleep_records_from(records);\n\n let guard_durations = 
sleep_durations_by_guard_from(sleep_records);\n\n let sleepist_guard = sleepiest_guard(&guard_durations);\n\n\n\n let mut sleep_by_minute = HashMap::new();\n\n for period in &guard_durations[&sleepist_guard] {\n\n for minute in period.start.minute()..(period.start.minute() + period.duration.num_minutes() as u32) {\n\n let current = sleep_by_minute.entry(minute).or_insert(0);\n\n *current += 1;\n\n }\n\n }\n\n\n\n let mut sleepiest_minute = 0;\n\n let mut sleepiest_value = -1;\n\n for (minute, value) in sleep_by_minute {\n\n if value > sleepiest_value {\n\n sleepiest_minute = minute;\n\n sleepiest_value = value;\n\n }\n\n }\n\n\n\n\n\n return sleepiest_minute * (sleepist_guard as u32);\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 49, "score": 152393.11801907507 }, { "content": "fn process(input: &str) -> i64 {\n\n let mut game = read_game_from_text(input);\n\n\n\n for i in 1..=20 {\n\n step(&mut game);\n\n }\n\n\n\n game_sum(&game)\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 50, "score": 152393.11801907507 }, { "content": "fn input_to_vec(input: &str) -> Vec<u32> {\n\n let mut vec: Vec<u32> = input.trim().split(\" \").map(|s| { s.parse().unwrap() }).collect();\n\n vec.reverse();\n\n vec\n\n}\n\n\n", "file_path": "08/mem-man/src/main.rs", "rank": 51, "score": 151799.29496190525 }, { "content": "fn process(input: &str) -> i64 {\n\n let mut game = read_game_from_text(input);\n\n\n\n let mut result = 0i64;\n\n let sample_size = 100;\n\n let mut sums = Vec::new();\n\n for i in 1..=10_000 {\n\n step(&mut game);\n\n sums.push(game_sum(&game));\n\n if sums.len() >= (sample_size + 1) {\n\n let test = sums[sums.len() - sample_size] - sums[sums.len() - sample_size - 1];\n\n let mut all_match = true;\n\n for i in (sums.len() - sample_size + 1)..sums.len() {\n\n if (sums[i] - sums[i-1]) != test {\n\n all_match = false;\n\n break;\n\n }\n\n }\n\n if all_match {\n\n let stabilised_after = i - sample_size;\n", "file_path": 
"12/underground-gol-betterer/src/main.rs", "rank": 52, "score": 149445.2181278412 }, { "content": "fn process_lines(input: &str) -> u32 {\n\n let records = records_from_lines(input);\n\n let sleep_records = sleep_records_from(records);\n\n let guard_durations = sleep_durations_by_guard_from(sleep_records);\n\n\n\n let mut most_frequent_guard = 0;\n\n let mut most_frequent_minute = 0;\n\n let mut most_frequent_value = 0;\n\n for (guard, periods) in &guard_durations {\n\n let (sleepiest_minute, sleepiest_value) = sleepiest_minute(periods);\n\n if sleepiest_value > most_frequent_value {\n\n most_frequent_guard = *guard;\n\n most_frequent_minute = sleepiest_minute;\n\n most_frequent_value = sleepiest_value;\n\n }\n\n }\n\n\n\n return most_frequent_minute * (most_frequent_guard as u32);\n\n}\n\n\n", "file_path": "04/sleepiest-minute/src/main.rs", "rank": 53, "score": 149445.2181278412 }, { "content": "fn parse_raw_instructions(instruction_text: &str) -> Vec<(String, String)> {\n\n let mut instructions = Vec::new();\n\n for instruction in instruction_text.lines() {\n\n let instruction = instruction.trim();\n\n if instruction.len() == 0 {\n\n continue;\n\n }\n\n instructions.push(parse_raw_instruction(instruction));\n\n }\n\n return instructions\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_raw_instruction() {\n\n let input = \"Step C must be finished before step A can begin.\";\n\n let (step, previous) = parse_raw_instruction(input);\n", "file_path": "07/some-assembly-required/src/main.rs", "rank": 54, "score": 148509.7805428266 }, { "content": "fn starfield_from(string: &str) -> Vec<Star> {\n\n let mut starfield = Vec::new();\n\n\n\n for line in string.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n let star = Star::from(line);\n\n starfield.push(star);\n\n }\n\n\n\n return starfield;\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", "rank": 55, "score": 147452.35013094678 }, { 
"content": "fn parse_raw_instructions(instruction_text: &str) -> Vec<(String, String)> {\n\n let mut instructions = Vec::new();\n\n for instruction in instruction_text.lines() {\n\n let instruction = instruction.trim();\n\n if instruction.len() == 0 {\n\n continue;\n\n }\n\n instructions.push(parse_raw_instruction(instruction));\n\n }\n\n return instructions\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_raw_instruction() {\n\n let input = \"Step C must be finished before step A can begin.\";\n\n let (step, previous) = parse_raw_instruction(input);\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 56, "score": 146377.56338022306 }, { "content": "fn date_from(string: &str) -> DateTime<Utc> {\n\n Utc.datetime_from_str(string, \"%Y-%m-%d %H:%M\")\n\n .expect(\"Unexpected date format\")\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 57, "score": 144503.767370336 }, { "content": "fn date_from(string: &str) -> DateTime<Utc> {\n\n Utc.datetime_from_str(string, \"%Y-%m-%d %H:%M\")\n\n .expect(\"Unexpected date format\")\n\n}\n\n\n", "file_path": "04/sleepiest-minute/src/main.rs", "rank": 58, "score": 141753.10688083788 }, { "content": "fn unique_units(string: &str) -> Vec<char> {\n\n let mut unique: Vec<char> = string.chars().map( |c| { c.to_ascii_lowercase() } ).collect();\n\n unique.sort();\n\n unique.dedup();\n\n return unique;\n\n}\n\n\n", "file_path": "05/good-better-bestest/src/main.rs", "rank": 59, "score": 139181.0876151219 }, { "content": "fn make_node(input: &mut Vec<u32>) -> Node {\n\n let num_children = input.pop().unwrap();\n\n let num_metadatum = input.pop().unwrap();\n\n\n\n let mut children = Vec::new();\n\n for _ in 0..num_children {\n\n children.push(Box::new(make_node(input)));\n\n }\n\n\n\n let mut metadata = Vec::new();\n\n for _ in 0..num_metadatum {\n\n metadata.push(input.pop().unwrap());\n\n }\n\n\n\n Node {children, metadata}\n\n}\n\n\n", "file_path": 
"08/mem-man/src/main.rs", "rank": 61, "score": 137552.27283524087 }, { "content": "fn count_letters(line: &str) -> HashMap<char, i32> {\n\n let mut letter_counts = HashMap::new();\n\n for letter in line.chars() {\n\n letter_counts.entry(letter)\n\n .and_modify(|e| { *e += 1 })\n\n .or_insert(1);\n\n }\n\n letter_counts\n\n}\n\n\n", "file_path": "02/ims/src/main.rs", "rank": 62, "score": 136752.12559899662 }, { "content": "#[derive(PartialEq)]\n\nstruct Point {\n\n x: i64,\n\n y: i64,\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", "rank": 63, "score": 135578.89716562236 }, { "content": "fn extent(points: &Vec<Point>) -> (usize, usize) {\n\n let mut max_x = 0;\n\n let mut max_y = 0;\n\n for point in points {\n\n max_x = cmp::max(max_x, point.x);\n\n max_y = cmp::max(max_y, point.y);\n\n }\n\n max_x += 1;\n\n max_y += 1;\n\n\n\n (max_x as usize, max_y as usize)\n\n}\n\n\n\n//fn print_scores_as_grid(scores: &Vec<i32>, extent_x: usize, extent_y: usize) {\n\n// println!(\"----[Grid]----\");\n\n// for y in 0..extent_y {\n\n// for x in 0..extent_x {\n\n// let index = (extent_x * y + x) as usize;\n\n// print!(\"{:>4}\", scores[index]);\n\n// }\n\n// println!(\"\");\n\n// }\n\n// println!(\"--------------\");\n\n//}\n\n\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 64, "score": 135182.26320965635 }, { "content": "fn score_pairs_and_triplets(letter_counts: HashMap<char, i32>) -> (i32, i32) {\n\n let mut pairs_adjustment = 0;\n\n let mut triplets_adjustment = 0;\n\n for (_, count) in letter_counts {\n\n match count {\n\n 2 => pairs_adjustment = 1,\n\n 3 => triplets_adjustment = 1,\n\n _ => {}\n\n }\n\n }\n\n (pairs_adjustment, triplets_adjustment)\n\n}\n\n\n", "file_path": "02/ims/src/main.rs", "rank": 65, "score": 134792.90402471542 }, { "content": "#[derive(Debug,PartialEq)]\n\nstruct Point {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n\nimpl Point {\n\n fn from_string(string: &str) -> Point {\n\n let re = Regex::new(r\"(?P<x>\\d*)\\D*(?P<y>\\d*)\").unwrap();\n\n 
let captures = re.captures(string).unwrap();\n\n return Point { x: captures[\"x\"].parse().unwrap(), y: captures[\"y\"].parse().unwrap() };\n\n }\n\n\n\n fn from_lines(lines: &str) -> Vec<Point> {\n\n let mut points = Vec::new();\n\n for line in lines.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n points.push(Point::from_string(line));\n", "file_path": "06/chronal-manhattan-sum/src/main.rs", "rank": 67, "score": 130056.00506911604 }, { "content": "fn reduce(string: String) -> String {\n\n string.chars().fold(String::new(), |mut acc, c| {\n\n match acc.pop() {\n\n Some(last) => {\n\n if !is_reactive(last, c) {\n\n acc.push(last);\n\n acc.push(c);\n\n }\n\n },\n\n None => acc.push(c),\n\n }\n\n acc\n\n })\n\n}\n\n\n", "file_path": "05/reducer/src/main.rs", "rank": 68, "score": 127377.58561567137 }, { "content": "fn reduce(string: String) -> String {\n\n string.chars().fold(String::new(), |mut acc, c| {\n\n match acc.pop() {\n\n Some(last) => {\n\n if !is_reactive(last, c) {\n\n acc.push(last);\n\n acc.push(c);\n\n }\n\n },\n\n None => acc.push(c),\n\n }\n\n acc\n\n })\n\n}\n\n\n", "file_path": "05/good-better-bestest/src/main.rs", "rank": 69, "score": 123699.89754387936 }, { "content": "fn grow_state(state: &mut VecDeque<usize>) -> usize {\n\n if state[state.len() - 1] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 2] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 3] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 4] == 1 {\n\n state.push_back(0);\n\n }\n\n\n\n if state[0] == 1 {\n\n state.push_front(0);\n\n state.push_front(0);\n", "file_path": "12/underground-gol/src/main.rs", "rank": 70, "score": 123071.93505849665 }, { "content": "fn read_rule_from_line(line: &str) -> Rule {\n\n let re = Regex::new(r\"(?P<pattern>.*) => 
(?P<result>.)\").expect(\"Broken regex\");\n\n let captures = re.captures(line).expect(\"Malformed state line\");\n\n let pattern_str = &captures[\"pattern\"];\n\n let result_str = &captures[\"result\"];\n\n\n\n let mut pattern = 0;\n\n for (index, c) in pattern_str.chars().enumerate() {\n\n if c == '#' {\n\n pattern = pattern | (1 << (pattern_str.len() - 1 - index));\n\n }\n\n }\n\n\n\n let result = match result_str {\n\n \"#\" => 1usize,\n\n _ => 0usize,\n\n };\n\n\n\n Rule { pattern, result }\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 71, "score": 121795.43710244642 }, { "content": "fn read_game_from_text(text: &str) -> Game {\n\n let mut state = None;\n\n let mut rules = Vec::new();\n\n\n\n for line in text.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n if state.is_none() {\n\n state = Some(read_initial_input_line(line));\n\n } else {\n\n rules.push(read_rule_from_line(line));\n\n }\n\n }\n\n\n\n return Game { state: state.unwrap(), zero_index: 0, rules }\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 72, "score": 121795.43710244639 }, { "content": "fn grow_state(state: &mut VecDeque<usize>) -> usize {\n\n if state[state.len() - 1] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 2] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 3] == 1 {\n\n state.push_back(0);\n\n state.push_back(0);\n\n } else if state[state.len() - 4] == 1 {\n\n state.push_back(0);\n\n }\n\n\n\n if state[0] == 1 {\n\n state.push_front(0);\n\n state.push_front(0);\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 73, "score": 120787.65438577242 }, { "content": "fn main() {\n\n let input = read_input();\n\n let result = process(&input);\n\n println!(\"Result: {}\\n\", result);\n\n}\n\n\n", "file_path": "08/root-value/src/main.rs", 
"rank": 74, "score": 120787.49803566345 }, { "content": "fn read_rule_from_line(line: &str) -> Rule {\n\n let re = Regex::new(r\"(?P<pattern>.*) => (?P<result>.)\").expect(\"Broken regex\");\n\n let captures = re.captures(line).expect(\"Malformed state line\");\n\n let pattern_str = &captures[\"pattern\"];\n\n let result_str = &captures[\"result\"];\n\n\n\n let mut pattern = 0;\n\n for (index, c) in pattern_str.chars().enumerate() {\n\n if c == '#' {\n\n pattern = pattern | (1 << (pattern_str.len() - 1 - index));\n\n }\n\n }\n\n\n\n let result = match result_str {\n\n \"#\" => 1usize,\n\n _ => 0usize,\n\n };\n\n\n\n Rule { pattern, result }\n\n}\n\n\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 75, "score": 119211.4267404587 }, { "content": "fn read_game_from_text(text: &str) -> Game {\n\n let mut state = None;\n\n let mut rules = Vec::new();\n\n\n\n for line in text.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n if state.is_none() {\n\n state = Some(read_initial_input_line(line));\n\n } else {\n\n rules.push(read_rule_from_line(line));\n\n }\n\n }\n\n\n\n return Game { state: state.unwrap(), zero_index: 0, rules }\n\n}\n\n\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 76, "score": 119211.4267404587 }, { "content": "fn checksum(num_pairs: i32, num_triplets: i32) -> i32 {\n\n num_pairs * num_triplets\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_example() {\n\n let input = String::from(\"abcdef\\nbababc\\nabbcde\\nabcccd\\naabcdd\\nabcdee\\nababab\\n\");\n\n let result = process_lines(input);\n\n assert_eq!(12, result);\n\n }\n\n}\n", "file_path": "02/ims/src/main.rs", "rank": 77, "score": 118091.82037030254 }, { "content": "fn sample_grid(grid: &Vec<i64>, x: i64, y: i64) -> i64 {\n\n let y1 = (((y + 0) * 300) + x) as usize;\n\n let y2 = (((y + 1) * 300) + x) as usize;\n\n let y3 = (((y + 2) * 300) + x) as usize;\n\n\n\n grid[y1 + 0] + 
grid[y1 + 1] + grid[y1 + 2] +\n\n grid[y2 + 0] + grid[y2 + 1] + grid[y2 + 2] +\n\n grid[y3 + 0] + grid[y3 + 1] + grid[y3 + 2]\n\n}\n\n\n", "file_path": "11/chronal-charge/src/main.rs", "rank": 78, "score": 112228.7739364451 }, { "content": "fn max_power(grid: &Vec<i64>) -> (i64, i64, i64) {\n\n let mut power = i64::min_value();\n\n let mut power_x = 0i64;\n\n let mut power_y = 0i64;\n\n for y in 0..(300 - 3 as i64) {\n\n for x in 0..(300 - 3 as i64) {\n\n let sample_power = sample_grid(grid, x, y);\n\n if sample_power > power {\n\n power = sample_power;\n\n power_x = x;\n\n power_y = y;\n\n }\n\n }\n\n }\n\n\n\n (power_x, power_y, power)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "11/chronal-charge/src/main.rs", "rank": 79, "score": 111221.7075778994 }, { "content": "fn starfield_extent(starfield: &Vec<Star>) -> (Point, Point) {\n\n let mut top_left = Point { x: i64::max_value(), y: i64::max_value() };\n\n let mut bottom_right = Point { x: i64::min_value(), y: i64::min_value() };\n\n\n\n for star in starfield {\n\n top_left.x = cmp::min(top_left.x, star.position.x);\n\n top_left.y = cmp::min(top_left.y, star.position.y);\n\n\n\n bottom_right.x = cmp::max(bottom_right.x, star.position.x);\n\n bottom_right.y = cmp::max(bottom_right.y, star.position.y);\n\n }\n\n\n\n (top_left, bottom_right)\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", "rank": 80, "score": 110860.21085243492 }, { "content": "fn split_entry(entry_string: &str) -> (DateTime<Utc>, GuardState) {\n\n let re = Regex::new(r\"\\[(?P<date>.*)\\] (?P<entry>.*)\").unwrap();\n\n let captures = re.captures(entry_string)\n\n .expect(\"Unexpected entry format\");\n\n let date = date_from(&captures[\"date\"]);\n\n let state = GuardState::from(&captures[\"entry\"]);\n\n (date, state)\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 81, "score": 109133.08315631206 }, { "content": "fn split_entry(entry_string: &str) -> (DateTime<Utc>, GuardState) {\n\n let re = 
Regex::new(r\"\\[(?P<date>.*)\\] (?P<entry>.*)\").unwrap();\n\n let captures = re.captures(entry_string)\n\n .expect(\"Unexpected entry format\");\n\n let date = date_from(&captures[\"date\"]);\n\n let state = GuardState::from(&captures[\"entry\"]);\n\n (date, state)\n\n}\n\n\n", "file_path": "04/sleepiest-minute/src/main.rs", "rank": 82, "score": 107003.77428819449 }, { "content": "fn game_state_to_string(game: &Game) -> String {\n\n let mut s = String::new();\n\n for (index, value) in game.state.iter().enumerate() {\n\n if index == game.zero_index {\n\n if *value == 1 {\n\n s += \"O\";\n\n } else {\n\n s += \"|\";\n\n }\n\n } else if *value == 1 {\n\n s += \"#\";\n\n } else {\n\n s += \".\";\n\n }\n\n }\n\n return s;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "12/underground-gol/src/main.rs", "rank": 83, "score": 105119.3475287963 }, { "content": "fn max_power(grid: &Vec<i64>) -> (i64, i64, i64, i64) {\n\n let mut max_power = 0i64;\n\n let mut max_side = 0i64;\n\n let mut max_x = 0i64;\n\n let mut max_y = 0i64;\n\n\n\n for side in 1..=300 {\n\n let (x, y, power) = max_power_for_size(&grid, side);\n\n if power > max_power {\n\n max_power = power;\n\n max_side = side;\n\n max_x = x;\n\n max_y = y;\n\n }\n\n }\n\n\n\n return (max_x, max_y, max_power, max_side );\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "11/chronal-charge-deluxe/src/main.rs", "rank": 84, "score": 104847.62306011368 }, { "content": "#[cfg(test)]\n\nfn starfield_to_string(starfield: &Vec<Star>) -> String {\n\n let mut output = String::new();\n\n let (top_left, bottom_right) = starfield_extent(starfield);\n\n\n\n for y in top_left.y..=bottom_right.y {\n\n for x in top_left.x..=bottom_right.x {\n\n let plot = Point { x, y };\n\n let has_star = starfield.iter().any( |star| { star.position == plot } );\n\n if has_star {\n\n output += \"*\";\n\n } else {\n\n output += \" \";\n\n }\n\n }\n\n output += \"\\n\";\n\n }\n\n\n\n return output;\n\n}\n\n\n", "file_path": "10/stars/src/main.rs", 
"rank": 85, "score": 104249.6320912546 }, { "content": "fn game_state_to_string(game: &Game) -> String {\n\n let mut s = String::new();\n\n for (index, value) in game.state.iter().enumerate() {\n\n if index == game.zero_index {\n\n if *value == 1 {\n\n s += \"O\";\n\n } else {\n\n s += \"|\";\n\n }\n\n } else if *value == 1 {\n\n s += \"#\";\n\n } else {\n\n s += \".\";\n\n }\n\n }\n\n return s;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 86, "score": 103418.05049911585 }, { "content": "fn max_power(grid: &Vec<i64>) -> (i64, i64, i64, i64) {\n\n let mut max_power = 0i64;\n\n let mut max_side = 0i64;\n\n let mut max_x = 0i64;\n\n let mut max_y = 0i64;\n\n\n\n for side in 1..=300 {\n\n let (x, y, power) = max_power_for_size(&grid, side);\n\n if power > max_power {\n\n max_power = power;\n\n max_side = side;\n\n max_x = x;\n\n max_y = y;\n\n }\n\n }\n\n\n\n return (max_x, max_y, max_power, max_side );\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "11/chronal-charge-deluxe-royale/src/main.rs", "rank": 87, "score": 102812.29206488367 }, { "content": "fn sleepiest_guard(guard_durations: &HashMap<i32, Vec<SleepPeriod>>) -> i32 {\n\n let mut guard_totals = HashMap::new();\n\n for (guard, periods) in guard_durations {\n\n for period in periods {\n\n let current = guard_totals.entry(guard).or_insert(0);\n\n *current += period.duration.num_minutes();\n\n }\n\n }\n\n let mut max_guard = -1;\n\n let mut max_duration = -1;\n\n for (guard, total) in guard_totals {\n\n if total > max_duration {\n\n max_guard = *guard;\n\n max_duration = total;\n\n }\n\n }\n\n\n\n return max_guard;\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 88, "score": 101747.34280731327 }, { "content": "fn step(game: &mut Game ) {\n\n game.zero_index += grow_state(&mut game.state);\n\n\n\n let in_state = &game.state;\n\n let mut out_state = in_state.clone();\n\n\n\n for i in 0..in_state.len() - 5 {\n\n let window = in_state[i + 0] << 4 | 
in_state[i + 1] << 3 | in_state[i + 2] << 2 | in_state[i + 3] << 1 | in_state[i + 4];\n\n if let Some(rule) = game.rules.iter().find(|r| {r.pattern == window} ) {\n\n out_state[i + 2] = rule.result;\n\n } else {\n\n out_state[i + 2] = 0;\n\n }\n\n }\n\n\n\n game.state = out_state;\n\n}\n\n\n", "file_path": "12/underground-gol/src/main.rs", "rank": 89, "score": 101601.69347462032 }, { "content": "struct Node {\n\n children: Vec<Box<Node>>,\n\n metadata: Vec<u32>,\n\n}\n\n\n\nimpl Node {\n\n #[cfg(test)]\n\n fn num_all_children(&self) -> usize {\n\n let mut num_all_children: usize = 0;\n\n for child in &self.children {\n\n num_all_children += child.num_all_children();\n\n }\n\n return num_all_children + self.children.len();\n\n }\n\n\n\n #[cfg(test)]\n\n fn sum_metadata(&self) -> u32 {\n\n let mut sum: u32 = self.metadata.iter().sum();\n\n\n\n for child in &self.children {\n", "file_path": "08/root-value/src/main.rs", "rank": 90, "score": 100164.1465948153 }, { "content": "fn step(game: &mut Game ) {\n\n game.zero_index += grow_state(&mut game.state);\n\n\n\n let in_state = &game.state;\n\n let mut out_state = in_state.clone();\n\n\n\n for i in 0..in_state.len() - 5 {\n\n let window = in_state[i + 0] << 4 | in_state[i + 1] << 3 | in_state[i + 2] << 2 | in_state[i + 3] << 1 | in_state[i + 4];\n\n if let Some(rule) = game.rules.iter().find(|r| {r.pattern == window} ) {\n\n out_state[i + 2] = rule.result;\n\n } else {\n\n out_state[i + 2] = 0;\n\n }\n\n }\n\n\n\n game.state = out_state;\n\n}\n\n\n", "file_path": "12/underground-gol-betterer/src/main.rs", "rank": 91, "score": 99578.1046782914 }, { "content": "fn max_power_for_size(grid: &Vec<i64>, side: i64) -> (i64, i64, i64) {\n\n let mut power = i64::min_value();\n\n let mut power_x = 0i64;\n\n let mut power_y = 0i64;\n\n for y in 0..(300 - side) {\n\n for x in 0..(300 - side) {\n\n let sample_power = sample_grid(grid, x, y, side);\n\n if sample_power > power {\n\n power = sample_power;\n\n power_x = x;\n\n power_y = 
y;\n\n }\n\n }\n\n }\n\n\n\n (power_x, power_y, power)\n\n}\n\n\n", "file_path": "11/chronal-charge-deluxe/src/main.rs", "rank": 92, "score": 99165.91266114349 }, { "content": "fn is_reactive(a: char, b: char) -> bool {\n\n assert_eq!(true, a.is_ascii());\n\n assert_eq!(true, b.is_ascii());\n\n\n\n if a.to_ascii_lowercase() != b.to_ascii_lowercase() {\n\n return false;\n\n }\n\n\n\n if (a.is_lowercase() && b.is_uppercase()) || (a.is_uppercase() && b.is_lowercase()) {\n\n return true;\n\n }\n\n\n\n return false;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "05/reducer/src/main.rs", "rank": 93, "score": 98776.50785772823 }, { "content": "fn max_power_for_size(grid: &Vec<i64>, side: i64) -> (i64, i64, i64) {\n\n let mut power = i64::min_value();\n\n let mut power_x = 0i64;\n\n let mut power_y = 0i64;\n\n for y in 0..(300 - side) {\n\n for x in 0..(300 - side) {\n\n let sample_power = sample_grid(grid, x, y, side);\n\n if sample_power > power {\n\n power = sample_power;\n\n power_x = x;\n\n power_y = y;\n\n }\n\n }\n\n }\n\n\n\n (power_x, power_y, power)\n\n}\n\n\n", "file_path": "11/chronal-charge-deluxe-royale/src/main.rs", "rank": 94, "score": 97348.75057038345 }, { "content": "fn get_next_step(instructions: &Graph<String, String, Directed, u32>, available: &mut Vec<NodeIndex<u32>>, complete: &HashSet<NodeIndex<u32>>) -> Option<NodeIndex<u32>> {\n\n // Work out the current step. 
The current step must available\n\n // and it must have had all it's prerequisites completed so\n\n // that it can be worked on\n\n let mut next_suitable_step_index = None;\n\n for (step_index, step) in available.iter().enumerate() {\n\n let mut satisfied = true;\n\n for step_prereq in instructions.neighbors_directed(*step, Incoming) {\n\n if !complete.contains(&step_prereq) {\n\n satisfied = false;\n\n break;\n\n }\n\n }\n\n\n\n if satisfied {\n\n next_suitable_step_index = Some(step_index);\n\n break;\n\n }\n\n }\n\n\n\n match next_suitable_step_index {\n\n Some(next_suitable_step_index) => Some(available.remove(next_suitable_step_index)),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "07/time-assembly-required/src/main.rs", "rank": 95, "score": 97275.172708623 }, { "content": "fn claims_from(lines: String) -> Vec<Claim> {\n\n let mut claims = Vec::new();\n\n for line in lines.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n claims.push(Claim::from(line));\n\n }\n\n\n\n return claims;\n\n}\n\n\n", "file_path": "03/overlaps/src/main.rs", "rank": 96, "score": 96908.35962945985 }, { "content": "fn claims_from(lines: String) -> Vec<Claim> {\n\n let mut claims = Vec::new();\n\n for line in lines.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n claims.push(Claim::from(line));\n\n }\n\n\n\n return claims;\n\n}\n\n\n", "file_path": "03/no-overlaps/src/main.rs", "rank": 97, "score": 96908.35962945985 }, { "content": "fn records_from_lines(text: &str) -> Vec<Record> {\n\n let mut records = Vec::new();\n\n\n\n // Initial parse\n\n for line in text.lines() {\n\n let line = line.trim();\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n\n\n let (moment, state) = split_entry(line);\n\n records.push(Record { moment, guard: -1, state });\n\n }\n\n records.sort_by( |a, b| { a.moment.cmp(&b.moment) });\n\n\n\n // Fixup the IDs\n\n let mut current_guard = -1;\n\n for record in &mut records {\n\n 
match record.state {\n\n GuardState::Begin(guard_id) => current_guard = guard_id,\n\n _ => assert_ne!(-1, current_guard),\n\n }\n\n record.guard = current_guard;\n\n }\n\n\n\n return records;\n\n}\n\n\n", "file_path": "04/repose/src/main.rs", "rank": 98, "score": 95225.47979700338 }, { "content": "fn is_reactive(a: char, b: char) -> bool {\n\n assert_eq!(true, a.is_ascii());\n\n assert_eq!(true, b.is_ascii());\n\n\n\n if a.to_ascii_lowercase() != b.to_ascii_lowercase() {\n\n return false;\n\n }\n\n\n\n if (a.is_lowercase() && b.is_uppercase()) || (a.is_uppercase() && b.is_lowercase()) {\n\n return true;\n\n }\n\n\n\n return false;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "05/good-better-bestest/src/main.rs", "rank": 99, "score": 94573.36726377551 } ]
Rust
src/commands/kv/bucket/sync.rs
aleclarson/wrangler
37d0506a845d6122190b0a43865a59e839347c1b
use std::collections::HashSet; use std::fs::metadata; use std::iter::FromIterator; use std::path::Path; use cloudflare::endpoints::workerskv::write_bulk::KeyValuePair; use crate::commands::kv; use crate::commands::kv::bucket::directory_keys_only; use crate::commands::kv::bucket::directory_keys_values; use crate::commands::kv::key::KeyList; use crate::settings::global_user::GlobalUser; use crate::settings::toml::Target; use crate::terminal::message; use super::manifest::AssetManifest; pub fn sync( target: &Target, user: &GlobalUser, namespace_id: &str, path: &Path, verbose: bool, ) -> Result<(Vec<KeyValuePair>, Vec<String>, AssetManifest), failure::Error> { kv::validate_target(target)?; let remote_keys_iter = KeyList::new(target, &user, namespace_id, None)?; let mut remote_keys: HashSet<String> = HashSet::new(); for remote_key in remote_keys_iter { match remote_key { Ok(remote_key) => { remote_keys.insert(remote_key.name); } Err(e) => failure::bail!(kv::format_error(e)), } } let (pairs, asset_manifest): (Vec<KeyValuePair>, AssetManifest) = directory_keys_values(target, path, verbose)?; let to_upload = filter_files(pairs, &remote_keys); let local_keys_vec: Vec<String> = match &metadata(path) { Ok(file_type) if file_type.is_dir() => directory_keys_only(target, path), Ok(_) => failure::bail!("{} should be a directory", path.display()), Err(e) => failure::bail!("{}", e), }?; let local_keys: HashSet<_> = HashSet::from_iter(local_keys_vec.into_iter()); let to_delete: Vec<_> = remote_keys .difference(&local_keys) .map(|key| key.to_owned()) .collect(); message::success("Success"); Ok((to_upload, to_delete, asset_manifest)) } fn filter_files(pairs: Vec<KeyValuePair>, already_uploaded: &HashSet<String>) -> Vec<KeyValuePair> { let mut filtered_pairs: Vec<KeyValuePair> = Vec::new(); for pair in pairs { if !already_uploaded.contains(&pair.key) { filtered_pairs.push(pair); } } filtered_pairs } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; use 
std::path::Path; use crate::commands::kv::bucket::generate_path_and_key; use cloudflare::endpoints::workerskv::write_bulk::KeyValuePair; #[test] fn it_can_filter_preexisting_files() { let (_, key_a_old) = generate_path_and_key(Path::new("/a"), Path::new("/"), Some("old".to_string())) .unwrap(); let (_, key_b_old) = generate_path_and_key(Path::new("/b"), Path::new("/"), Some("old".to_string())) .unwrap(); let (_, key_b_new) = generate_path_and_key(Path::new("/b"), Path::new("/"), Some("new".to_string())) .unwrap(); let mut exclude_keys = HashSet::new(); exclude_keys.insert(key_a_old.clone()); exclude_keys.insert(key_b_old); let pairs_to_upload = vec![ KeyValuePair { key: key_a_old, value: "old".to_string(), expiration_ttl: None, expiration: None, base64: None, }, KeyValuePair { key: key_b_new.clone(), value: "new".to_string(), expiration_ttl: None, expiration: None, base64: None, }, ]; let expected = vec![KeyValuePair { key: key_b_new, value: "new".to_string(), expiration_ttl: None, expiration: None, base64: None, }]; let actual = filter_files(pairs_to_upload, &exclude_keys); check_kv_pairs_equality(expected, actual); } fn check_kv_pairs_equality(expected: Vec<KeyValuePair>, actual: Vec<KeyValuePair>) { assert!(expected.len() == actual.len()); for (idx, pair) in expected.into_iter().enumerate() { assert!(pair.key == actual[idx].key); assert!(pair.value == actual[idx].value); } } }
use std::collections::HashSet; use std::fs::metadata; use std::iter::FromIterator; use std::path::Path; use cloudflare::endpoints::workerskv::write_bulk::KeyValuePair; use crate::commands::kv; use crate::commands::kv::bucket::directory_keys_only; use crate::commands::kv::bucket::directory_keys_values; use crate::commands::kv::key::KeyList; use crate::settings::global_user::GlobalUser; use crate::settings::toml::Target; use crate::terminal::message; use super::manifest::AssetManifest; pub fn sync( target: &Target, user: &GlobalUser, namespace_id: &str, path: &Path, verbose: bool, ) -> Result<(Vec<KeyValuePair>, Vec<String>, AssetManifest), failure::Error> { kv::validate_target(target)?; let remote_keys_iter = KeyList::new(target, &user, namespace_id, None)?; let mut remote_keys: HashSet<String> = HashSet::new(); for remote_key in remote_keys_iter { match remote_key { Ok(remote_key) => { remote_keys.insert(remote_key.name); } Err(e) => failure::bail!(kv::format_error(e)), } } let (pairs, asset_manifest): (Vec<KeyValuePair>, AssetManifest) = directory_keys_values(target, path, verbose)?; let to_upload = filter_files(pairs, &remote_keys); let local_keys_vec: Vec<String> = match &metadata(path) { Ok(file_type) if file_type.is_dir() => directory_keys_only(target, path), Ok(_) => failure::bail!("{} should be a directory", path.display()), Err(e) => failure::bail!("{}", e), }?; let local_keys: HashSet<_> = HashSet::from_iter(local_keys_vec.into_iter()); let to_delete: Vec<_> = remote_keys .difference(&local_keys) .map(|key| key.to_owned()) .collect(); message::success("Success"); Ok((to_upload, to_delete, asset_manifest)) } fn filter_files(pairs: Vec<KeyValuePair>, already_uploaded: &HashSet<String>) -> Vec<KeyValuePair> { let mut filtered_pairs: Vec<KeyValuePair> = Vec::new(); for pair in pairs { if !already_uploaded.contains(&pair.key) { filtered_pairs.push(pair); } } filtered_pairs } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; use 
std::path::Path; use crate::commands::kv::bucket::generate_path_and_key; use cloudflare::endpoints::workerskv::write_bulk::KeyValuePair; #[test] fn it_can_filter_preexisting_files() { let (_, key_a_old) = generate_path_and_key(Path::new("/a"), Path::new("/"), Some("old".to_string())) .unwrap(); let (_, key_b_old) = generate_path_and_key(Path::new("/b"), Path::new("/"), Some("old".to_string())) .unwrap(); let (_, key_b_new) = generate_path_and_key(Path::new("/b"), Path::new("/"), Some("new".to_string())) .unwrap(); let mut exclude_ke
fn check_kv_pairs_equality(expected: Vec<KeyValuePair>, actual: Vec<KeyValuePair>) { assert!(expected.len() == actual.len()); for (idx, pair) in expected.into_iter().enumerate() { assert!(pair.key == actual[idx].key); assert!(pair.value == actual[idx].value); } } }
ys = HashSet::new(); exclude_keys.insert(key_a_old.clone()); exclude_keys.insert(key_b_old); let pairs_to_upload = vec![ KeyValuePair { key: key_a_old, value: "old".to_string(), expiration_ttl: None, expiration: None, base64: None, }, KeyValuePair { key: key_b_new.clone(), value: "new".to_string(), expiration_ttl: None, expiration: None, base64: None, }, ]; let expected = vec![KeyValuePair { key: key_b_new, value: "new".to_string(), expiration_ttl: None, expiration: None, base64: None, }]; let actual = filter_files(pairs_to_upload, &exclude_keys); check_kv_pairs_equality(expected, actual); }
function_block-function_prefixed
[ { "content": "pub fn create_secret(name: &str, user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {\n\n validate_target(target)?;\n\n\n\n let secret_value = interactive::get_user_input(&format!(\n\n \"Enter the secret text you'd like assigned to the variable {} on the script named {}:\",\n\n name, target.name\n\n ));\n\n\n\n if secret_value.is_empty() {\n\n failure::bail!(\"Your secret cannot be empty.\")\n\n }\n\n\n\n message::working(&format!(\n\n \"Creating the secret for script name {}\",\n\n target.name\n\n ));\n\n\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n let params = CreateSecretParams {\n", "file_path": "src/commands/secret/mod.rs", "rank": 0, "score": 304483.93287606415 }, { "content": "pub fn delete_secret(name: &str, user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {\n\n validate_target(target)?;\n\n\n\n match interactive::delete(&format!(\n\n \"Are you sure you want to permanently delete the variable {} on the script named {}?\",\n\n name, target.name\n\n )) {\n\n Ok(true) => (),\n\n Ok(false) => {\n\n message::info(&format!(\"Not deleting secret {}.\", name));\n\n return Ok(());\n\n }\n\n Err(e) => failure::bail!(e),\n\n }\n\n\n\n message::working(&format!(\n\n \"Deleting the secret {} on script {}.\",\n\n name, target.name\n\n ));\n\n\n", "file_path": "src/commands/secret/mod.rs", "rank": 1, "score": 304483.93287606415 }, { "content": "pub fn set_subdomain(name: &str, user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {\n\n if target.account_id.is_empty() {\n\n failure::bail!(format!(\n\n \"{} You must provide an account_id in your wrangler.toml before creating a subdomain!\",\n\n emoji::WARN\n\n ))\n\n }\n\n let subdomain = Subdomain::get(&target.account_id, user)?;\n\n if let Some(subdomain) = subdomain {\n\n let msg = if subdomain == name {\n\n format!(\"You have previously registered {}.workers.dev\", subdomain)\n\n } else {\n\n format!(\"This account 
already has a registered subdomain. You can only register one subdomain per account. Your subdomain is {}.workers.dev\", subdomain)\n\n };\n\n failure::bail!(msg)\n\n } else {\n\n register_subdomain(&name, &user, &target)\n\n }\n\n}\n\n\n", "file_path": "src/commands/subdomain/mod.rs", "rank": 2, "score": 304483.93287606415 }, { "content": "pub fn get(target: &Target, user: &GlobalUser, id: &str, key: &str) -> Result<(), failure::Error> {\n\n kv::validate_target(target)?;\n\n let api_endpoint = format!(\n\n \"https://api.cloudflare.com/client/v4/accounts/{}/storage/kv/namespaces/{}/values/{}\",\n\n target.account_id,\n\n id,\n\n kv::url_encode_key(key)\n\n );\n\n\n\n let client = http::auth_client(None, &user);\n\n\n\n let res = client.get(&api_endpoint).send()?;\n\n\n\n let response_status = res.status();\n\n if response_status.is_success() {\n\n let body_text = res.text()?;\n\n // We don't use message::success because we don't want to include the emoji/formatting\n\n // in case someone is piping this to stdin\n\n print!(\"{}\", &body_text);\n\n } else {\n", "file_path": "src/commands/kv/key/get.rs", "rank": 3, "score": 282370.7918785146 }, { "content": "pub fn delete(target: &Target, user: &GlobalUser, id: &str) -> Result<(), failure::Error> {\n\n kv::validate_target(target)?;\n\n let client = kv::api_client(user)?;\n\n\n\n match interactive::delete(&format!(\n\n \"Are you sure you want to delete namespace {}?\",\n\n id\n\n )) {\n\n Ok(true) => (),\n\n Ok(false) => {\n\n message::info(&format!(\"Not deleting namespace {}\", id));\n\n return Ok(());\n\n }\n\n Err(e) => failure::bail!(e),\n\n }\n\n\n\n let msg = format!(\"Deleting namespace {}\", id);\n\n message::working(&msg);\n\n\n\n let response = client.request(&RemoveNamespace {\n", "file_path": "src/commands/kv/namespace/delete.rs", "rank": 4, "score": 275893.6114757572 }, { "content": "pub fn get_subdomain(user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {\n\n let subdomain = 
Subdomain::get(&target.account_id, user)?;\n\n if let Some(subdomain) = subdomain {\n\n let msg = format!(\"{}.workers.dev\", subdomain);\n\n message::info(&msg);\n\n } else {\n\n let msg =\n\n \"No subdomain registered. Use `wrangler subdomain <name>` to register one.\".to_string();\n\n message::user_error(&msg);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/subdomain/mod.rs", "rank": 5, "score": 268003.7969053103 }, { "content": "pub fn list_secrets(user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {\n\n validate_target(target)?;\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n let response = client.request(&ListSecrets {\n\n account_identifier: &target.account_id,\n\n script_name: &target.name,\n\n });\n\n\n\n match response {\n\n Ok(success) => {\n\n let secrets = success.result;\n\n println!(\"{}\", serde_json::to_string(&secrets)?);\n\n }\n\n Err(e) => failure::bail!(format_error(e)),\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/commands/secret/mod.rs", "rank": 6, "score": 268003.7969053103 }, { "content": "fn tool_needs_update(tool_name: &str, path: &Path) -> Result<bool, failure::Error> {\n\n let no_version_msg = format!(\"failed to find version for {}\", tool_name);\n\n\n\n let tool_version_output = Command::new(path.as_os_str())\n\n .arg(\"--version\")\n\n .output()\n\n .expect(&no_version_msg);\n\n\n\n if !tool_version_output.status.success() {\n\n let error = String::from_utf8_lossy(&tool_version_output.stderr);\n\n log::debug!(\"could not find version for {}\\n{}\", tool_name, error);\n\n return Ok(true);\n\n }\n\n\n\n let installed_tool_version = String::from_utf8_lossy(&tool_version_output.stdout);\n\n let installed_tool_version = match installed_tool_version.split_whitespace().last() {\n\n None => return Ok(true),\n\n Some(v) => v,\n\n };\n\n let latest_tool_version = get_latest_version(tool_name)?;\n", "file_path": "src/install/mod.rs", "rank": 7, "score": 258341.75559567224 }, { 
"content": "// Get namespace id for a given binding name.\n\npub fn get_namespace_id(target: &Target, binding: &str) -> Result<String, failure::Error> {\n\n if check_duplicate_namespaces(&target) {\n\n failure::bail!(\n\n \"Namespace binding \\\"{}\\\" is duplicated in \\\"{}\\\"\",\n\n binding,\n\n target.name\n\n )\n\n }\n\n\n\n if let Some(namespaces) = &target.kv_namespaces {\n\n for namespace in namespaces {\n\n if namespace.binding == binding {\n\n return Ok(namespace.id.to_string());\n\n }\n\n }\n\n }\n\n failure::bail!(\n\n \"Namespace binding \\\"{}\\\" not found in \\\"{}\\\"\",\n\n binding,\n\n target.name\n\n )\n\n}\n\n\n", "file_path": "src/commands/kv/mod.rs", "rank": 8, "score": 249507.34299414448 }, { "content": "fn generate_config_with(eol: &str, use_api_key: bool) {\n\n let fake_home_dir = env::current_dir()\n\n .expect(\"could not retrieve cwd\")\n\n .join(format!(\".it_generates_the_config_{}\", random_chars(5)));\n\n let cmd = config_with_wrangler_home(fake_home_dir.to_str().unwrap(), use_api_key);\n\n let mut stdin = cmd.stdin.unwrap();\n\n\n\n if use_api_key {\n\n write!(stdin, \"email@example.com{}\", eol).unwrap();\n\n write!(stdin, \"apikeythisissecretandlong{}\", eol).unwrap();\n\n } else {\n\n write!(stdin, \"apitokenthisissecretandlong{}\", eol).unwrap();\n\n }\n\n\n\n let mut buffer = \"\".to_string();\n\n let mut stdout = cmd.stdout.expect(\"stdout\");\n\n stdout\n\n .read_to_string(&mut buffer)\n\n .expect(\"could not read output\");\n\n assert!(buffer.contains(\"Successfully configured.\"));\n", "file_path": "tests/config.rs", "rank": 9, "score": 248305.98218637687 }, { "content": "pub fn global_config(user: &GlobalUser, verify: bool) -> Result<(), failure::Error> {\n\n if verify {\n\n message::info(\"Validating credentials...\");\n\n validate_credentials(user)?;\n\n }\n\n\n\n let config_file = get_global_config_path()?;\n\n user.to_file(&config_file)?;\n\n\n\n // set permissions on the file\n\n #[cfg(not(target_os = 
\"windows\"))]\n\n set_file_mode(&config_file);\n\n\n\n message::success(&format!(\n\n \"Successfully configured. You can find your configuration file at: {}\",\n\n &config_file.to_string_lossy()\n\n ));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/config/mod.rs", "rank": 10, "score": 247761.06553741015 }, { "content": "// Returns only the hashed keys for a directory's files.\n\nfn directory_keys_only(target: &Target, directory: &Path) -> Result<Vec<String>, failure::Error> {\n\n let mut key_vec: Vec<String> = Vec::new();\n\n\n\n let dir_walker = get_dir_iterator(target, directory)?;\n\n\n\n for entry in dir_walker {\n\n let entry = entry.unwrap();\n\n let path = entry.path();\n\n if path.is_file() {\n\n let value = std::fs::read(path)?;\n\n\n\n // Need to base64 encode value\n\n let b64_value = base64::encode(&value);\n\n\n\n let (_, key) = generate_path_and_key(path, directory, Some(b64_value))?;\n\n\n\n validate_key_size(&key)?;\n\n\n\n key_vec.push(key);\n\n }\n\n }\n\n Ok(key_vec)\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 11, "score": 240706.139213386 }, { "content": "fn check_duplicate_namespaces(target: &Target) -> bool {\n\n // HashSet for detecting duplicate namespace bindings\n\n let mut binding_names: HashSet<String> = HashSet::new();\n\n\n\n if let Some(namespaces) = &target.kv_namespaces {\n\n for namespace in namespaces {\n\n // Check if this is a duplicate binding\n\n if binding_names.contains(&namespace.binding) {\n\n return true;\n\n } else {\n\n binding_names.insert(namespace.binding.clone());\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/commands/kv/mod.rs", "rank": 12, "score": 238303.15154909366 }, { "content": "pub fn list(target: &Target, user: &GlobalUser) -> Result<(), failure::Error> {\n\n kv::validate_target(target)?;\n\n\n\n let client = kv::api_client(user)?;\n\n let result = call_api(&client, target);\n\n match result {\n\n Ok(success) => {\n\n let namespaces = success.result;\n\n 
println!(\"{}\", serde_json::to_string(&namespaces)?);\n\n }\n\n Err(e) => failure::bail!(\"{}\", kv::format_error(e)),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/kv/namespace/list.rs", "rank": 13, "score": 238165.3856981267 }, { "content": "fn build_ignore(target: &Target, directory: &Path) -> Result<Override, failure::Error> {\n\n let mut required_override = OverrideBuilder::new(directory);\n\n // First include files that must be ignored.\n\n for ignored in REQUIRED_IGNORE_FILES {\n\n required_override.add(&format!(\"!{}\", ignored))?;\n\n log::info!(\"Ignoring {}\", ignored);\n\n }\n\n\n\n if let Some(site) = &target.site {\n\n // If `include` present, use it and don't touch the `exclude` field\n\n if let Some(included) = &site.include {\n\n for i in included {\n\n required_override.add(&i)?;\n\n log::info!(\"Including {}\", i);\n\n }\n\n // If `exclude` only present, ignore anything in it.\n\n } else if let Some(excluded) = &site.exclude {\n\n for e in excluded {\n\n required_override.add(&format!(\"!{}\", e))?;\n\n log::info!(\"Ignoring {}\", e);\n\n }\n\n }\n\n }\n\n\n\n let exclude = required_override.build()?;\n\n Ok(exclude)\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 14, "score": 235859.8652564013 }, { "content": "// Run the given command and return its stdout.\n\npub fn run(mut command: Command, command_name: &str) -> Result<(), failure::Error> {\n\n log::info!(\"Running {:?}\", command);\n\n\n\n let status = command.status()?;\n\n\n\n if !status.success() {\n\n failure::bail!(\n\n \"tried running command:\\n{}\\nexited with {}\",\n\n command_name.replace(\"\\\"\", \"\"),\n\n status\n\n )\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 15, "score": 235760.19120070696 }, { "content": "fn get_dir_iterator(target: &Target, directory: &Path) -> Result<Walk, failure::Error> {\n\n // The directory provided should never be node_modules!\n\n if let Some(name) = directory.file_name() {\n\n if name == 
NODE_MODULES {\n\n failure::bail!(\"Your directory of files to upload cannot be named node_modules.\");\n\n }\n\n };\n\n\n\n let ignore = build_ignore(target, directory)?;\n\n Ok(WalkBuilder::new(directory)\n\n .git_ignore(false)\n\n .overrides(ignore)\n\n .build())\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 16, "score": 232666.06110327327 }, { "content": "fn config_with_wrangler_home(home_dir: &str, use_api_key: bool) -> Child {\n\n let mut wrangler = Command::cargo_bin(env!(\"CARGO_PKG_NAME\")).unwrap();\n\n // Don't verify provided information in the `wrangler config` invocation below;\n\n // this is distinct from the parsing functionality this I/O test focuses on\n\n if use_api_key {\n\n wrangler\n\n .arg(\"config\")\n\n .arg(\"--api-key\")\n\n .arg(\"--no-verify\")\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .env(\"WRANGLER_HOME\", home_dir)\n\n .spawn()\n\n .unwrap()\n\n } else {\n\n wrangler\n\n .arg(\"config\")\n\n .arg(\"--no-verify\")\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .env(\"WRANGLER_HOME\", home_dir)\n\n .spawn()\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "tests/config.rs", "rank": 17, "score": 232315.81738850864 }, { "content": "pub fn auth_client(feature: Option<&str>, user: &GlobalUser) -> Client {\n\n let mut headers = headers(feature);\n\n add_auth_headers(&mut headers, user);\n\n\n\n builder()\n\n .default_headers(headers)\n\n .redirect(Policy::none())\n\n .build()\n\n .expect(\"could not create authenticated http client\")\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 18, "score": 229317.60757700846 }, { "content": "pub fn user_error(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::EYES, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 19, "score": 224152.35371541147 }, { "content": "fn toml_fixture_path(fixture: &str) -> PathBuf {\n\n base_fixture_path().join(fixture)\n\n}\n", "file_path": "src/settings/toml/tests/mod.rs", "rank": 20, 
"score": 223689.5813387337 }, { "content": "pub fn validate_target(target: &Target) -> Result<(), failure::Error> {\n\n let mut missing_fields = Vec::new();\n\n\n\n if target.account_id.is_empty() {\n\n missing_fields.push(\"account_id\")\n\n };\n\n\n\n if !missing_fields.is_empty() {\n\n failure::bail!(\n\n \"Your wrangler.toml is missing the following field(s): {:?}\",\n\n missing_fields\n\n )\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/commands/kv/mod.rs", "rank": 21, "score": 223108.57751167275 }, { "content": "pub fn put(target: &Target, user: &GlobalUser, data: KVMetaData) -> Result<(), failure::Error> {\n\n kv::validate_target(target)?;\n\n\n\n let api_endpoint = format!(\n\n \"https://api.cloudflare.com/client/v4/accounts/{}/storage/kv/namespaces/{}/values/{}\",\n\n target.account_id,\n\n &data.namespace_id,\n\n kv::url_encode_key(&data.key)\n\n );\n\n\n\n // Add expiration and expiration_ttl query options as necessary.\n\n let mut query_params: Vec<(&str, &str)> = vec![];\n\n\n\n if let Some(exp) = &data.expiration {\n\n query_params.push((\"expiration\", exp))\n\n };\n\n if let Some(ttl) = &data.expiration_ttl {\n\n query_params.push((\"expiration_ttl\", ttl))\n\n };\n\n let url = Url::parse_with_params(&api_endpoint, query_params);\n", "file_path": "src/commands/kv/key/put.rs", "rank": 22, "score": 221262.69558143013 }, { "content": "// For interactively handling deletes (and discouraging accidental deletes).\n\n// Input like \"yes\", \"Yes\", \"no\", \"No\" will be accepted, thanks to the whitespace-stripping\n\n// and lowercasing logic below.\n\npub fn delete(prompt_string: &str) -> Result<bool, failure::Error> {\n\n println!(\"{} [y/n]\", prompt_string);\n\n let mut response: String = read!(\"{}\\n\");\n\n response = response.split_whitespace().collect(); // remove whitespace\n\n response.make_ascii_lowercase(); // ensure response is all lowercase\n\n response.truncate(INTERACTIVE_RESPONSE_LEN); // at this point, all valid input will 
be \"y\" or \"n\"\n\n match response.as_ref() {\n\n YES => Ok(true),\n\n NO => Ok(false),\n\n _ => failure::bail!(\"Response must either be \\\"y\\\" for yes or \\\"n\\\" for no\"),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_trims_user_input_right_whitespace_chars() {\n\n let test_str = \"mysecret\\r\".to_string();\n\n\n\n let truncated_str = strip_trailing_whitespace(test_str);\n\n assert_eq!(truncated_str, \"mysecret\")\n\n }\n\n}\n", "file_path": "src/terminal/interactive.rs", "rank": 23, "score": 219223.82900306815 }, { "content": "// Format errors from the cloudflare-rs cli for printing.\n\n// Optionally takes an argument for providing a function that maps error code numbers to\n\n// helpful additional information about why someone is getting an error message and how to fix it.\n\npub fn format_error(e: ApiFailure, err_helper: Option<&dyn Fn(u16) -> &'static str>) -> String {\n\n match e {\n\n ApiFailure::Error(status, api_errors) => {\n\n print_status_code_context(status);\n\n let mut complete_err = \"\".to_string();\n\n for error in api_errors.errors {\n\n let error_msg = format!(\"{} Code {}: {}\\n\", emoji::WARN, error.code, error.message);\n\n\n\n if let Some(annotate_help) = err_helper {\n\n let suggestion_text = annotate_help(error.code);\n\n let help_msg = format!(\"{} {}\\n\", emoji::SLEUTH, suggestion_text);\n\n complete_err.push_str(&format!(\"{}{}\", error_msg, help_msg));\n\n } else {\n\n complete_err.push_str(&error_msg)\n\n }\n\n }\n\n complete_err.trim_end().to_string() // Trimming strings in place for String is apparently not a thing...\n\n }\n\n ApiFailure::Invalid(reqwest_err) => format!(\"{} Error: {}\", emoji::WARN, reqwest_err),\n\n }\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 25, "score": 210558.704952238 }, { "content": "pub fn generate(name: Option<&str>, template: Option<&str>, project_type: Option<&str>) {\n\n let mut wrangler = 
Command::cargo_bin(env!(\"CARGO_PKG_NAME\")).unwrap();\n\n if name.is_none() && template.is_none() && project_type.is_none() {\n\n wrangler.arg(\"generate\").assert().success();\n\n } else if name.is_some() && template.is_some() && project_type.is_some() {\n\n wrangler\n\n .arg(\"generate\")\n\n .arg(name.unwrap())\n\n .arg(template.unwrap())\n\n .arg(\"--type\")\n\n .arg(project_type.unwrap())\n\n .assert()\n\n .success();\n\n }\n\n}\n\n\n", "file_path": "tests/generate.rs", "rank": 26, "score": 210431.71538375426 }, { "content": "// For interactively handling reading in a string\n\npub fn get_user_input(prompt_string: &str) -> String {\n\n println!(\"{}\", prompt_string);\n\n let mut input: String = read!(\"{}\\n\");\n\n input = strip_trailing_whitespace(input);\n\n input\n\n}\n\n\n", "file_path": "src/terminal/interactive.rs", "rank": 27, "score": 209129.96438837016 }, { "content": "pub fn whoami(user: &GlobalUser) -> Result<(), failure::Error> {\n\n // If using email + API key for auth, simply prints out email from config file.\n\n let auth: String = match user {\n\n GlobalUser::GlobalKeyAuth { email, .. } => {\n\n format!(\"a Global API Key, associated with the email '{}'\", email,)\n\n }\n\n GlobalUser::TokenAuth { .. } => format!(\"an API Token\"),\n\n };\n\n\n\n println!(\"\\n{} You are logged in with {}.\\n\", emoji::WAVING, auth,);\n\n let accounts = fetch_accounts(user)?;\n\n let table = format_accounts(user, accounts);\n\n println!(\"{}\", &table);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/whoami/mod.rs", "rank": 28, "score": 208068.05046257126 }, { "content": "// validate_credentials() checks the /user/tokens/verify endpoint (for API token)\n\n// or /user endpoint (for global API key) to ensure provided credentials actually work.\n\npub fn validate_credentials(user: &GlobalUser) -> Result<(), failure::Error> {\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n match user {\n\n GlobalUser::TokenAuth { .. 
} => {\n\n match client.request(&GetUserTokenStatus {}) {\n\n Ok(success) => {\n\n if success.result.status == \"active\" {\n\n Ok(())\n\n } else {\n\n failure::bail!(\"Authentication check failed. Your token has status \\\"{}\\\", not \\\"active\\\".\\nTry rolling your token on the Cloudflare dashboard.\")\n\n }\n\n },\n\n Err(e) => failure::bail!(\"Authentication check failed. Please make sure your API token is correct.\\n{}\", http::format_error(e, None))\n\n }\n\n }\n\n GlobalUser::GlobalKeyAuth { .. } => {\n\n match client.request(&GetUserDetails {}) {\n\n Ok(_) => Ok(()),\n\n Err(e) => failure::bail!(\"Authentication check failed. Please make sure your email and global API key pair are correct. (https://developers.cloudflare.com/workers/quickstart/#global-api-key)\\n{}\", http::format_error(e, None)),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/commands/config/mod.rs", "rank": 29, "score": 205226.54024198157 }, { "content": "pub fn run_generate(name: &str, template: &str) -> Result<(), failure::Error> {\n\n let tool_name = \"cargo-generate\";\n\n let binary_path = install::install(tool_name, \"ashleygwilliams\")?.binary(tool_name)?;\n\n\n\n let args = [\"generate\", \"--git\", template, \"--name\", name, \"--force\"];\n\n\n\n let command = command(binary_path, &args);\n\n let command_name = format!(\"{:?}\", command);\n\n\n\n commands::run(command, &command_name)\n\n}\n\n\n", "file_path": "src/commands/generate/mod.rs", "rank": 30, "score": 204788.98179103553 }, { "content": "pub fn install(tool_name: &str, owner: &str) -> Result<Download, failure::Error> {\n\n if let Some(download) = tool_exists(tool_name)? 
{\n\n return Ok(download);\n\n }\n\n\n\n let binaries = &[tool_name];\n\n let latest_version = get_latest_version(tool_name)?;\n\n let download = download_prebuilt(tool_name, owner, &latest_version, binaries);\n\n match download {\n\n Ok(download) => Ok(download),\n\n Err(e) => {\n\n failure::bail!(\"could not download pre-built `{}` ({}).\", tool_name, e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/install/mod.rs", "rank": 31, "score": 201964.518801447 }, { "content": "fn validate(target: &Target) -> Vec<&str> {\n\n let mut missing_fields = Vec::new();\n\n\n\n if target.account_id.is_empty() {\n\n missing_fields.push(\"account_id\")\n\n };\n\n if target.name.is_empty() {\n\n missing_fields.push(\"name\")\n\n };\n\n\n\n match &target.kv_namespaces {\n\n Some(kv_namespaces) => {\n\n for kv in kv_namespaces {\n\n if kv.binding.is_empty() {\n\n missing_fields.push(\"kv-namespace binding\")\n\n }\n\n\n\n if kv.id.is_empty() {\n\n missing_fields.push(\"kv-namespace id\")\n\n }\n\n }\n\n }\n\n None => {}\n\n }\n\n\n\n missing_fields\n\n}\n\n\n", "file_path": "src/commands/preview/upload.rs", "rank": 32, "score": 201288.7897807437 }, { "content": "// Returns the hashed key and value pair for all files in a directory.\n\npub fn directory_keys_values(\n\n target: &Target,\n\n directory: &Path,\n\n verbose: bool,\n\n) -> Result<(Vec<KeyValuePair>, AssetManifest), failure::Error> {\n\n match &fs::metadata(directory) {\n\n Ok(file_type) if file_type.is_dir() => {\n\n let mut upload_vec: Vec<KeyValuePair> = Vec::new();\n\n let mut asset_manifest: AssetManifest = AssetManifest::new();\n\n\n\n let dir_walker = get_dir_iterator(target, directory)?;\n\n\n\n for entry in dir_walker {\n\n let entry = entry.unwrap();\n\n let path = entry.path();\n\n if path.is_file() {\n\n if verbose {\n\n message::working(&format!(\"Preparing {}\", path.display()));\n\n }\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 33, "score": 197521.42229630938 }, { "content": "// Adds the SHA-256 
hash of the path's file contents to the url-safe path of a file to\n\n// generate a versioned key for the file and its contents. Returns the url-safe path prefix\n\n// for the key, as well as the key with hash appended.\n\n// e.g (sitemap.xml, sitemap.ec717eb2131fdd4fff803b851d2aa5b1dc3e0af36bc3c8c40f2095c747e80d1e.xml)\n\npub fn generate_path_and_key(\n\n path: &Path,\n\n directory: &Path,\n\n value: Option<String>,\n\n) -> Result<(String, String), failure::Error> {\n\n // strip the bucket directory from both paths for ease of reference.\n\n let relative_path = path.strip_prefix(directory).unwrap();\n\n\n\n let url_safe_path = generate_url_safe_path(relative_path)?;\n\n\n\n let path_with_hash = if let Some(value) = value {\n\n let digest = get_digest(value)?;\n\n\n\n generate_path_with_hash(relative_path, digest)?\n\n } else {\n\n url_safe_path.to_owned()\n\n };\n\n\n\n Ok((url_safe_path, path_with_hash))\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 34, "score": 197375.32592768816 }, { "content": "// Ensures that Worker name is valid.\n\npub fn validate_worker_name(name: &str) -> Result<(), failure::Error> {\n\n let re = Regex::new(r\"^[a-z0-9_][a-z0-9-_]*$\").unwrap();\n\n if !re.is_match(&name) {\n\n failure::bail!(\"Worker name \\\"{}\\\" invalid. 
Ensure that you only use lowercase letters, dashes, underscores, and numbers.\", name)\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_can_detect_invalid_worker_name() {\n\n let invalid_names = vec![\"mySite\", \"nicky.fun\"];\n\n for name in invalid_names {\n\n assert!(validate_worker_name(name).is_err());\n\n }\n\n }\n\n\n\n #[test]\n\n fn it_can_detect_valid_worker_name() {\n\n let valid_names = vec![\"my-blog\", \"blog123\", \"bloggyity_blog\"];\n\n for name in valid_names {\n\n assert!(validate_worker_name(name).is_ok());\n\n }\n\n }\n\n}\n", "file_path": "src/commands/mod.rs", "rank": 35, "score": 196794.10274050292 }, { "content": "pub fn worker(user: &GlobalUser, deploy_config: &DeployConfig) -> Result<(), failure::Error> {\n\n match deploy_config {\n\n DeployConfig::Zoneless(zoneless_config) => {\n\n // this is a zoneless deploy\n\n log::info!(\"publishing to workers.dev subdomain\");\n\n let deploy_address = publish_zoneless(user, zoneless_config)?;\n\n\n\n message::success(&format!(\n\n \"Successfully published your script to {}\",\n\n deploy_address\n\n ));\n\n\n\n Ok(())\n\n }\n\n DeployConfig::Zoned(zoned_config) => {\n\n // this is a zoned deploy\n\n log::info!(\"publishing to zone {}\", zoned_config.zone_id);\n\n\n\n let published_routes = publish_routes(&user, zoned_config)?;\n\n\n", "file_path": "src/deploy/mod.rs", "rank": 36, "score": 193349.25470401754 }, { "content": "pub fn list(zone_identifier: String, user: &GlobalUser) -> Result<(), failure::Error> {\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n let result = client.request(&ListRoutes {\n\n zone_identifier: &zone_identifier,\n\n });\n\n\n\n match result {\n\n Ok(success) => {\n\n let routes = success.result;\n\n println!(\"{}\", serde_json::to_string(&routes)?);\n\n }\n\n\n\n Err(e) => failure::bail!(\"{}\", http::format_error(e, None)),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": 
"src/commands/route/mod.rs", "rank": 37, "score": 193349.25470401757 }, { "content": "pub fn warn(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::WARN, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 38, "score": 190387.70678929068 }, { "content": "pub fn billboard(msg: &str) {\n\n let billboard = Billboard::builder()\n\n .border_style(BorderStyle::Round)\n\n .border_color(BorderColor::Cyan)\n\n .margin(1)\n\n .build();\n\n billboard.display(msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 39, "score": 190387.70678929068 }, { "content": "pub fn working(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::SWIRL, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 40, "score": 190387.70678929068 }, { "content": "pub fn info(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::INFO, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 41, "score": 190387.70678929068 }, { "content": "pub fn help(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::SLEUTH, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 42, "score": 190387.70678929068 }, { "content": "pub fn success(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::SPARKLES, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 43, "score": 190387.70678929068 }, { "content": "pub fn preview(msg: &str) {\n\n let msg = format!(\"{} {}\", emoji::WORKER, msg);\n\n message(&msg);\n\n}\n\n\n", "file_path": "src/terminal/message.rs", "rank": 44, "score": 190387.70678929068 }, { "content": "pub fn deprecation_warning(msg: &str) {\n\n let bb = Billboard::builder()\n\n .border_style(BorderStyle::Round)\n\n .border_color(BorderColor::Red)\n\n .margin(1)\n\n .build();\n\n bb.display(msg);\n\n}\n", "file_path": "src/terminal/message.rs", "rank": 45, "score": 187565.76137760514 }, { "content": "fn command(binary_path: PathBuf, args: 
&[&str]) -> Command {\n\n let mut c = if cfg!(target_os = \"windows\") {\n\n let mut c = Command::new(\"cmd\");\n\n c.arg(\"/C\");\n\n c.arg(binary_path);\n\n c\n\n } else {\n\n Command::new(binary_path)\n\n };\n\n\n\n c.args(args);\n\n c\n\n}\n", "file_path": "src/commands/generate/mod.rs", "rank": 46, "score": 185751.12184039262 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\npub fn set_file_mode(file: &PathBuf) {\n\n File::open(&file)\n\n .unwrap()\n\n .set_permissions(PermissionsExt::from_mode(0o600))\n\n .expect(\"could not set permissions on file\");\n\n}\n\n\n", "file_path": "src/commands/config/mod.rs", "rank": 47, "score": 183430.9315448835 }, { "content": "pub fn get_global_config_path() -> Result<PathBuf, failure::Error> {\n\n let home_dir = if let Ok(value) = env::var(\"WRANGLER_HOME\") {\n\n log::info!(\"Using $WRANGLER_HOME: {}\", value);\n\n Path::new(&value).to_path_buf()\n\n } else {\n\n log::info!(\"No $WRANGLER_HOME detected, using $HOME\");\n\n dirs::home_dir()\n\n .expect(\"Could not find home directory\")\n\n .join(\".wrangler\")\n\n };\n\n let global_config_file = home_dir.join(\"config\").join(DEFAULT_CONFIG_FILE_NAME);\n\n log::info!(\"Using global config file: {}\", global_config_file.display());\n\n Ok(global_config_file)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use tempfile::tempdir;\n", "file_path": "src/settings/global_user.rs", "rank": 48, "score": 180838.67087288207 }, { "content": "pub fn client(feature: Option<&str>) -> Client {\n\n builder()\n\n .default_headers(headers(feature))\n\n .build()\n\n .expect(\"could not create http client\")\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 49, "score": 178608.09381448667 }, { "content": "pub fn structure_request(parts: &mut RequestParts) {\n\n prepend_request_headers_prefix(parts)\n\n}\n\n\n", "file_path": "src/commands/dev/headers.rs", "rank": 50, "score": 176360.053093189 }, { "content": "////---------------------------OLD API 
CLIENT CODE---------------------------////\n\n// TODO: remove this and replace it entirely with cloudflare-rs\n\npub fn headers(feature: Option<&str>) -> HeaderMap {\n\n let version = if install::target::DEBUG {\n\n \"dev\"\n\n } else {\n\n env!(\"CARGO_PKG_VERSION\")\n\n };\n\n let user_agent = if let Some(feature) = feature {\n\n format!(\"wrangler/{}/{}\", version, feature)\n\n } else {\n\n format!(\"wrangler/{}\", version)\n\n };\n\n\n\n let mut headers = HeaderMap::new();\n\n headers.insert(USER_AGENT, HeaderValue::from_str(&user_agent).unwrap());\n\n headers\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 51, "score": 175927.90296834614 }, { "content": "fn add_auth_headers<'a>(headers: &'a mut HeaderMap, user: &GlobalUser) {\n\n match user {\n\n GlobalUser::TokenAuth { api_token } => {\n\n headers.insert(\n\n \"Authorization\",\n\n HeaderValue::from_str(&format!(\"Bearer {}\", &api_token)).unwrap(),\n\n );\n\n }\n\n GlobalUser::GlobalKeyAuth { email, api_key } => {\n\n headers.insert(\"X-Auth-Email\", HeaderValue::from_str(&email).unwrap());\n\n headers.insert(\"X-Auth-Key\", HeaderValue::from_str(&api_key).unwrap());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 52, "score": 174258.54898109072 }, { "content": "pub fn script(\n\n client: &Client,\n\n target: &Target,\n\n asset_manifest: Option<AssetManifest>,\n\n) -> Result<(), failure::Error> {\n\n let worker_addr = format!(\n\n \"https://api.cloudflare.com/client/v4/accounts/{}/workers/scripts/{}\",\n\n target.account_id, target.name,\n\n );\n\n\n\n let script_upload_form = form::build(target, asset_manifest)?;\n\n\n\n let res = client\n\n .put(&worker_addr)\n\n .multipart(script_upload_form)\n\n .send()?;\n\n\n\n let res_status = res.status();\n\n\n\n if !res_status.is_success() {\n\n let res_text = res.text()?;\n\n failure::bail!(error_msg(res_status, res_text))\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/upload/mod.rs", "rank": 53, "score": 173048.52960934202 }, { "content": "fn 
base_fixture_path() -> PathBuf {\n\n let current_dir = env::current_dir().unwrap();\n\n\n\n Path::new(&current_dir)\n\n .join(\"src\")\n\n .join(\"settings\")\n\n .join(\"toml\")\n\n .join(\"tests\")\n\n .join(\"tomls\")\n\n}\n\n\n", "file_path": "src/settings/toml/tests/mod.rs", "rank": 54, "score": 170423.3846053719 }, { "content": "fn validate_target(target: &Target) -> Result<(), failure::Error> {\n\n let mut missing_fields = Vec::new();\n\n\n\n if target.account_id.is_empty() {\n\n missing_fields.push(\"account_id\")\n\n };\n\n\n\n if !missing_fields.is_empty() {\n\n failure::bail!(\n\n \"{} Your wrangler.toml is missing the following field(s): {:?}\",\n\n emoji::WARN,\n\n missing_fields\n\n )\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/commands/secret/mod.rs", "rank": 55, "score": 170198.04925207194 }, { "content": "fn cleanup(name: &str) {\n\n fs::remove_dir_all(name).unwrap();\n\n assert_eq!(Path::new(name).exists(), false);\n\n}\n", "file_path": "tests/init.rs", "rank": 56, "score": 170097.516299766 }, { "content": "fn cleanup(name: &str) {\n\n fs::remove_dir_all(name).unwrap();\n\n assert_eq!(Path::new(name).exists(), false);\n\n}\n", "file_path": "tests/generate.rs", "rank": 57, "score": 170097.516299766 }, { "content": "pub fn delete(\n\n zone_identifier: String,\n\n user: &GlobalUser,\n\n route_id: &str,\n\n) -> Result<(), failure::Error> {\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n let result = client.request(&DeleteRoute {\n\n zone_identifier: &zone_identifier,\n\n identifier: route_id,\n\n });\n\n\n\n match result {\n\n Ok(success) => {\n\n let msg = format!(\"Successfully deleted route with id {}\", success.result.id);\n\n message::success(&msg);\n\n }\n\n\n\n Err(e) => failure::bail!(\"{}\", http::format_error(e, Some(&error_suggestions))),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/route/mod.rs", "rank": 58, "score": 169941.7291209761 }, { "content": "pub fn preview(\n\n 
mut target: Target,\n\n user: Option<GlobalUser>,\n\n method: HTTPMethod,\n\n body: Option<String>,\n\n livereload: bool,\n\n verbose: bool,\n\n headless: bool,\n\n) -> Result<(), failure::Error> {\n\n commands::build(&target)?;\n\n\n\n let sites_preview: bool = target.site.is_some();\n\n\n\n let script_id = upload(&mut target, user.as_ref(), sites_preview, verbose)?;\n\n\n\n let session = Uuid::new_v4().to_simple();\n\n let preview_host = \"example.com\";\n\n let https = true;\n\n let https_str = if https { \"https://\" } else { \"http://\" };\n\n\n", "file_path": "src/commands/preview/mod.rs", "rank": 59, "score": 169941.7291209761 }, { "content": "pub fn dev(\n\n target: Target,\n\n user: Option<GlobalUser>,\n\n host: Option<&str>,\n\n port: Option<&str>,\n\n ip: Option<&str>,\n\n verbose: bool,\n\n) -> Result<(), failure::Error> {\n\n let wrangler_dev_msg = style(\"`wrangler dev`\").yellow().bold();\n\n let feedback_url = style(\"https://github.com/cloudflare/wrangler/issues/1047\")\n\n .blue()\n\n .bold();\n\n message::billboard(&format!(\"{0} is currently unstable and there are likely to be breaking changes!\\nFor this reason, we cannot yet recommend using {0} for integration testing.\\n\\nPlease submit any feedback here: {1}\", wrangler_dev_msg, feedback_url));\n\n commands::build(&target)?;\n\n let server_config = ServerConfig::new(host, ip, port)?;\n\n let session_id = get_session_id()?;\n\n let preview_id = get_preview_id(\n\n target.clone(),\n\n user.clone(),\n\n &server_config,\n", "file_path": "src/commands/dev/mod.rs", "rank": 60, "score": 169941.7291209761 }, { "content": "pub fn install_artifact(\n\n tool_name: &str,\n\n owner: &str,\n\n version: &str,\n\n) -> Result<Download, failure::Error> {\n\n if let Some(download) = tool_exists(tool_name)? 
{\n\n return Ok(download);\n\n }\n\n\n\n let download = download_prebuilt(tool_name, owner, version, &[]);\n\n match download {\n\n Ok(download) => Ok(download),\n\n Err(e) => {\n\n failure::bail!(\"could not download pre-built `{}` ({}).\", tool_name, e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/install/mod.rs", "rank": 61, "score": 169941.7291209761 }, { "content": "pub fn generate(\n\n name: &str,\n\n template: &str,\n\n target_type: Option<TargetType>,\n\n site: bool,\n\n) -> Result<(), failure::Error> {\n\n validate_worker_name(name)?;\n\n\n\n log::info!(\"Generating a new worker project with name '{}'\", name);\n\n run_generate(name, template)?;\n\n\n\n let config_path = PathBuf::from(\"./\").join(&name);\n\n // TODO: this is tightly coupled to our site template. Need to remove once\n\n // we refine our generate logic.\n\n let generated_site = if site {\n\n Some(Site::new(\"./public\"))\n\n } else {\n\n None\n\n };\n\n Manifest::generate(name.to_string(), target_type, &config_path, generated_site)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/generate/mod.rs", "rank": 62, "score": 169941.7291209761 }, { "content": "pub fn build(\n\n target: &Target,\n\n asset_manifest: Option<AssetManifest>,\n\n) -> Result<Form, failure::Error> {\n\n let target_type = &target.target_type;\n\n let kv_namespaces = target.kv_namespaces();\n\n let mut text_blobs: Vec<TextBlob> = Vec::new();\n\n let mut plain_texts: Vec<PlainText> = Vec::new();\n\n let mut wasm_modules: Vec<WasmModule> = Vec::new();\n\n\n\n if let Some(vars) = &target.vars {\n\n for (key, value) in vars.iter() {\n\n plain_texts.push(PlainText::new(key.clone(), value.clone())?)\n\n }\n\n }\n\n\n\n match target_type {\n\n TargetType::Rust => {\n\n log::info!(\"Rust project detected. 
Publishing...\");\n\n let name = krate::Krate::new(\"./\")?.name.replace(\"-\", \"_\");\n", "file_path": "src/upload/form/mod.rs", "rank": 63, "score": 169941.7291209761 }, { "content": "pub fn init(\n\n name: Option<&str>,\n\n target_type: Option<TargetType>,\n\n site_flag: bool,\n\n) -> Result<(), failure::Error> {\n\n if Path::new(\"./wrangler.toml\").exists() {\n\n if site_flag {\n\n let msg = r#\"A wrangler.toml file already exists!\n\n\n\nTo add Workers Sites to your existing wrangler.toml, please add this section:\n\n\n\n[site]\n\nbucket = \"\" # this should point to the directory with static assets\n\nentry-point = \"workers-site\"\n\n\n\n\"#;\n\n failure::bail!(msg);\n\n } else {\n\n failure::bail!(\"A wrangler.toml file already exists! Please remove it before running this command again.\");\n\n }\n", "file_path": "src/commands/init/mod.rs", "rank": 64, "score": 169941.7291209761 }, { "content": "fn get_preview_url(path_string: &str) -> Result<Uri, InvalidUri> {\n\n format!(\"https://{}{}\", PREVIEW_HOST, path_string).parse()\n\n}\n\n\n", "file_path": "src/commands/dev/mod.rs", "rank": 65, "score": 169404.46669195598 }, { "content": "// This is broken into a separate step because the intended design does not\n\n// necessarily intend for bucket support outside of the [site] usage, especially\n\n// since assets are still hashed. 
In a subsequent release, we will either\n\n// deprecate this step, or we will integrate it more closely and adapt to user\n\n// feedback.\n\n//\n\n// In order to track usage of this \"feature\", this function returns a bool that\n\n// indicates whether any non-site kv namespaces were specified / uploaded.\n\npub fn sync_non_site_buckets(\n\n target: &Target,\n\n user: &GlobalUser,\n\n verbose: bool,\n\n) -> Result<bool, failure::Error> {\n\n let mut is_using_non_site_bucket = false;\n\n\n\n for namespace in target.kv_namespaces() {\n\n if let Some(path) = &namespace.bucket {\n\n is_using_non_site_bucket = true;\n\n validate_bucket_location(path)?;\n\n let (to_upload, to_delete, _) =\n\n kv::bucket::sync(target, user, &namespace.id, path, verbose)?;\n\n // First, upload all existing files in bucket directory\n\n if verbose {\n\n message::info(\"Preparing to upload updated files...\");\n\n }\n\n upload_files(target, user, &namespace.id, to_upload)?;\n\n\n\n // Finally, remove any stale files\n", "file_path": "src/commands/publish.rs", "rank": 66, "score": 165830.78670661207 }, { "content": "#[test]\n\nfn it_uses_env_name_when_provided() {\n\n let top_level_name = \"worker\";\n\n let custom_env_name = \"george\";\n\n\n\n let env_config = EnvConfig::custom_script_name(custom_env_name);\n\n let with_name_env_override = WranglerToml::with_env(top_level_name, env_config);\n\n let manifest = Manifest::from_str(&toml::to_string(&with_name_env_override).unwrap()).unwrap();\n\n\n\n assert_eq!(manifest.worker_name(Some(TEST_ENV_NAME)), custom_env_name);\n\n}\n\n\n", "file_path": "src/settings/toml/tests/mod.rs", "rank": 67, "score": 165564.4967261837 }, { "content": "#[test]\n\nfn parses_same_from_config_path_as_string() {\n\n env::remove_var(\"CF_ACCOUNT_ID\");\n\n env::remove_var(\"CF_ZONE_ID\");\n\n let config_path = toml_fixture_path(\"environments.toml\");\n\n eprintln!(\"{:#?}\", &config_path);\n\n let string_toml = fs::read_to_string(&config_path).unwrap();\n\n\n\n let 
manifest_from_string = Manifest::from_str(&string_toml).unwrap();\n\n let manifest_from_config = Manifest::new(&config_path).unwrap();\n\n\n\n assert_eq!(manifest_from_config, manifest_from_string);\n\n}\n\n\n", "file_path": "src/settings/toml/tests/mod.rs", "rank": 68, "score": 165413.35811381714 }, { "content": "// upload_draft_worker will attempt to upload a \"draft\" version of a worker script if it does not\n\n// already exist in the API (API error code 10007 is returned). The function returns None if this draft\n\n// script was uploaded, or else returns Some (with a Result type so we can return a potential script upload error\n\n// up the call chain).\n\npub fn upload_draft_worker(\n\n e: &ApiFailure,\n\n user: &GlobalUser,\n\n target: &Target,\n\n) -> Option<Result<(), failure::Error>> {\n\n match e {\n\n ApiFailure::Error(_, api_errors) => {\n\n for error in &api_errors.errors {\n\n if error.code == 10007 {\n\n message::working(&format!(\"Worker {} doesn't exist in the API yet. Creating a draft Worker so we can create new secret.\", target.name));\n\n let upload_client = http::auth_client(None, user);\n\n return Some(upload::script(&upload_client, target, None));\n\n } else {\n\n return None;\n\n }\n\n }\n\n return None;\n\n }\n\n ApiFailure::Invalid(_) => None,\n\n }\n\n}\n\n\n", "file_path": "src/commands/secret/mod.rs", "rank": 69, "score": 164211.69939543708 }, { "content": "pub fn get_preview_id(\n\n mut target: Target,\n\n user: Option<GlobalUser>,\n\n server_config: &ServerConfig,\n\n session_id: &str,\n\n verbose: bool,\n\n) -> Result<String, failure::Error> {\n\n let sites_preview = false;\n\n let script_id = upload(&mut target, user.as_ref(), sites_preview, verbose).map_err(|_| failure::format_err!(\"Could not upload your script. 
Check your internet connection or https://www.cloudflarestatus.com/ for rare incidents impacting the Cloudflare Workers API.\"))?;\n\n Ok(format!(\n\n \"{}{}{}{}\",\n\n &script_id,\n\n session_id,\n\n server_config.host.is_https() as u8,\n\n server_config.host\n\n ))\n\n}\n", "file_path": "src/commands/dev/mod.rs", "rank": 70, "score": 164208.3814126036 }, { "content": "fn generate(name: Option<&str>) {\n\n let mut wrangler = Command::cargo_bin(env!(\"CARGO_PKG_NAME\")).unwrap();\n\n wrangler\n\n .arg(\"generate\")\n\n .arg(name.unwrap())\n\n .assert()\n\n .success();\n\n}\n\n\n", "file_path": "tests/init.rs", "rank": 71, "score": 163537.84876846377 }, { "content": "fn filename_from_path(path: &PathBuf) -> Option<String> {\n\n path.file_stem()?.to_str().map(|s| s.to_string())\n\n}\n\n\n", "file_path": "src/upload/form/mod.rs", "rank": 72, "score": 163518.77005544514 }, { "content": "fn prebuilt_url(tool_name: &str, owner: &str, version: &str) -> Option<String> {\n\n if tool_name == \"wranglerjs\" {\n\n Some(format!(\n\n \"https://workers.cloudflare.com/get-wranglerjs-binary/{0}/v{1}.tar.gz\",\n\n tool_name, version\n\n ))\n\n } else {\n\n let target = if target::LINUX && target::x86_64 {\n\n \"x86_64-unknown-linux-musl\"\n\n } else if target::MACOS && target::x86_64 {\n\n \"x86_64-apple-darwin\"\n\n } else if target::WINDOWS && target::x86_64 {\n\n \"x86_64-pc-windows-msvc\"\n\n } else {\n\n return None;\n\n };\n\n\n\n let url = format!(\n\n \"https://workers.cloudflare.com/get-binary/{0}/{1}/v{2}/{3}.tar.gz\",\n\n owner, tool_name, version, target\n\n );\n\n Some(url)\n\n }\n\n}\n\n\n", "file_path": "src/install/mod.rs", "rank": 73, "score": 161878.79357249892 }, { "content": "fn fetch_all(user: &GlobalUser, zone_identifier: &str) -> Result<Vec<Route>, failure::Error> {\n\n let client = cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n\n\n let routes: Vec<Route> = match client.request(&ListRoutes { zone_identifier }) {\n\n Ok(success) => 
success.result.iter().map(Route::from).collect(),\n\n Err(e) => failure::bail!(\"{}\", format_error(e, None)), // TODO: add suggestion fn\n\n };\n\n\n\n Ok(routes)\n\n}\n\n\n", "file_path": "src/deploy/route.rs", "rank": 74, "score": 161133.42367478868 }, { "content": "// Ensure that all files in upload directory do not exceed the MAX_VALUE_SIZE (this ensures that\n\n// no partial uploads happen). I don't like this functionality (and the similar key length checking\n\n// logic in validate_key_size()) because it duplicates the size checking the API already does--but\n\n// doing a preemptive check like this (before calling the API) will prevent partial bucket uploads\n\n// from happening.\n\nfn validate_file_size(path: &Path) -> Result<(), failure::Error> {\n\n let metadata = fs::metadata(path)?;\n\n let file_len = metadata.len();\n\n\n\n if file_len > VALUE_MAX_SIZE {\n\n failure::bail!(\n\n \"File `{}` of {} bytes exceeds the maximum value size limit of {} bytes\",\n\n path.display(),\n\n file_len,\n\n VALUE_MAX_SIZE\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 75, "score": 161107.9297494809 }, { "content": "// Courtesy of Steve Klabnik's PoC :) Used for bulk operations (write, delete)\n\nfn generate_url_safe_path(path: &Path) -> Result<String, failure::Error> {\n\n // first, we have to re-build the paths: if we're on Windows, we have paths with\n\n // `\\` as separators. But we want to use `/` as separators. 
Because that's how URLs\n\n // work.\n\n let mut path_with_forward_slash = OsString::new();\n\n\n\n for (i, component) in path.components().enumerate() {\n\n // we don't want a leading `/`, so skip that\n\n if i > 0 {\n\n path_with_forward_slash.push(\"/\");\n\n }\n\n\n\n path_with_forward_slash.push(component);\n\n }\n\n\n\n // if we have a non-utf8 path here, it will fail, but that's not realistically going to happen\n\n let path = path_with_forward_slash\n\n .to_str()\n\n .unwrap_or_else(|| panic!(\"found a non-UTF-8 path, {:?}\", path_with_forward_slash));\n\n\n\n Ok(path.to_string())\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 76, "score": 160288.90531393525 }, { "content": "fn preview_succeeds_with(fixture: &Fixture, env: Option<&str>, expected: &str) {\n\n let _lock = fixture.lock();\n\n env::remove_var(\"CF_ACCOUNT_ID\");\n\n env::remove_var(\"CF_ZONE_ID\");\n\n let mut preview = Command::cargo_bin(env!(\"CARGO_PKG_NAME\")).unwrap();\n\n preview.current_dir(fixture.get_path());\n\n preview.arg(\"preview\").arg(\"--headless\");\n\n if let Some(env) = env {\n\n preview.arg(\"--env\").arg(env);\n\n }\n\n preview\n\n .assert()\n\n .stdout(predicates::str::contains(expected))\n\n .success();\n\n}\n\n\n", "file_path": "tests/preview.rs", "rank": 77, "score": 159772.8272854422 }, { "content": "pub fn destructure_response(parts: &mut ResponseParts) -> Result<(), failure::Error> {\n\n set_response_status(parts)?;\n\n strip_response_headers_prefix(parts)\n\n}\n\n\n", "file_path": "src/commands/dev/headers.rs", "rank": 78, "score": 158196.89816432225 }, { "content": "fn get_path_as_str(uri: &Uri) -> String {\n\n uri.path_and_query()\n\n .map(|x| x.as_str())\n\n .unwrap_or(\"\")\n\n .to_string()\n\n}\n\n\n", "file_path": "src/commands/dev/mod.rs", "rank": 79, "score": 157738.33596081194 }, { "content": "// Assumes that `path` is a file (called from a match branch for path.is_file())\n\n// Assumes that `hashed_value` is a String, not an 
Option<String> (called from a match branch for value.is_some())\n\nfn generate_path_with_hash(path: &Path, hashed_value: String) -> Result<String, failure::Error> {\n\n if let Some(file_stem) = path.file_stem() {\n\n let mut file_name = file_stem.to_os_string();\n\n let extension = path.extension();\n\n\n\n file_name.push(\".\");\n\n file_name.push(hashed_value);\n\n if let Some(ext) = extension {\n\n file_name.push(\".\");\n\n file_name.push(ext);\n\n }\n\n\n\n let new_path = path.with_file_name(file_name);\n\n\n\n Ok(generate_url_safe_path(&new_path)?)\n\n } else {\n\n failure::bail!(\"no file_stem for path {}\", path.display())\n\n }\n\n}\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 80, "score": 152633.77869377699 }, { "content": "fn url_encode_key(key: &str) -> String {\n\n percent_encode(key.as_bytes(), PATH_SEGMENT_ENCODE_SET).to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::commands::kv;\n\n use crate::settings::toml::{KvNamespace, Target, TargetType};\n\n\n\n #[test]\n\n fn it_can_detect_duplicate_bindings() {\n\n let target_with_dup_kv_bindings = Target {\n\n account_id: \"\".to_string(),\n\n kv_namespaces: Some(vec![\n\n KvNamespace {\n\n id: \"fake\".to_string(),\n\n binding: \"KV\".to_string(),\n\n bucket: None,\n\n },\n\n KvNamespace {\n", "file_path": "src/commands/kv/mod.rs", "rank": 81, "score": 152295.0885649292 }, { "content": "fn subdomain_addr(account_id: &str) -> String {\n\n format!(\n\n \"https://api.cloudflare.com/client/v4/accounts/{}/workers/subdomain\",\n\n account_id\n\n )\n\n}\n\n\n", "file_path": "src/commands/subdomain/mod.rs", "rank": 82, "score": 152295.08856492917 }, { "content": "fn build_fails_with(fixture: &Fixture, expected_message: &str) {\n\n let _lock = fixture.lock();\n\n let mut build = Command::cargo_bin(env!(\"CARGO_PKG_NAME\")).unwrap();\n\n build.current_dir(fixture.get_path());\n\n build.arg(\"build\");\n\n\n\n let output = build.output().expect(\"failed to execute process\");\n\n 
assert!(!output.status.success());\n\n assert!(\n\n str::from_utf8(&output.stderr)\n\n .unwrap()\n\n .contains(expected_message),\n\n format!(\n\n \"expected {:?} not found, given: {:?}\",\n\n expected_message,\n\n str::from_utf8(&output.stderr)\n\n )\n\n );\n\n}\n", "file_path": "tests/build.rs", "rank": 83, "score": 152080.19890001533 }, { "content": "fn error_suggestions(code: u16) -> &'static str {\n\n match code {\n\n 10005 => \"Confirm the route id by running `wrangler route list`\",\n\n _ => \"\",\n\n }\n\n}\n", "file_path": "src/commands/route/mod.rs", "rank": 84, "score": 149259.69069380872 }, { "content": "fn format_accounts(user: &GlobalUser, accounts: Vec<Account>) -> Table {\n\n let mut table = Table::new();\n\n let table_head = Row::new(vec![Cell::new(\"Account Name\"), Cell::new(\"Account ID\")]);\n\n table.add_row(table_head);\n\n\n\n if let GlobalUser::TokenAuth { api_token: _ } = user {\n\n if accounts.is_empty() {\n\n println!(\"Your token is missing the 'Account Settings: Read' permission.\\n\\nPlease generate and auth with a new token that has these perms to be able to list your accounts.\\n\");\n\n }\n\n }\n\n\n\n for account in accounts {\n\n let row = Row::new(vec![Cell::new(&account.name), Cell::new(&account.id)]);\n\n table.add_row(row);\n\n }\n\n table\n\n}\n", "file_path": "src/commands/whoami/mod.rs", "rank": 85, "score": 148625.773304593 }, { "content": "// secret_errors() provides more detailed explanations of Workers KV API error codes.\n\n// See https://api.cloudflare.com/#workers-secrets ? for details.\n\nfn secret_errors(error_code: u16) -> &'static str {\n\n match error_code {\n\n 7003 | 7000 => {\n\n \"Your wrangler.toml is likely missing the field \\\"account_id\\\", which is required to create a secret.\"\n\n }\n\n 10053 => \"There is already another binding with a different type by this name. 
Check your wrangler.toml or your Cloudflare dashboard for conflicting bindings\",\n\n 10054 => \"Your secret is too large, it must be 1kB or less\",\n\n 10055 => \"You have exceeded the limit of 32 text bindings for this worker. Run `wrangler secret list` or go to your Cloudflare dashboard to clean up unused text/secret variables\",\n\n _ => \"\",\n\n }\n\n}\n\n\n", "file_path": "src/commands/secret/mod.rs", "rank": 86, "score": 146855.42500543687 }, { "content": "// kv_help() provides more detailed explanations of Workers KV API error codes.\n\n// See https://api.cloudflare.com/#workers-kv-namespace-errors for details.\n\nfn kv_help(error_code: u16) -> &'static str {\n\n match error_code {\n\n 7003 | 7000 => {\n\n \"Your wrangler.toml is likely missing the field \\\"account_id\\\", which is required to write to Workers KV.\"\n\n }\n\n // namespace errors\n\n 10010 | 10011 | 10012 | 10013 | 10014 | 10018 => {\n\n \"Run `wrangler kv:namespace list` to see your existing namespaces with IDs\"\n\n }\n\n 10009 => \"Run `wrangler kv:key list` to see your existing keys\", // key errors\n\n // TODO: link to more info\n\n // limit errors\n\n 10022 | 10024 | 10030 => \"See documentation\",\n\n // TODO: link to tool for this?\n\n // legacy namespace errors\n\n 10021 | 10035 | 10038 => \"Consider moving this namespace\",\n\n // cloudflare account errors\n\n 10017 | 10026 => \"Workers KV is a paid feature, please upgrade your account (https://www.cloudflare.com/products/workers-kv/)\",\n\n _ => \"\",\n\n }\n\n}\n\n\n", "file_path": "src/commands/kv/mod.rs", "rank": 87, "score": 146855.42500543687 }, { "content": "// Create a special API client that has a longer timeout than usual, given that KV operations\n\n// can be lengthy if payloads are large.\n\nfn api_client(user: &GlobalUser) -> Result<HttpApiClient, failure::Error> {\n\n http::cf_v4_api_client(\n\n user,\n\n HttpApiClientConfig {\n\n default_headers: http::headers(None),\n\n // Use 5 minute timeout instead of default 
30-second one.\n\n // This is useful for bulk upload operations.\n\n http_timeout: Duration::from_secs(5 * 60),\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/commands/kv/mod.rs", "rank": 88, "score": 144678.5969397085 }, { "content": "fn fetch_accounts(user: &GlobalUser) -> Result<Vec<Account>, failure::Error> {\n\n let client = http::cf_v4_api_client(user, HttpApiClientConfig::default())?;\n\n let response = client.request(&account::ListAccounts { params: None });\n\n match response {\n\n Ok(res) => Ok(res.result),\n\n Err(e) => failure::bail!(http::format_error(e, None)),\n\n }\n\n}\n\n\n", "file_path": "src/commands/whoami/mod.rs", "rank": 89, "score": 144410.06585952255 }, { "content": "fn concat_js(name: &str) -> Result<(), failure::Error> {\n\n let bindgen_js_path = format!(\"./pkg/{}.js\", name);\n\n let bindgen_js: String = fs::read_to_string(bindgen_js_path)?.parse()?;\n\n\n\n let worker_js: String = fs::read_to_string(\"./worker/worker.js\")?.parse()?;\n\n let js = format!(\"{} {}\", bindgen_js, worker_js);\n\n\n\n fs::write(\"./worker/generated/script.js\", js.as_bytes())?;\n\n Ok(())\n\n}\n", "file_path": "src/upload/form/mod.rs", "rank": 90, "score": 144156.30821200972 }, { "content": "fn open_browser(url: &str) -> Result<(), failure::Error> {\n\n let _output = if cfg!(target_os = \"windows\") {\n\n let url_escaped = url.replace(\"&\", \"^&\");\n\n let windows_cmd = format!(\"start {}\", url_escaped);\n\n Command::new(\"cmd\").args(&[\"/C\", &windows_cmd]).output()?\n\n } else if cfg!(target_os = \"linux\") {\n\n let linux_cmd = format!(r#\"xdg-open \"{}\"\"#, url);\n\n Command::new(\"sh\").arg(\"-c\").arg(&linux_cmd).output()?\n\n } else {\n\n let mac_cmd = format!(r#\"open \"{}\"\"#, url);\n\n Command::new(\"sh\").arg(\"-c\").arg(&mac_cmd).output()?\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/preview/mod.rs", "rank": 91, "score": 144156.30821200972 }, { "content": "fn test_env(env_config: EnvConfig) -> HashMap<&'static str, EnvConfig> 
{\n\n let mut env = HashMap::new();\n\n env.insert(TEST_ENV_NAME, env_config);\n\n\n\n env\n\n}\n", "file_path": "tests/fixture/wrangler_toml.rs", "rank": 92, "score": 142035.84600174488 }, { "content": "fn validate_key_size(key: &str) -> Result<(), failure::Error> {\n\n if key.len() > KEY_MAX_SIZE {\n\n failure::bail!(\n\n \"Path `{}` of {} bytes exceeds the maximum key size limit of {} bytes\",\n\n key,\n\n key.len(),\n\n KEY_MAX_SIZE\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n\nconst REQUIRED_IGNORE_FILES: &[&str] = &[\"node_modules\"];\n\nconst NODE_MODULES: &str = \"node_modules\";\n\n\n", "file_path": "src/commands/kv/bucket/mod.rs", "rank": 93, "score": 139677.01910889978 }, { "content": "#[test]\n\nfn it_fails_with_webpack_target_node() {\n\n let fixture = Fixture::new();\n\n fixture.scaffold_webpack();\n\n\n\n fixture.create_file(\n\n \"webpack.config.js\",\n\n r#\"\n\n module.exports = {\n\n \"entry\": \"./index.js\",\n\n \"target\": \"node\"\n\n }\n\n \"#,\n\n );\n\n\n\n let wrangler_toml = WranglerToml::webpack_std_config(\"test-build-fails-webpack-target-node\");\n\n fixture.create_wrangler_toml(wrangler_toml);\n\n\n\n build_fails_with(\n\n &fixture,\n\n \"Building a Cloudflare Worker with target \\\"node\\\" is not supported\",\n\n );\n\n}\n\n\n", "file_path": "tests/build.rs", "rank": 94, "score": 139459.44564613266 }, { "content": "#[test]\n\nfn it_builds_with_webpack_target_webworker() {\n\n let fixture = Fixture::new();\n\n fixture.scaffold_webpack();\n\n\n\n fixture.create_file(\n\n \"webpack.config.js\",\n\n r#\"\n\n module.exports = {\n\n \"entry\": \"./index.js\",\n\n \"target\": \"webworker\"\n\n }\n\n \"#,\n\n );\n\n\n\n let wrangler_toml = WranglerToml::webpack_std_config(\"test-build-webpack-target-webworker\");\n\n fixture.create_wrangler_toml(wrangler_toml);\n\n\n\n build_creates_assets(&fixture, vec![\"script.js\"]);\n\n}\n\n\n", "file_path": "tests/build.rs", "rank": 95, "score": 139459.44564613266 }, { "content": "#[test]\n\nfn 
it_fails_with_webpack_target_web() {\n\n let fixture = Fixture::new();\n\n fixture.scaffold_webpack();\n\n\n\n fixture.create_file(\n\n \"webpack.config.js\",\n\n r#\"\n\n module.exports = {\n\n \"entry\": \"./index.js\",\n\n \"target\": \"web\"\n\n }\n\n \"#,\n\n );\n\n\n\n let wrangler_toml = WranglerToml::webpack_std_config(\"test-build-fails-webpack-target-web\");\n\n fixture.create_wrangler_toml(wrangler_toml);\n\n\n\n build_fails_with(\n\n &fixture,\n\n \"Building a Cloudflare Worker with target \\\"web\\\" is not supported\",\n\n );\n\n}\n\n\n", "file_path": "tests/build.rs", "rank": 96, "score": 139459.44564613266 }, { "content": "// We don't want folks setting their bucket to the top level directory,\n\n// which is where wrangler commands are always called from.\n\npub fn validate_bucket_location(bucket: &PathBuf) -> Result<(), failure::Error> {\n\n // TODO: this should really use a convenience function for \"Wrangler Project Root\"\n\n let current_dir = env::current_dir()?;\n\n if bucket.as_os_str() == current_dir {\n\n failure::bail!(\n\n \"{} Your bucket cannot be set to the parent directory of your wrangler.toml\",\n\n emoji::WARN\n\n )\n\n }\n\n let path = Path::new(&bucket);\n\n if !path.exists() {\n\n failure::bail!(\n\n \"{} bucket directory \\\"{}\\\" does not exist\",\n\n emoji::WARN,\n\n path.display()\n\n )\n\n } else if !path.is_dir() {\n\n failure::bail!(\n\n \"{} bucket \\\"{}\\\" is not a directory\",\n\n emoji::WARN,\n\n path.display()\n\n )\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/publish.rs", "rank": 97, "score": 138114.45209989807 }, { "content": "fn get_latest_version(tool_name: &str) -> Result<String, failure::Error> {\n\n Ok(Krate::new(tool_name)?.max_version)\n\n}\n\n\n", "file_path": "src/install/mod.rs", "rank": 98, "score": 137261.2333663163 }, { "content": "pub fn install() -> ! 
{\n\n if let Err(e) = do_install() {\n\n eprintln!(\"{}\", e);\n\n for cause in e.iter_causes() {\n\n eprintln!(\"Caused by: {}\", cause);\n\n }\n\n }\n\n\n\n // On Windows we likely popped up a console for the installation. If we were\n\n // to exit here immediately then the user wouldn't see any error that\n\n // happened above or any successful message. Let's wait for them to say\n\n // they've read everything and then continue.\n\n if cfg!(windows) {\n\n println!(\"Press enter to close this window...\");\n\n let mut line = String::new();\n\n drop(io::stdin().read_line(&mut line));\n\n }\n\n\n\n process::exit(0);\n\n}\n\n\n", "file_path": "src/installer.rs", "rank": 99, "score": 137067.64968909093 } ]
Rust
rust/src/cosmos/crypto/multisig.rs
PFC-Validator/terra.proto
4939cca497ba641046d18546dd234fd33a3f61c6
#![allow(unknown_lints)] #![allow(clippy::all)] #![allow(unused_attributes)] #![cfg_attr(rustfmt, rustfmt::skip)] #![allow(box_pointers)] #![allow(dead_code)] #![allow(missing_docs)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(non_upper_case_globals)] #![allow(trivial_casts)] #![allow(unused_imports)] #![allow(unused_results)] #[derive(PartialEq,Clone,Default)] pub struct MultiSignature { pub signatures: ::protobuf::RepeatedField<::std::vec::Vec<u8>>, pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a MultiSignature { fn default() -> &'a MultiSignature { <MultiSignature as ::protobuf::Message>::default_instance() } } impl MultiSignature { pub fn new() -> MultiSignature { ::std::default::Default::default() } pub fn get_signatures(&self) -> &[::std::vec::Vec<u8>] { &self.signatures } pub fn clear_signatures(&mut self) { self.signatures.clear(); } pub fn set_signatures(&mut self, v: ::protobuf::RepeatedField<::std::vec::Vec<u8>>) { self.signatures = v; } pub fn mut_signatures(&mut self) -> &mut ::protobuf::RepeatedField<::std::vec::Vec<u8>> { &mut self.signatures } pub fn take_signatures(&mut self) -> ::protobuf::RepeatedField<::std::vec::Vec<u8>> { ::std::mem::replace(&mut self.signatures, ::protobuf::RepeatedField::new()) } } impl ::protobuf::Message for MultiSignature { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { ::protobuf::rt::read_repeated_bytes_into(wire_type, is, &mut self.signatures)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in &self.signatures { my_size += ::protobuf::rt::bytes_size(1, &value); }; my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { for v in &self.signatures { os.write_bytes(1, &v)?; }; os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> MultiSignature { MultiSignature::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( "signatures", |m: &MultiSignature| { &m.signatures }, |m: &mut MultiSignature| { &mut m.signatures }, )); 
::protobuf::reflect::MessageDescriptor::new_pb_name::<MultiSignature>( "MultiSignature", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static MultiSignature { static instance: ::protobuf::rt::LazyV2<MultiSignature> = ::protobuf::rt::LazyV2::INIT; instance.get(MultiSignature::new) } } impl ::protobuf::Clear for MultiSignature { fn clear(&mut self) { self.signatures.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for MultiSignature { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for MultiSignature { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } #[derive(PartialEq,Clone,Default)] pub struct CompactBitArray { pub extra_bits_stored: u32, pub elems: ::std::vec::Vec<u8>, pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a CompactBitArray { fn default() -> &'a CompactBitArray { <CompactBitArray as ::protobuf::Message>::default_instance() } } impl CompactBitArray { pub fn new() -> CompactBitArray { ::std::default::Default::default() } pub fn get_extra_bits_stored(&self) -> u32 { self.extra_bits_stored } pub fn clear_extra_bits_stored(&mut self) { self.extra_bits_stored = 0; } pub fn set_extra_bits_stored(&mut self, v: u32) { self.extra_bits_stored = v; } pub fn get_elems(&self) -> &[u8] { &self.elems } pub fn clear_elems(&mut self) { self.elems.clear(); } pub fn set_elems(&mut self, v: ::std::vec::Vec<u8>) { self.elems = v; } pub fn mut_elems(&mut self) -> &mut ::std::vec::Vec<u8> { &mut self.elems } pub fn take_elems(&mut self) -> ::std::vec::Vec<u8> { ::std::mem::replace(&mut self.elems, ::std::vec::Vec::new()) } } impl ::protobuf::Message for CompactBitArray { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> 
::protobuf::ProtobufResult<()> { while !is.eof()? { let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { if wire_type != ::protobuf::wire_format::WireTypeVarint { return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); } let tmp = is.read_uint32()?; self.extra_bits_stored = tmp; }, 2 => { ::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.elems)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; if self.extra_bits_stored != 0 { my_size += ::protobuf::rt::value_size(1, self.extra_bits_stored, ::protobuf::wire_format::WireTypeVarint); } if !self.elems.is_empty() { my_size += ::protobuf::rt::bytes_size(2, &self.elems); } my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { if self.extra_bits_stored != 0 { os.write_uint32(1, self.extra_bits_stored)?; } if !self.elems.is_empty() { os.write_bytes(2, &self.elems)?; } os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> CompactBitArray { 
CompactBitArray::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>( "extra_bits_stored", |m: &CompactBitArray| { &m.extra_bits_stored }, |m: &mut CompactBitArray| { &mut m.extra_bits_stored }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( "elems", |m: &CompactBitArray| { &m.elems }, |m: &mut CompactBitArray| { &mut m.elems }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<CompactBitArray>( "CompactBitArray", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static CompactBitArray { static instance: ::protobuf::rt::LazyV2<CompactBitArray> = ::protobuf::rt::LazyV2::INIT; instance.get(CompactBitArray::new) } } impl ::protobuf::Clear for CompactBitArray { fn clear(&mut self) { self.extra_bits_stored = 0; self.elems.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for CompactBitArray { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for CompactBitArray { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } static file_descriptor_proto_data: &'static [u8] = b"\ \n-cosmos/crypto/multisig/v1beta1/multisig.proto\x12\x1ecosmos.crypto.mu\ ltisig.v1beta1\x1a\x14gogoproto/gogo.proto\"6\n\x0eMultiSignature\x12\ \x1e\n\nsignatures\x18\x01\x20\x03(\x0cR\nsignatures:\x04\xd0\xa1\x1f\ \x01\"Y\n\x0fCompactBitArray\x12*\n\x11extra_bits_stored\x18\x01\x20\x01\ (\rR\x0fextraBitsStored\x12\x14\n\x05elems\x18\x02\x20\x01(\x0cR\x05elem\ 
s:\x04\x98\xa0\x1f\0B+Z)github.com/cosmos/cosmos-sdk/crypto/typesb\x06pr\ oto3\ "; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap() } pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto { file_descriptor_proto_lazy.get(|| { parse_descriptor_proto() }) }
#![allow(unknown_lints)] #![allow(clippy::all)] #![allow(unused_attributes)] #![cfg_attr(rustfmt, rustfmt::skip)] #![allow(box_pointers)] #![allow(dead_code)] #![allow(missing_docs)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(non_upper_case_globals)] #![allow(trivial_casts)] #![allow(unused_imports)] #![allow(unused_results)] #[derive(PartialEq,Clone,Default)] pub struct MultiSignature { pub signatures: ::protobuf::RepeatedField<::std::vec::Vec<u8>>, pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a MultiSignature { fn default() -> &'a MultiSignature { <MultiSignature as ::protobuf::Message>::default_instance() } } impl MultiSignature { pub fn new() -> MultiSignature { ::std::default::Default::default() } pub fn get_signatures(&self) -> &[::std::vec::Vec<u8>] { &self.signatures } pub fn clear_signatures(&mut self) { self.signatures.clear(); } pub fn set_signatures(&mut self, v: ::protobuf::RepeatedField<::std::vec::Vec<u8>>) { self.signatures = v; } pub fn mut_signatures(&mut self) -> &mut ::protobuf::RepeatedField<::std::vec::Vec<u8>> { &mut self.signatures } pub fn take_signatures(&mut self) -> ::protobuf::RepeatedField<::std::vec::Vec<u8>> { ::std::mem::replace(&mut self.signatures, ::protobuf::RepeatedField::new()) } } impl ::protobuf::Message for MultiSignature { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { ::protobuf::rt::read_repeated_bytes_into(wire_type, is, &mut self.signatures)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in &self.signatures { my_size += ::protobuf::rt::bytes_size(1, &value); }; my_si
d }, |m: &mut CompactBitArray| { &mut m.extra_bits_stored }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( "elems", |m: &CompactBitArray| { &m.elems }, |m: &mut CompactBitArray| { &mut m.elems }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<CompactBitArray>( "CompactBitArray", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static CompactBitArray { static instance: ::protobuf::rt::LazyV2<CompactBitArray> = ::protobuf::rt::LazyV2::INIT; instance.get(CompactBitArray::new) } } impl ::protobuf::Clear for CompactBitArray { fn clear(&mut self) { self.extra_bits_stored = 0; self.elems.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for CompactBitArray { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for CompactBitArray { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } static file_descriptor_proto_data: &'static [u8] = b"\ \n-cosmos/crypto/multisig/v1beta1/multisig.proto\x12\x1ecosmos.crypto.mu\ ltisig.v1beta1\x1a\x14gogoproto/gogo.proto\"6\n\x0eMultiSignature\x12\ \x1e\n\nsignatures\x18\x01\x20\x03(\x0cR\nsignatures:\x04\xd0\xa1\x1f\ \x01\"Y\n\x0fCompactBitArray\x12*\n\x11extra_bits_stored\x18\x01\x20\x01\ (\rR\x0fextraBitsStored\x12\x14\n\x05elems\x18\x02\x20\x01(\x0cR\x05elem\ s:\x04\x98\xa0\x1f\0B+Z)github.com/cosmos/cosmos-sdk/crypto/typesb\x06pr\ oto3\ "; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap() } pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto { file_descriptor_proto_lazy.get(|| { 
parse_descriptor_proto() }) }
ze += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { for v in &self.signatures { os.write_bytes(1, &v)?; }; os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> MultiSignature { MultiSignature::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( "signatures", |m: &MultiSignature| { &m.signatures }, |m: &mut MultiSignature| { &mut m.signatures }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<MultiSignature>( "MultiSignature", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static MultiSignature { static instance: ::protobuf::rt::LazyV2<MultiSignature> = ::protobuf::rt::LazyV2::INIT; instance.get(MultiSignature::new) } } impl ::protobuf::Clear for MultiSignature { fn clear(&mut self) { self.signatures.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for MultiSignature { fn fmt(&self, f: &mut 
::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for MultiSignature { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } #[derive(PartialEq,Clone,Default)] pub struct CompactBitArray { pub extra_bits_stored: u32, pub elems: ::std::vec::Vec<u8>, pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a CompactBitArray { fn default() -> &'a CompactBitArray { <CompactBitArray as ::protobuf::Message>::default_instance() } } impl CompactBitArray { pub fn new() -> CompactBitArray { ::std::default::Default::default() } pub fn get_extra_bits_stored(&self) -> u32 { self.extra_bits_stored } pub fn clear_extra_bits_stored(&mut self) { self.extra_bits_stored = 0; } pub fn set_extra_bits_stored(&mut self, v: u32) { self.extra_bits_stored = v; } pub fn get_elems(&self) -> &[u8] { &self.elems } pub fn clear_elems(&mut self) { self.elems.clear(); } pub fn set_elems(&mut self, v: ::std::vec::Vec<u8>) { self.elems = v; } pub fn mut_elems(&mut self) -> &mut ::std::vec::Vec<u8> { &mut self.elems } pub fn take_elems(&mut self) -> ::std::vec::Vec<u8> { ::std::mem::replace(&mut self.elems, ::std::vec::Vec::new()) } } impl ::protobuf::Message for CompactBitArray { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { if wire_type != ::protobuf::wire_format::WireTypeVarint { return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); } let tmp = is.read_uint32()?; self.extra_bits_stored = tmp; }, 2 => { ::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.elems)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; if self.extra_bits_stored != 0 { my_size += ::protobuf::rt::value_size(1, self.extra_bits_stored, ::protobuf::wire_format::WireTypeVarint); } if !self.elems.is_empty() { my_size += ::protobuf::rt::bytes_size(2, &self.elems); } my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { if self.extra_bits_stored != 0 { os.write_uint32(1, self.extra_bits_stored)?; } if !self.elems.is_empty() { os.write_bytes(2, &self.elems)?; } os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> CompactBitArray { CompactBitArray::new() } fn descriptor_static() -> &'static 
::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>( "extra_bits_stored", |m: &CompactBitArray| { &m.extra_bits_store
random
[ { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/crypto/proof.rs", "rank": 0, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/tx/tx.rs", "rank": 1, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/oracle/tx.rs", "rank": 2, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/crypto/keys.rs", "rank": 3, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/distribution/distribution.rs", "rank": 4, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/treasury/treasury.rs", "rank": 5, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/canonical.rs", "rank": 6, "score": 222119.91363063856 }, { "content": "pub fn 
file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/distribution/tx.rs", "rank": 7, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/authz/genesis.rs", "rank": 8, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/auth/genesis.rs", "rank": 9, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/auth/query.rs", "rank": 10, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/distribution/query.rs", "rank": 11, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/staking/tx.rs", "rank": 12, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/pagination.rs", "rank": 13, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static 
::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/staking/authz.rs", "rank": 14, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/coin.rs", "rank": 15, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/tx/service.rs", "rank": 16, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/vesting/vesting.rs", "rank": 17, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/oracle/query.rs", "rank": 18, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/p2p/pex.rs", "rank": 19, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/tx/signing.rs", "rank": 20, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n 
file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/reflection.rs", "rank": 21, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/wasm/wasm.rs", "rank": 22, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/params.rs", "rank": 23, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/snapshot.rs", "rank": 24, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/authz/tx.rs", "rank": 25, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/market/market.rs", "rank": 26, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/auth/auth.rs", "rank": 27, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n 
parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/treasury/query.rs", "rank": 28, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/authz/event.rs", "rank": 29, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/staking/query.rs", "rank": 30, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/block.rs", "rank": 31, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/slashing/tx.rs", "rank": 32, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/treasury/genesis.rs", "rank": 34, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/authz/query.rs", "rank": 35, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n 
})\n\n}\n", "file_path": "rust/src/cosmos/slashing/genesis.rs", "rank": 36, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/market/query.rs", "rank": 37, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/staking/genesis.rs", "rank": 38, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/crypto/keys.rs", "rank": 39, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/oracle/oracle.rs", "rank": 40, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/market/tx.rs", "rank": 41, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/staking/staking.rs", "rank": 42, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": 
"rust/src/cosmos/authz/authz.rs", "rank": 43, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/slashing/query.rs", "rank": 44, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/validator.rs", "rank": 45, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/events.rs", "rank": 46, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/p2p/conn.rs", "rank": 47, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/slashing/slashing.rs", "rank": 48, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/wasm/query.rs", "rank": 49, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": 
"rust/src/protos/wasm/genesis.rs", "rank": 50, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/distribution/genesis.rs", "rank": 51, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/market/genesis.rs", "rank": 52, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/kv.rs", "rank": 53, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/query.rs", "rank": 54, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/vesting/tx.rs", "rank": 55, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/tx/service.rs", "rank": 56, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/p2p/types.rs", "rank": 57, 
"score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/types.rs", "rank": 58, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/listening.rs", "rank": 59, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/types/evidence.rs", "rank": 60, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/abci/types.rs", "rank": 61, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/abci.rs", "rank": 62, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/tendermint/version/types.rs", "rank": 63, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/oracle/genesis.rs", "rank": 64, "score": 222119.91363063856 
}, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/protos/wasm/tx.rs", "rank": 65, "score": 222119.91363063856 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "rust/src/cosmos/base/commit_info.rs", "rank": 66, "score": 220818.18293276653 }, { "content": " public static final int NEW_VALUE = 0;\n", "file_path": "java/src/main/java/tendermint/abci/Types.java", "rank": 67, "score": 150570.3655066183 }, { "content": " private static final cosmos.crypto.secp256r1.Keys.PubKey DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/secp256r1/Keys.java", "rank": 68, "score": 146799.66067303394 }, { "content": " private static final cosmos.crypto.ed25519.Keys.PubKey DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/ed25519/Keys.java", "rank": 69, "score": 146799.66067303394 }, { "content": " private static final cosmos.crypto.secp256k1.Keys.PubKey DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/secp256k1/Keys.java", "rank": 70, "score": 146799.66067303394 }, { "content": " public static Builder newBuilder(cosmos.crypto.ed25519.Keys.PubKey prototype) {\n\n return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n", "file_path": "java/src/main/java/cosmos/crypto/ed25519/Keys.java", "rank": 71, "score": 146736.82238126677 }, { "content": " public static Builder newBuilder(cosmos.crypto.secp256r1.Keys.PubKey prototype) {\n\n return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n", "file_path": "java/src/main/java/cosmos/crypto/secp256r1/Keys.java", "rank": 72, "score": 146736.82238126677 }, { "content": " @java.lang.Override\n\n @SuppressWarnings({\"unused\"})\n\n protected java.lang.Object 
newInstance(\n\n UnusedPrivateParameter unused) {\n\n return new PubKey();\n", "file_path": "java/src/main/java/cosmos/crypto/ed25519/Keys.java", "rank": 73, "score": 146736.82238126677 }, { "content": " @java.lang.Override\n\n @SuppressWarnings({\"unused\"})\n\n protected java.lang.Object newInstance(\n\n UnusedPrivateParameter unused) {\n\n return new PubKey();\n", "file_path": "java/src/main/java/cosmos/crypto/secp256k1/Keys.java", "rank": 74, "score": 146736.82238126677 }, { "content": " public static Builder newBuilder(cosmos.crypto.secp256k1.Keys.PubKey prototype) {\n\n return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n", "file_path": "java/src/main/java/cosmos/crypto/secp256k1/Keys.java", "rank": 75, "score": 146736.82238126677 }, { "content": " @java.lang.Override\n\n @SuppressWarnings({\"unused\"})\n\n protected java.lang.Object newInstance(\n\n UnusedPrivateParameter unused) {\n\n return new PubKey();\n", "file_path": "java/src/main/java/cosmos/crypto/secp256r1/Keys.java", "rank": 76, "score": 146736.82238126677 }, { "content": " public static Builder newBuilder(tendermint.p2p.Types.DefaultNodeInfoOther prototype) {\n\n return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n", "file_path": "java/src/main/java/tendermint/p2p/Types.java", "rank": 77, "score": 146558.89859155903 }, { "content": " @java.lang.Override\n\n @SuppressWarnings({\"unused\"})\n\n protected java.lang.Object newInstance(\n\n UnusedPrivateParameter unused) {\n\n return new DefaultNodeInfo();\n", "file_path": "java/src/main/java/tendermint/p2p/Types.java", "rank": 78, "score": 146558.89859155903 }, { "content": " @java.lang.Override\n\n @SuppressWarnings({\"unused\"})\n\n protected java.lang.Object newInstance(\n\n UnusedPrivateParameter unused) {\n\n return new DefaultNodeInfoOther();\n", "file_path": "java/src/main/java/tendermint/p2p/Types.java", "rank": 79, "score": 146558.89859155903 }, { "content": " public static Builder 
newBuilder(tendermint.p2p.Types.DefaultNodeInfo prototype) {\n\n return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n", "file_path": "java/src/main/java/tendermint/p2p/Types.java", "rank": 80, "score": 146558.89859155903 }, { "content": " @java.lang.Deprecated\n\n public static SumCase valueOf(int value) {\n\n return forNumber(value);\n", "file_path": "java/src/main/java/cosmos/tx/signing/v1beta1/Signing.java", "rank": 81, "score": 145688.9715157929 }, { "content": " private final int value;\n", "file_path": "java/src/main/java/cosmos/tx/signing/v1beta1/Signing.java", "rank": 82, "score": 145682.6453459394 }, { "content": " public static cosmos.crypto.secp256r1.Keys.PubKey getDefaultInstance() {\n\n return DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/secp256r1/Keys.java", "rank": 83, "score": 145557.88909819903 }, { "content": " public static cosmos.crypto.secp256k1.Keys.PubKey getDefaultInstance() {\n\n return DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/secp256k1/Keys.java", "rank": 84, "score": 145557.88909819903 }, { "content": " public static cosmos.crypto.ed25519.Keys.PubKey getDefaultInstance() {\n\n return DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/ed25519/Keys.java", "rank": 85, "score": 145557.88909819903 }, { "content": " private static final ibc.lightclients.solomachine.v1.Solomachine.SignatureAndData DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/ibc/lightclients/solomachine/v1/Solomachine.java", "rank": 86, "score": 145519.85639344656 }, { "content": " private static final cosmos.crypto.multisig.v1beta1.Multisig.MultiSignature DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/crypto/multisig/v1beta1/Multisig.java", "rank": 87, "score": 145519.85639344656 }, { "content": " private static final ibc.lightclients.solomachine.v2.Solomachine.SignatureAndData DEFAULT_INSTANCE;\n", "file_path": 
"java/src/main/java/ibc/lightclients/solomachine/v2/Solomachine.java", "rank": 88, "score": 145519.85639344656 }, { "content": " private static final cosmos.tx.signing.v1beta1.Signing.SignatureDescriptor DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/tx/signing/v1beta1/Signing.java", "rank": 89, "score": 145519.85639344656 }, { "content": " private static final cosmos.tx.signing.v1beta1.Signing.SignatureDescriptors DEFAULT_INSTANCE;\n", "file_path": "java/src/main/java/cosmos/tx/signing/v1beta1/Signing.java", "rank": 90, "score": 145519.85639344656 }, { "content": " @java.lang.Override\n\n protected Builder newBuilderForType(\n\n com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n\n Builder builder = new Builder(parent);\n\n return builder;\n", "file_path": "java/src/main/java/cosmos/crypto/ed25519/Keys.java", "rank": 91, "score": 145502.45236981037 }, { "content": " @java.lang.Override\n\n protected Builder newBuilderForType(\n\n com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n\n Builder builder = new Builder(parent);\n\n return builder;\n", "file_path": "java/src/main/java/cosmos/crypto/secp256r1/Keys.java", "rank": 92, "score": 145502.45236981037 }, { "content": "\n\n\n\n pub fn get_port(&self) -> u32 {\n\n self.port\n\n }\n\n pub fn clear_port(&mut self) {\n\n self.port = 0;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_port(&mut self, v: u32) {\n\n self.port = v;\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for NetAddress {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n", "file_path": "rust/src/tendermint/p2p/types.rs", "rank": 93, "score": 44.701996313146836 }, { "content": "\n\n // Param is passed by value, moved\n\n pub fn set_signatures(&mut self, v: ::protobuf::RepeatedField<::std::vec::Vec<u8>>) {\n\n self.signatures = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n pub fn mut_signatures(&mut self) -> &mut ::protobuf::RepeatedField<::std::vec::Vec<u8>> {\n\n &mut 
self.signatures\n\n }\n\n\n\n // Take field\n\n pub fn take_signatures(&mut self) -> ::protobuf::RepeatedField<::std::vec::Vec<u8>> {\n\n ::std::mem::replace(&mut self.signatures, ::protobuf::RepeatedField::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for TxRaw {\n\n fn is_initialized(&self) -> bool {\n\n true\n", "file_path": "rust/src/cosmos/tx/tx.rs", "rank": 94, "score": 43.39341530623332 }, { "content": " pub fn new() -> Request {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // .tendermint.abci.RequestEcho echo = 1;\n\n\n\n\n\n pub fn get_echo(&self) -> &RequestEcho {\n\n match self.value {\n\n ::std::option::Option::Some(Request_oneof_value::echo(ref v)) => v,\n\n _ => <RequestEcho as ::protobuf::Message>::default_instance(),\n\n }\n\n }\n\n pub fn clear_echo(&mut self) {\n\n self.value = ::std::option::Option::None;\n\n }\n\n\n\n pub fn has_echo(&self) -> bool {\n\n match self.value {\n\n ::std::option::Option::Some(Request_oneof_value::echo(..)) => true,\n", "file_path": "rust/src/tendermint/abci/types.rs", "rank": 95, "score": 43.042245569444304 }, { "content": "}\n\n\n\nimpl PartSetHeader {\n\n pub fn new() -> PartSetHeader {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // uint32 total = 1;\n\n\n\n\n\n pub fn get_total(&self) -> u32 {\n\n self.total\n\n }\n\n pub fn clear_total(&mut self) {\n\n self.total = 0;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_total(&mut self, v: u32) {\n\n self.total = v;\n", "file_path": "rust/src/tendermint/types/types.rs", "rank": 96, "score": 42.574736918599115 }, { "content": " // Mutable pointer to the field.\n\n pub fn mut_signatures(&mut self) -> &mut ::protobuf::RepeatedField<SignatureDescriptor> {\n\n &mut self.signatures\n\n }\n\n\n\n // Take field\n\n pub fn take_signatures(&mut self) -> ::protobuf::RepeatedField<SignatureDescriptor> {\n\n ::std::mem::replace(&mut self.signatures, ::protobuf::RepeatedField::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for 
SignatureDescriptors {\n\n fn is_initialized(&self) -> bool {\n\n for v in &self.signatures {\n\n if !v.is_initialized() {\n\n return false;\n\n }\n\n };\n\n true\n\n }\n", "file_path": "rust/src/cosmos/tx/signing.rs", "rank": 97, "score": 42.53939723196437 }, { "content": "impl SignatureDescriptors {\n\n pub fn new() -> SignatureDescriptors {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // repeated .cosmos.tx.signing.v1beta1.SignatureDescriptor signatures = 1;\n\n\n\n\n\n pub fn get_signatures(&self) -> &[SignatureDescriptor] {\n\n &self.signatures\n\n }\n\n pub fn clear_signatures(&mut self) {\n\n self.signatures.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_signatures(&mut self, v: ::protobuf::RepeatedField<SignatureDescriptor>) {\n\n self.signatures = v;\n\n }\n\n\n", "file_path": "rust/src/cosmos/tx/signing.rs", "rank": 98, "score": 42.483923472031194 } ]
Rust
tss-esapi/tests/integration_tests/context_tests/tpm_commands/non_volatile_storage_tests.rs
Superhepper/rust-tss-esapi
a6ae84793e73b10dd672b613ada820566c84fe85
mod test_nv_define_space { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_define_space_failures() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500015).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .unwrap(); let platform_nv_index_attributes = NvIndexAttributesBuilder::new() .with_pp_write(true) .with_pp_read(true) .with_platform_create(true) .build() .expect("Failed to create platform nv index attributes"); let platform_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(platform_nv_index_attributes) .with_data_area_size(32) .build() .unwrap(); let _ = context .nv_define_space(Provision::Platform, None, &owner_nv_public) .unwrap_err(); let _ = context .nv_define_space(Provision::Owner, None, &platform_nv_public) .unwrap_err(); } #[test] fn test_nv_define_space() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500016).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let 
platform_nv_index_attributes = NvIndexAttributesBuilder::new() .with_pp_write(true) .with_pp_read(true) .with_platform_create(true) .build() .expect("Failed to create platform nv index attributes"); let platform_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(platform_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for platform"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); let platform_nv_index_handle = context .nv_define_space(Provision::Platform, None, &platform_nv_public) .expect("Call to nv_define_space failed"); let _ = context .nv_undefine_space(Provision::Platform, platform_nv_index_handle) .expect("Call to nv_undefine_space failed"); } } mod test_nv_undefine_space { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_undefine_space() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500017).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let _ = context 
.nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); } } mod test_nv_read_public { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_read_public() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500019).unwrap(); let nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let expected_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build the expected NvPublic"); let nv_index_handle = context .nv_define_space(Provision::Owner, None, &expected_nv_public) .expect("Call to nv_define_space failed"); let read_public_result = context.nv_read_public(nv_index_handle); let _ = context .nv_undefine_space(Provision::Owner, nv_index_handle) .unwrap(); if let Err(e) = read_public_result { panic!("Failed to read public of nv index: {}", e); } let (actual_nv_public, _name) = read_public_result.unwrap(); assert_eq!(expected_nv_public, actual_nv_public); } } mod test_nv_write { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{ algorithm::HashingAlgorithm, resource_handles::{NvAuth, Provision}, }, nv::storage::NvPublicBuilder, structures::MaxNvBuffer, }; #[test] fn test_nv_write() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500018).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() 
.expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let write_result = context.nv_write( NvAuth::Owner, owner_nv_index_handle, &MaxNvBuffer::try_from([1, 2, 3, 4, 5, 6, 7].to_vec()).unwrap(), 0, ); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); if let Err(e) = write_result { panic!("Failed to perform nv write: {}", e); } } } mod test_nv_read { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{ algorithm::HashingAlgorithm, resource_handles::{NvAuth, Provision}, }, nv::storage::NvPublicBuilder, structures::MaxNvBuffer, }; #[test] fn test_nv_read() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500020).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let value = [1, 2, 3, 4, 5, 6, 7]; let expected_data = MaxNvBuffer::try_from(value.to_vec()).expect("Failed to create MaxBuffer from data"); let write_result = 
context.nv_write(NvAuth::Owner, owner_nv_index_handle, &expected_data, 0); let read_result = context.nv_read(NvAuth::Owner, owner_nv_index_handle, value.len() as u16, 0); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); if let Err(e) = write_result { panic!("Failed to perform nv write: {}", e); } if let Err(e) = read_result { panic!("Failed to read public of nv index: {}", e); } let actual_data = read_result.unwrap(); assert_eq!(expected_data, actual_data); } }
mod test_nv_define_space { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_define_space_failures() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500015).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .unwrap(); let platform_nv_index_attributes = NvIndexAttributesBuilder::new() .with_pp_write(true) .with_pp_read(true) .with_platform_create(true) .build() .expect("Failed to create platform nv index attributes"); let platform_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(platform_nv_index_attributes) .with_data_area_size(32) .build() .unwrap(); let _ = context .nv_define_space(Provision::Platform, None, &owner_nv_public) .unwrap_err(); let _ = context .nv_define_space(Provision::Owner, None, &platform_nv_public) .unwrap_err(); } #[test] fn test_nv_define_space() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500016).unwrap(); le
} mod test_nv_undefine_space { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_undefine_space() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500017).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); } } mod test_nv_read_public { use crate::common::create_ctx_with_session; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{algorithm::HashingAlgorithm, resource_handles::Provision}, nv::storage::NvPublicBuilder, }; #[test] fn test_nv_read_public() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500019).unwrap(); let nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let expected_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build the expected NvPublic"); let nv_index_handle = context .nv_define_space(Provision::Owner, None, 
&expected_nv_public) .expect("Call to nv_define_space failed"); let read_public_result = context.nv_read_public(nv_index_handle); let _ = context .nv_undefine_space(Provision::Owner, nv_index_handle) .unwrap(); if let Err(e) = read_public_result { panic!("Failed to read public of nv index: {}", e); } let (actual_nv_public, _name) = read_public_result.unwrap(); assert_eq!(expected_nv_public, actual_nv_public); } } mod test_nv_write { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{ algorithm::HashingAlgorithm, resource_handles::{NvAuth, Provision}, }, nv::storage::NvPublicBuilder, structures::MaxNvBuffer, }; #[test] fn test_nv_write() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500018).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let write_result = context.nv_write( NvAuth::Owner, owner_nv_index_handle, &MaxNvBuffer::try_from([1, 2, 3, 4, 5, 6, 7].to_vec()).unwrap(), 0, ); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); if let Err(e) = write_result { panic!("Failed to perform nv write: {}", e); } } } mod test_nv_read { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::NvIndexAttributesBuilder, handles::NvIndexTpmHandle, interface_types::{ 
algorithm::HashingAlgorithm, resource_handles::{NvAuth, Provision}, }, nv::storage::NvPublicBuilder, structures::MaxNvBuffer, }; #[test] fn test_nv_read() { let mut context = create_ctx_with_session(); let nv_index = NvIndexTpmHandle::new(0x01500020).unwrap(); let owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let value = [1, 2, 3, 4, 5, 6, 7]; let expected_data = MaxNvBuffer::try_from(value.to_vec()).expect("Failed to create MaxBuffer from data"); let write_result = context.nv_write(NvAuth::Owner, owner_nv_index_handle, &expected_data, 0); let read_result = context.nv_read(NvAuth::Owner, owner_nv_index_handle, value.len() as u16, 0); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); if let Err(e) = write_result { panic!("Failed to perform nv write: {}", e); } if let Err(e) = read_result { panic!("Failed to read public of nv index: {}", e); } let actual_data = read_result.unwrap(); assert_eq!(expected_data, actual_data); } }
t owner_nv_index_attributes = NvIndexAttributesBuilder::new() .with_owner_write(true) .with_owner_read(true) .build() .expect("Failed to create owner nv index attributes"); let owner_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(owner_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for owner"); let platform_nv_index_attributes = NvIndexAttributesBuilder::new() .with_pp_write(true) .with_pp_read(true) .with_platform_create(true) .build() .expect("Failed to create platform nv index attributes"); let platform_nv_public = NvPublicBuilder::new() .with_nv_index(nv_index) .with_index_name_algorithm(HashingAlgorithm::Sha256) .with_index_attributes(platform_nv_index_attributes) .with_data_area_size(32) .build() .expect("Failed to build NvPublic for platform"); let owner_nv_index_handle = context .nv_define_space(Provision::Owner, None, &owner_nv_public) .expect("Call to nv_define_space failed"); let _ = context .nv_undefine_space(Provision::Owner, owner_nv_index_handle) .expect("Call to nv_undefine_space failed"); let platform_nv_index_handle = context .nv_define_space(Provision::Platform, None, &platform_nv_public) .expect("Call to nv_define_space failed"); let _ = context .nv_undefine_space(Provision::Platform, platform_nv_index_handle) .expect("Call to nv_undefine_space failed"); }
function_block-function_prefixed
[ { "content": "fn write_nv_index(context: &mut Context, nv_index: NvIndexTpmHandle) -> NvIndexHandle {\n\n // Create owner nv public.\n\n let owner_nv_index_attributes = NvIndexAttributesBuilder::new()\n\n .with_owner_write(true)\n\n .with_owner_read(true)\n\n .with_pp_read(true)\n\n .with_owner_read(true)\n\n .build()\n\n .expect(\"Failed to create owner nv index attributes\");\n\n\n\n let owner_nv_public = NvPublicBuilder::new()\n\n .with_nv_index(nv_index)\n\n .with_index_name_algorithm(HashingAlgorithm::Sha256)\n\n .with_index_attributes(owner_nv_index_attributes)\n\n .with_data_area_size(1540)\n\n .build()\n\n .unwrap();\n\n\n\n let owner_nv_index_handle = context\n\n .nv_define_space(Provision::Owner, None, &owner_nv_public)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/nv_tests.rs", "rank": 0, "score": 380480.6304726091 }, { "content": "#[test]\n\nfn test_nv_index_attributes_builder_missing_read_attribute_failure() {\n\n // Test missing read error\n\n\n\n // Building with only PP write set this should result in an error\n\n single_attribute_error!(with_pp_write);\n\n // Building with only owner write set this should result in an error\n\n single_attribute_error!(with_owner_write);\n\n // Building with only auth write set this should result in an error\n\n single_attribute_error!(with_auth_write);\n\n // Building with only policy write set this should result in an error\n\n single_attribute_error!(with_policy_write);\n\n // Building with all of them set still results in an error\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing)),\n\n NvIndexAttributesBuilder::new()\n\n .with_pp_write(true)\n\n .with_owner_write(true)\n\n .with_auth_write(true)\n\n .with_policy_write(true)\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 1, "score": 316256.3064885266 }, { "content": "#[test]\n\nfn 
test_nv_index_attributes_builder_missing_write_attribute_failure() {\n\n // Test missing write error\n\n\n\n // Building with only PP read set this should result in an error\n\n single_attribute_error!(with_pp_read);\n\n // Building with only owner read set this should result in an error\n\n single_attribute_error!(with_owner_read);\n\n // Building with only auth read set this should result in an error\n\n single_attribute_error!(with_auth_read);\n\n // Building with only policy read set this should result in an error\n\n single_attribute_error!(with_policy_read);\n\n // Building with all of them set still results in an error\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing)),\n\n NvIndexAttributesBuilder::new()\n\n .with_pp_read(true)\n\n .with_owner_read(true)\n\n .with_auth_read(true)\n\n .with_policy_read(true)\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 2, "score": 316256.3064885266 }, { "content": "#[test]\n\nfn test_nv_index_attributes_builder_missing_no_da_attribute_with_pin_fail_index_type() {\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing)),\n\n NvIndexAttributesBuilder::new()\n\n .with_nv_index_type(NvIndexType::PinFail)\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 3, "score": 315600.1731738784 }, { "content": "#[test]\n\nfn test_invalid_index_type_value() {\n\n // 15(1111) - invalid\n\n let invalid_15 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1111_0000u32);\n\n let _ = invalid_15.index_type().unwrap_err();\n\n\n\n // 14(1110) - invalid\n\n let invalid_14 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1110_0000u32);\n\n let _ = invalid_14.index_type().unwrap_err();\n\n\n\n // 13(1101) - invalid\n\n let invalid_13 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1101_0000u32);\n\n let _ = 
invalid_13.index_type().unwrap_err();\n\n\n\n // 12(1100) - invalid\n\n let invalid_12 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1100_0000u32);\n\n let _ = invalid_12.index_type().unwrap_err();\n\n\n\n // 11(1011) - invalid\n\n let invalid_11 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1011_0000u32);\n\n let _ = invalid_11.index_type().unwrap_err();\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 4, "score": 302865.7350014262 }, { "content": "#[test]\n\nfn test_attributes_builder() {\n\n let mut builder = NvIndexAttributesBuilder::new();\n\n\n\n // Need to set a read and write in order to be able to build.\n\n builder = builder.with_pp_write(true).with_pp_read(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_0000_0000_0001u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with pp_write and pp_read added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_owner_write(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_0000_0000_0011u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with owner_write added\")\n\n .0\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 5, "score": 302793.5512715386 }, { "content": "#[allow(dead_code)]\n\npub fn create_ctx_with_session() -> Context {\n\n let mut ctx = create_ctx_without_session();\n\n let session = ctx\n\n .start_auth_session(\n\n None,\n\n None,\n\n None,\n\n SessionType::Hmac,\n\n SymmetricDefinition::AES_256_CFB,\n\n HashingAlgorithm::Sha256,\n\n )\n\n .unwrap();\n\n let (session_attributes, session_attributes_mask) = SessionAttributesBuilder::new()\n\n .with_decrypt(true)\n\n .with_encrypt(true)\n\n .build();\n\n ctx.tr_sess_set_attributes(\n\n session.unwrap(),\n\n session_attributes,\n\n session_attributes_mask,\n\n )\n\n .unwrap();\n\n ctx.set_sessions((session, None, None));\n\n\n\n ctx\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/common/mod.rs", "rank": 6, "score": 286335.7703546352 }, { "content": "#[allow(dead_code)]\n\npub fn create_ctx_without_session() -> Context {\n\n let tcti = create_tcti();\n\n Context::new(tcti).unwrap()\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 7, "score": 281851.72505854926 }, { "content": "#[test]\n\nfn test_none_set() {\n\n let attributes = AlgorithmAttributes::from(0x0);\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 8, "score": 252982.35415261865 }, { "content": "/// Lists all the currently defined NV Indexes' names and public components\n\npub fn list(context: &mut Context) -> Result<Vec<(NvPublic, Name)>> {\n\n context.execute_without_session(|ctx| {\n\n ctx.get_capability(\n\n CapabilityType::Handles,\n\n TPM2_NV_INDEX_FIRST,\n\n TPM2_PT_NV_INDEX_MAX,\n\n )\n\n .and_then(|(capability_data, _)| match capability_data {\n\n CapabilityData::Handles(tpm_handles) => Ok(tpm_handles),\n\n _ => Err(Error::local_error(WrapperErrorKind::WrongValueFromTpm)),\n\n })\n\n .and_then(|tpm_handles| {\n\n tpm_handles\n\n .iter()\n\n .map(|&tpm_handle| get_nv_index_info(ctx, NvIndexTpmHandle::try_from(tpm_handle)?))\n\n .collect()\n\n })\n\n })\n\n}\n", "file_path": "tss-esapi/src/abstraction/nv.rs", "rank": 9, "score": 241754.1292079315 }, { "content": "#[test]\n\nfn test_create_and_use_ak() {\n\n let mut context = 
create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n let ak_auth = Auth::try_from(vec![0x1, 0x2, 0x42]).unwrap();\n\n let att_key = ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::RsaPss,\n\n Some(&ak_auth),\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let loaded_ak = ak::load_ak(\n\n &mut context,\n\n ek_rsa,\n\n Some(&ak_auth),\n\n att_key.out_private,\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 10, "score": 239140.4398593869 }, { "content": "/// Get the TPM vendor name\n\npub fn get_tpm_vendor(context: &mut Context) -> Result<String> {\n\n // Retrieve the TPM property values\n\n Ok([\n\n PropertyTag::VendorString1,\n\n PropertyTag::VendorString2,\n\n PropertyTag::VendorString3,\n\n PropertyTag::VendorString4,\n\n ]\n\n .iter()\n\n // Retrieve property values\n\n .map(|propid| context.get_tpm_property(*propid))\n\n // Collect and return an error if we got one\n\n .collect::<Result<Vec<Option<u32>>>>()?\n\n .iter()\n\n // Filter out the Option::None values\n\n .filter_map(|x| *x)\n\n // Filter out zero values\n\n .filter(|x| *x != 0)\n\n // Map through int_to_string\n\n .map(tpm_int_to_string)\n\n // Collect to a single string\n\n .collect())\n\n}\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 11, "score": 237458.46645308775 }, { "content": "fn create_ctx() -> TransientKeyContext {\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 12, "score": 235212.26253719584 }, { "content": "#[test]\n\nfn test_conversion_from_tss_type() {\n\n assert_eq!(\n\n NvIndexType::Ordinary,\n\n NvIndexType::try_from(TPM2_NT_ORDINARY).unwrap()\n\n );\n\n assert_eq!(\n\n NvIndexType::Counter,\n\n 
NvIndexType::try_from(TPM2_NT_COUNTER).unwrap()\n\n );\n\n assert_eq!(\n\n NvIndexType::Bits,\n\n NvIndexType::try_from(TPM2_NT_BITS).unwrap()\n\n );\n\n assert_eq!(\n\n NvIndexType::Extend,\n\n NvIndexType::try_from(TPM2_NT_EXTEND).unwrap()\n\n );\n\n assert_eq!(\n\n NvIndexType::PinFail,\n\n NvIndexType::try_from(TPM2_NT_PIN_FAIL).unwrap()\n\n );\n\n assert_eq!(\n\n NvIndexType::PinPass,\n\n NvIndexType::try_from(TPM2_NT_PIN_PASS).unwrap()\n\n );\n\n\n\n const INVALID_VALUE: TPM2_NT = 15;\n\n let _ = NvIndexType::try_from(INVALID_VALUE).unwrap_err();\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/constants_tests/nv_index_type_tests.rs", "rank": 13, "score": 232886.8860712137 }, { "content": "#[test]\n\nfn test_conversion_to_tss_type() {\n\n assert_eq!(TPM2_NT_ORDINARY, TPM2_NT::from(NvIndexType::Ordinary));\n\n assert_eq!(TPM2_NT_COUNTER, TPM2_NT::from(NvIndexType::Counter));\n\n assert_eq!(TPM2_NT_BITS, TPM2_NT::from(NvIndexType::Bits));\n\n assert_eq!(TPM2_NT_EXTEND, TPM2_NT::from(NvIndexType::Extend));\n\n assert_eq!(TPM2_NT_PIN_FAIL, TPM2_NT::from(NvIndexType::PinFail));\n\n assert_eq!(TPM2_NT_PIN_PASS, TPM2_NT::from(NvIndexType::PinPass));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/constants_tests/nv_index_type_tests.rs", "rank": 14, "score": 232886.8860712137 }, { "content": "#[test]\n\nfn create_ecc_key() {\n\n let mut ctx = create_ctx();\n\n let _ = ctx\n\n .create_key(\n\n KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n },\n\n 16,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 15, "score": 232874.62744783657 }, { "content": "// Copyright 2020 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nuse tss_esapi::{\n\n attributes::{NvIndexAttributes, 
NvIndexAttributesBuilder},\n\n constants::NvIndexType,\n\n Error, WrapperErrorKind,\n\n};\n\n\n\n#[test]\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 16, "score": 230637.79671046027 }, { "content": " {\n\n // PinPass = 1001 (7,4)\n\n builder = builder.with_nv_index_type(NvIndexType::PinPass);\n\n let attributes = builder\n\n .build()\n\n .expect(\"Failed to build with nv index type PinPass added\");\n\n assert_eq!(0b0000_0000_0000_0001_0000_0000_1001_1111u32, attributes.0);\n\n assert_eq!(NvIndexType::PinPass, attributes.index_type().unwrap());\n\n }\n\n\n\n // (8,9 Reserved)\n\n builder = builder.with_policy_delete(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_0100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with policy_delete added\")\n\n .0\n\n );\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 17, "score": 230637.7618599941 }, { "content": "\n\n // 4(0100) - valid\n\n\n\n // 3(0011) - invalid\n\n let invalid_3 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_0011_0000u32);\n\n let _ = invalid_3.index_type().unwrap_err();\n\n\n\n // 2(0010) - valid\n\n\n\n // 1(0001) - valid\n\n\n\n // 0(0000) - valid\n\n}\n\n\n\nmacro_rules! 
single_attribute_error {\n\n ($method:ident) => {\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing)),\n\n NvIndexAttributesBuilder::new().$method(true).build()\n\n );\n\n };\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 18, "score": 230635.15582003052 }, { "content": "\n\n // 10(1010) - invalid\n\n let invalid_10 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_1011_0000u32);\n\n let _ = invalid_10.index_type().unwrap_err();\n\n\n\n // 9(1001) - Valid\n\n\n\n // 8(1000) - Valid\n\n\n\n // 7(0111) - invalid\n\n let invalid_7 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_0111_0000u32);\n\n let _ = invalid_7.index_type().unwrap_err();\n\n\n\n // 6(0110) - invalid\n\n let invalid_6 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_0110_0000u32);\n\n let _ = invalid_6.index_type().unwrap_err();\n\n\n\n // 5(0101) - invalid\n\n let invalid_5 = NvIndexAttributes(0b0000_0000_0000_0000_0000_0000_0101_0000u32);\n\n let _ = invalid_5.index_type().unwrap_err();\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 19, "score": 230635.0814788152 }, { "content": " .expect(\"Failed to build with platform_create added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_read_stclear(true);\n\n assert_eq!(\n\n 0b1111_1110_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with read_stclear added\")\n\n .0\n\n );\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 20, "score": 230624.5818873768 }, { "content": " builder\n\n .build()\n\n .expect(\"Failed to build with read_locked added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_written(true);\n\n assert_eq!(\n\n 0b0011_1110_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with written added\")\n\n .0\n\n );\n\n\n\n builder = 
builder.with_platform_create(true);\n\n assert_eq!(\n\n 0b0111_1110_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 21, "score": 230623.98567409246 }, { "content": " .build()\n\n .expect(\"Failed to build with global_lock added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_owner_read(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0011_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with owner_read added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_auth_read(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with auth_read added\")\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 22, "score": 230620.36382612976 }, { "content": " );\n\n\n\n builder = builder.with_auth_write(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_0000_0000_0111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with auth_write added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_policy_write(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_0000_0000_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with policy_write added\")\n\n .0\n\n );\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 23, "score": 230614.05658874701 }, { "content": " .0\n\n );\n\n\n\n builder = builder.with_policy_read(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with policy_read added\")\n\n .0\n\n );\n\n\n\n // Reserved (24, 20)\n\n builder = builder.with_no_da(true);\n\n assert_eq!(\n\n 0b0000_0010_0000_1111_1111_1100_1001_1111u32,\n\n builder.build().expect(\"Failed to build with no_da added\").0\n\n );\n\n\n\n builder = builder.with_orderly(true);\n", 
"file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 24, "score": 230613.99369381726 }, { "content": " assert_eq!(\n\n 0b0000_0110_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with oderly added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_clear_stclear(true);\n\n assert_eq!(\n\n 0b0000_1110_0000_1111_1111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with clear_stclear added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_read_locked(true);\n\n assert_eq!(\n\n 0b0001_1110_0000_1111_1111_1100_1001_1111u32,\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 25, "score": 230613.99369381726 }, { "content": " 0b0000_0000_0000_0001_0011_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with write_define added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_write_stclear(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0111_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with write_stclear added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_global_lock(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_1111_1100_1001_1111u32,\n\n builder\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 26, "score": 230613.99369381726 }, { "content": " builder = builder.with_write_locked(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0000_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with write_locked added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_write_all(true);\n\n assert_eq!(\n\n 0b0000_0000_0000_0001_0001_1100_1001_1111u32,\n\n builder\n\n .build()\n\n .expect(\"Failed to build with write_all added\")\n\n .0\n\n );\n\n\n\n builder = builder.with_write_define(true);\n\n assert_eq!(\n", "file_path": 
"tss-esapi/tests/integration_tests/attributes_tests/nv_index_attributes_tests.rs", "rank": 27, "score": 230613.95236872687 }, { "content": "#[test]\n\nfn create_ecc_key_decryption_scheme() {\n\n let mut ctx = create_ctx();\n\n let _ = ctx\n\n .create_key(\n\n KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDh,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n },\n\n 16,\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 28, "score": 226808.0675935804 }, { "content": "#[allow(dead_code)]\n\npub fn create_tcti() -> TctiNameConf {\n\n setup_logging();\n\n\n\n match env::var(\"TEST_TCTI\") {\n\n Err(_) => TctiNameConf::Mssim(Default::default()),\n\n Ok(tctistr) => TctiNameConf::from_str(&tctistr).expect(\"Error parsing TEST_TCTI\"),\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 29, "score": 216838.26633329218 }, { "content": "#[allow(dead_code)]\n\npub fn create_public_sealed_object() -> Public {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_no_da(true)\n\n .with_admin_with_policy(true)\n\n .with_user_with_auth(true)\n\n .build()\n\n .expect(\"Failed to create object attributes\");\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::KeyedHash)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_auth_policy(&Default::default())\n\n .with_keyed_hash_parameters(PublicKeyedHashParameters::new(KeyedHashScheme::Null))\n\n .with_keyed_hash_unique_identifier(&Default::default())\n\n .build()\n\n .expect(\"Failed to create public structure.\")\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 30, "score": 216838.26633329218 }, { "content": 
"#[test]\n\nfn test_constants() {\n\n assert_eq!(\n\n LocalityAttributes::LOCALITY_ZERO,\n\n LocalityAttributesBuilder::new()\n\n .with_locality(0)\n\n .build()\n\n .expect(\"Failed to build locality attributes\"),\n\n \"LOCALITY_ZERO constant does not have the correct value\"\n\n );\n\n\n\n assert_eq!(\n\n LocalityAttributes::LOCALITY_ONE,\n\n LocalityAttributesBuilder::new()\n\n .with_locality(1)\n\n .build()\n\n .expect(\"Failed to build locality attributes\"),\n\n \"LOCALITY_ONE constant does not have the correct value\"\n\n );\n\n\n\n assert_eq!(\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 31, "score": 208092.9740260106 }, { "content": "#[test]\n\nfn test_conversions() {\n\n for locality in 0u8..=4u8 {\n\n let expected_locality_attributes = LocalityAttributesBuilder::new()\n\n .with_locality(locality)\n\n .build()\n\n .expect(\"Failed to build locality attributes\");\n\n let tpma_locality: TPMA_LOCALITY = expected_locality_attributes.into();\n\n assert_eq!(\n\n 1u8.checked_shl(locality.into())\n\n .expect(\"Unable to create locality value\"),\n\n tpma_locality,\n\n \"Locality did not convert into expected TPMA_LOCALITY value\"\n\n );\n\n assert_eq!(\n\n expected_locality_attributes,\n\n tpma_locality.into(),\n\n \"The locality attributes converted from TPMA_LOCALITY did not match the expected value\"\n\n );\n\n }\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 32, "score": 208092.9740260106 }, { "content": "#[test]\n\nfn test_all_set() {\n\n let attributes = AlgorithmAttributes::from(0xFFFFFFFF);\n\n assert!(\n\n attributes.asymmetric(),\n\n \"'asymmetric' is unexpectedly not set\"\n\n );\n\n assert!(\n\n attributes.symmetric(),\n\n \"'symmetric' is unexpectedly not set\"\n\n );\n\n assert!(attributes.hash(), \"'hash' is unexpectedly not set\");\n\n assert!(attributes.object(), \"'object' is unexpectedly not set\");\n\n 
assert!(attributes.signing(), \"'signing' is unexpectedly not set\");\n\n assert!(\n\n attributes.encrypting(),\n\n \"'encrypting' is unexpectedly not set\"\n\n );\n\n assert!(attributes.method(), \"'method' is unexpectedly not set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 33, "score": 208092.9740260106 }, { "content": "#[test]\n\nfn test_conversions() {\n\n let expected_tpma_algorithm: TPMA_ALGORITHM = 0x16;\n\n let expected_algorithm_attributes = AlgorithmAttributes(expected_tpma_algorithm);\n\n let actual_algorithm_attributes: AlgorithmAttributes = expected_tpma_algorithm.into();\n\n let actual_tpma_algorithm: TPMA_ALGORITHM = expected_algorithm_attributes.into();\n\n\n\n assert_eq!(\n\n expected_algorithm_attributes, actual_algorithm_attributes,\n\n \"AlgorithmAttributes converted from TPMA_ALGORITHM did not contain expected value\"\n\n );\n\n\n\n assert_eq!(\n\n expected_tpma_algorithm, actual_tpma_algorithm,\n\n \"TPMA_ALGORITHM converted from AlgorithmAttributes did not contain expected value\"\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 34, "score": 208092.9740260106 }, { "content": "#[test]\n\nfn test_hash_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(2));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(attributes.hash(), \"'hash' is unexpectedly not set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 35, "score": 
205943.77892249307 }, { "content": "#[test]\n\nfn test_signing_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(8));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(attributes.signing(), \"'signing' is unexpectedly not set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 36, "score": 205943.77892249307 }, { "content": "#[test]\n\nfn test_invalid_locality() {\n\n for locality in 5u8..=31u8 {\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n LocalityAttributesBuilder::new()\n\n .with_locality(locality)\n\n .build(),\n\n \"Locality builder did not produce expected error when using locality {}\",\n\n locality\n\n );\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 37, "score": 205943.77892249307 }, { "content": "#[test]\n\nfn test_object_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(3));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(attributes.object(), \"'object' is unexpectedly not set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 38, "score": 205943.77892249305 }, { "content": "#[test]\n\nfn test_asymmetric_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(0));\n\n assert!(\n\n attributes.asymmetric(),\n\n \"'asymmetric' is unexpectedly not set\"\n\n );\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 39, "score": 205943.77892249307 }, { "content": "#[test]\n\nfn test_encrypting_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(9));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(\n\n attributes.encrypting(),\n\n \"'encrypting' is unexpectedly not set\"\n\n );\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 40, "score": 205943.77892249305 }, { "content": "#[test]\n\nfn test_symmetric_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(1));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(\n\n attributes.symmetric(),\n\n \"'symmetric' is unexpectedly not set\"\n\n );\n\n 
assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 41, "score": 205943.77892249305 }, { "content": "#[test]\n\nfn test_method_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(10));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(attributes.method(), \"'method' is unexpectedly not set\");\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 42, "score": 205943.77892249307 }, { "content": "#[test]\n\nfn test_invalid_locality_combinataions() {\n\n for locality in 0u8..=4u8 {\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n LocalityAttributesBuilder::new()\n\n .with_locality(32)\n\n .with_locality(locality)\n\n .build(),\n\n \"Locality builder did not produce expected error when using locality 32 in combination with locality {}\",\n\n locality,\n\n );\n\n }\n\n\n\n for locality in 32u8..u8::MAX {\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n LocalityAttributesBuilder::new()\n\n .with_locality(1)\n\n .with_locality(locality)\n\n .build(),\n\n \"Locality builder did not produce expected error when using locality 32 in combination 
with locality {}\",\n\n locality,\n\n );\n\n }\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 43, "score": 203855.02439651501 }, { "content": "#[test]\n\nfn test_builder_valid_extended() {\n\n for expected_locality in 32u8..=u8::MAX {\n\n let locality_attributes = LocalityAttributesBuilder::new()\n\n .with_locality(expected_locality)\n\n .build()\n\n .expect(\"Failed to build locality attributes\");\n\n assert!(\n\n locality_attributes.is_extended(),\n\n \"Locality attributes does not indicate to be 'extnded' as expected\"\n\n );\n\n assert_eq!(\n\n expected_locality,\n\n locality_attributes\n\n .as_extended()\n\n .expect(\"Failed to get local attributes as extended\"),\n\n \"The extended value does not match expected value.\",\n\n );\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 44, "score": 203855.02439651501 }, { "content": "#[test]\n\nfn test_invalid_extended_locality() {\n\n for locality in 0u8..=4u8 {\n\n let locality_attributes = LocalityAttributesBuilder::new()\n\n .with_locality(locality)\n\n .build()\n\n .expect(\"Failed to get local attributes as extended\");\n\n\n\n assert!(\n\n !locality_attributes.is_extended(),\n\n \"The non extended locality {} is unexpectedly indicating that it is extended\",\n\n locality\n\n );\n\n\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n locality_attributes.as_extended(),\n\n \"Calling as_extended() on locality {} that is not extended, did not result in the expected error\", locality,\n\n );\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 45, "score": 203855.02439651501 }, { "content": "#[test]\n\nfn new_context() {\n\n let _context = TctiContext::initialize(crate::tcti_ldr_tests::name_conf()).unwrap();\n\n}\n", "file_path": 
"tss-esapi/tests/integration_tests/tcti_ldr_tests/tcti_context_tests.rs", "rank": 46, "score": 202805.4090239477 }, { "content": "#[test]\n\nfn test_builder_valid_non_extended() {\n\n let locality_attributes = LocalityAttributesBuilder::new()\n\n .with_localities(&[0, 1, 2, 3, 4])\n\n .build()\n\n .expect(\"Failed to build locality attributes\");\n\n\n\n assert!(\n\n locality_attributes.locality_zero(),\n\n \"Locality ZERO was not properly set\"\n\n );\n\n assert!(\n\n locality_attributes.locality_one(),\n\n \"Locality ONE was not properly set\"\n\n );\n\n assert!(\n\n locality_attributes.locality_two(),\n\n \"Locality TWO was not properly set\"\n\n );\n\n assert!(\n\n locality_attributes.locality_three(),\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/locality_attributes_tests.rs", "rank": 47, "score": 201823.88358208796 }, { "content": "#[test]\n\nfn list() {\n\n let mut context = create_ctx_with_session();\n\n\n\n let nv_index = NvIndexTpmHandle::new(0x01500015).unwrap();\n\n\n\n assert!(!nv::list(&mut context)\n\n .unwrap()\n\n .iter()\n\n .map(|(public, _)| public.nv_index())\n\n .any(|x| x == nv_index));\n\n\n\n let owner_nv_index_handle = write_nv_index(&mut context, nv_index);\n\n\n\n assert!(nv::list(&mut context)\n\n .unwrap()\n\n .iter()\n\n .map(|(public, _)| public.nv_index())\n\n .any(|x| x == nv_index));\n\n\n\n let _ = context\n\n .nv_undefine_space(Provision::Owner, owner_nv_index_handle)\n\n .unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/nv_tests.rs", "rank": 48, "score": 196941.6307675428 }, { "content": "#[test]\n\nfn test_create_ek() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let _ = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n let _ = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Ecc, None).unwrap();\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ek_tests.rs", "rank": 49, "score": 
194407.93968669235 }, { "content": "#[test]\n\nfn read_full() {\n\n let mut context = create_ctx_with_session();\n\n\n\n let nv_index = NvIndexTpmHandle::new(0x01500015).unwrap();\n\n\n\n let owner_nv_index_handle = write_nv_index(&mut context, nv_index);\n\n\n\n // Now read it back\n\n let read_result = nv::read_full(&mut context, NvAuth::Owner, nv_index);\n\n\n\n let _ = context\n\n .nv_undefine_space(Provision::Owner, owner_nv_index_handle)\n\n .unwrap();\n\n\n\n let read_result = read_result.unwrap();\n\n assert_eq!(read_result.len(), 1540);\n\n assert_eq!(read_result[0..7], [1, 2, 3, 4, 5, 6, 7]);\n\n assert_eq!(read_result[1024..1031], [1, 2, 3, 4, 5, 6, 7]);\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/nv_tests.rs", "rank": 50, "score": 194390.47397378852 }, { "content": "/// Returns the NvPublic and Name associated with an NV index TPM handle\n\nfn get_nv_index_info(\n\n context: &mut Context,\n\n nv_index_tpm_handle: NvIndexTpmHandle,\n\n) -> Result<(NvPublic, Name)> {\n\n context\n\n .tr_from_tpm_public(nv_index_tpm_handle.into())\n\n .and_then(|mut object_handle| {\n\n context\n\n .nv_read_public(NvIndexHandle::from(object_handle))\n\n .map_err(|e| {\n\n let _ = context.tr_close(&mut object_handle);\n\n e\n\n })\n\n .and_then(|(nv_public, name)| {\n\n context.tr_close(&mut object_handle)?;\n\n Ok((nv_public, name))\n\n })\n\n })\n\n}\n\n\n", "file_path": "tss-esapi/src/abstraction/nv.rs", "rank": 51, "score": 192266.11077810547 }, { "content": "#[test]\n\nfn test_create_custom_ak() {\n\n struct StClearKeys;\n\n impl KeyCustomization for &StClearKeys {\n\n fn attributes(\n\n &self,\n\n attributes_builder: ObjectAttributesBuilder,\n\n ) -> ObjectAttributesBuilder {\n\n attributes_builder.with_st_clear(true)\n\n }\n\n }\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n let ak_auth = Auth::try_from(vec![0x1, 0x2, 
0x42]).unwrap();\n\n // Without customization, no st clear\n\n let att_key_without = ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::RsaPss,\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 52, "score": 192101.86462926134 }, { "content": "fn create_validated_test_parameters(\n\n expected_attest_info: AttestInfo,\n\n expected_attestation_type: AttestationType,\n\n) -> (Attest, TPMS_ATTEST) {\n\n let expected_qualified_signer =\n\n Name::try_from(vec![0x0eu8; 64]).expect(\"Failed to create qualified name\");\n\n let expected_extra_data =\n\n Data::try_from(vec![0x0du8; 64]).expect(\"Failed to create extra data\");\n\n let expected_clock_info = ClockInfo::try_from(TPMS_CLOCK_INFO {\n\n clock: 1u64,\n\n resetCount: 2u32,\n\n restartCount: 3u32,\n\n safe: YesNo::Yes.into(),\n\n })\n\n .expect(\"Failed to create clock info\");\n\n let expected_firmware_version = 1u64;\n\n\n\n let expected_tpms_attest = TPMS_ATTEST {\n\n magic: TPM2_GENERATED_VALUE,\n\n type_: expected_attestation_type.into(),\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/attest_tests.rs", "rank": 53, "score": 192095.45224632474 }, { "content": "#[test]\n\nfn test_conversion() {\n\n let expected_index_name =\n\n Name::try_from(vec![0xf0u8; 68]).expect(\"Failed to create index name\");\n\n let expected_offset = 12u16;\n\n let expected_nv_contents =\n\n MaxNvBuffer::try_from(vec![0xfc; 2048]).expect(\"Failed to create nv contents\");\n\n let expected_tpms_nv_certify_info = TPMS_NV_CERTIFY_INFO {\n\n indexName: expected_index_name.clone().into(),\n\n offset: expected_offset,\n\n nvContents: expected_nv_contents.clone().into(),\n\n };\n\n\n\n let nv_certify_info: NvCertifyInfo = expected_tpms_nv_certify_info\n\n .try_into()\n\n .expect(\"Failed to convert TPMS_NV_CERTIFY_INFO into NvCertifyInfo\");\n\n assert_eq!(\n\n &expected_index_name,\n\n nv_certify_info.index_name(),\n\n \"The 
NvCertifyInfo converted from TPMS_NV_CERTIFY_INFO did not contain correct value for 'index name'\",\n\n );\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/nv_certify_info_tests.rs", "rank": 54, "score": 192043.6208430187 }, { "content": "#[test]\n\nfn full_test() {\n\n let mut ctx = create_ctx();\n\n for _ in 0..4 {\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let signature = ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap();\n\n let pub_key = ctx\n\n .load_external_public_key(key.public().clone(), key_params)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 55, "score": 191552.76069265328 }, { "content": "#[test]\n\nfn verify() {\n\n let pub_key = vec![\n\n 0x96, 0xDC, 0x72, 0x77, 0x49, 0x82, 0xFD, 0x2D, 0x06, 0x65, 0x8C, 0xE5, 0x3A, 0xCD, 0xED,\n\n 0xBD, 0x50, 0xD7, 0x6F, 0x3B, 0xE5, 0x6A, 0x76, 0xED, 0x3E, 0xD8, 0xF9, 0x93, 0x40, 0x55,\n\n 0x86, 0x6F, 0xBE, 0x76, 0x60, 0xD2, 0x03, 0x23, 0x59, 0x19, 0x8D, 0xFC, 0x51, 0x6A, 0x95,\n\n 0xC8, 0x5D, 0x5A, 0x89, 0x4D, 0xE5, 0xEA, 0x44, 0x78, 0x29, 0x62, 0xDB, 0x3F, 0xF0, 0xF7,\n\n 0x49, 0x15, 0xA5, 0xAE, 0x6D, 0x81, 0x8F, 0x06, 0x7B, 0x0B, 0x50, 0x7A, 0x2F, 0xEB, 0x00,\n\n 0xB6, 0x12, 0xF3, 0x10, 0xAF, 0x4D, 0x4A, 0xA9, 0xD9, 0x81, 0xBB, 0x1E, 0x2B, 0xDF, 0xB9,\n\n 0x33, 0x3D, 0xD6, 0xB7, 0x8D, 0x23, 0x7C, 0x7F, 0xE7, 0x12, 0x48, 0x4F, 0x26, 0x73, 0xAF,\n\n 0x63, 0x51, 0xA9, 0xDB, 0xA4, 0xAB, 0xB7, 0x27, 0x00, 0xD7, 0x1C, 0xFC, 0x2F, 0x61, 0x2A,\n\n 0xB9, 0x5B, 0x66, 0xA0, 0xE0, 0xD8, 0xF3, 0xD9,\n\n ];\n\n\n\n // \"Les carottes sont cuites.\" hashed with SHA256\n\n let digest = 
Digest::try_from(vec![\n\n 0x02, 0x2b, 0x26, 0xb1, 0xc3, 0x18, 0xdb, 0x73, 0x36, 0xef, 0x6f, 0x50, 0x9c, 0x35, 0xdd,\n\n 0xaa, 0xe1, 0x3d, 0x21, 0xdf, 0x83, 0x68, 0x0f, 0x48, 0xae, 0x5d, 0x8a, 0x5d, 0x37, 0x3c,\n\n 0xc1, 0x05,\n\n ])\n\n .unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 56, "score": 191427.17490809938 }, { "content": "#[test]\n\nfn test_create_ak_rsa_ecc() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n if ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::Sm2,\n\n None,\n\n None,\n\n )\n\n .is_ok()\n\n {\n\n // We can't use unwrap_err because that requires Debug on the T\n\n panic!(\"Should have errored\");\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 57, "score": 189872.69281904187 }, { "content": "#[test]\n\nfn test_create_ak_rsa_rsa() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::RsaPss,\n\n None,\n\n None,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 58, "score": 189872.69281904187 }, { "content": "#[test]\n\nfn ctx_migration_test() {\n\n // Create two key contexts using `Context`, one for an RSA keypair,\n\n // one for just the public part of the key\n\n let mut basic_ctx = crate::common::create_ctx_with_session();\n\n let random_digest = basic_ctx.get_random(16).unwrap();\n\n let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap();\n\n let prim_key_handle = basic_ctx\n\n .create_primary(\n\n Hierarchy::Owner,\n\n 
&create_restricted_decryption_rsa_public(\n\n SymmetricDefinitionObject::AES_256_CFB,\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::create(0).unwrap(),\n\n )\n\n .unwrap(),\n\n Some(&key_auth),\n\n None,\n\n None,\n\n None,\n\n )\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 59, "score": 189334.13256178916 }, { "content": "#[test]\n\nfn full_ecc_test() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n for _ in 0..4 {\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let signature = ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 60, "score": 189334.13256178916 }, { "content": "#[test]\n\nfn encrypt_decrypt() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::Oaep, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (dec_key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let enc_key = ctx\n\n .load_external_public_key(dec_key.public().clone(), key_params)\n\n .unwrap();\n\n let message = vec![0x1, 0x2, 0x3];\n\n\n\n let ciphertext = ctx\n\n .rsa_encrypt(\n\n enc_key,\n\n key_params,\n\n None,\n\n PublicKeyRsa::try_from(message.clone()).unwrap(),\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 61, "score": 189057.95269045516 }, { "content": "#[test]\n\nfn activate_credential() {\n\n // create a Transient key context, generate a key and\n\n // obtain 
the Make Credential parameters\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n let make_cred_params = ctx.get_make_cred_params(obj.clone(), None).unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 62, "score": 189057.95269045516 }, { "content": "#[test]\n\nfn sign_with_no_auth() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, _) = ctx.create_key(key_params, 16).unwrap();\n\n ctx.sign(\n\n key,\n\n key_params,\n\n None,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 63, "score": 189057.95269045516 }, { "content": "#[test]\n\nfn test_nv_into_tpm_type_conversions() {\n\n let expected_tpms_nv_certify_info = TPMS_NV_CERTIFY_INFO {\n\n indexName: Name::try_from(vec![0xf0u8; 68])\n\n .expect(\"Failed to create index name\")\n\n .into(),\n\n offset: 12u16,\n\n nvContents: MaxNvBuffer::try_from(vec![0xfc; 2048])\n\n .expect(\"Failed to create nv contents\")\n\n .into(),\n\n };\n\n\n\n let tpmu_attest: TPMU_ATTEST = AttestInfo::Nv {\n\n info: expected_tpms_nv_certify_info\n\n .try_into()\n\n .expect(\"Failed to convert TPMS_NV_CERTIFY_INFO into NvCertifyInfo\"),\n\n }\n\n .into();\n\n\n\n let actual_tpms_nv_certify_info = unsafe { &tpmu_attest.nv };\n\n\n\n 
crate::common::ensure_tpms_nv_certify_info_equality(\n\n &expected_tpms_nv_certify_info,\n\n actual_tpms_nv_certify_info,\n\n );\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/attest_info_test.rs", "rank": 64, "score": 187660.23765320762 }, { "content": "#[test]\n\nfn sign_with_bad_auth() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, key_auth) = ctx.create_key(key_params, 16).unwrap();\n\n let auth_value = key_auth.unwrap();\n\n let mut bad_auth_values = auth_value.value().to_vec();\n\n bad_auth_values[6..10].copy_from_slice(&[0xDE, 0xAD, 0xBE, 0xEF]);\n\n ctx.sign(\n\n key,\n\n key_params,\n\n Some(Auth::try_from(bad_auth_values).unwrap()),\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 65, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn wrong_auth_size() {\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_auth_size(33)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::WrongParamSize)\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 66, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn verify_wrong_digest() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n\n\n let signature = 
ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap();\n\n let pub_key = ctx\n\n .load_external_public_key(key.public().clone(), key_params)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 67, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn wrong_key_sizes() {\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_size(1023)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::InvalidParam)\n\n );\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_size(1025)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::InvalidParam)\n\n );\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 68, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn verify_wrong_key() {\n\n let mut ctx = create_ctx();\n\n let key_params1 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key1, auth1) = ctx.create_key(key_params1, 16).unwrap();\n\n\n\n let key_params2 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key2, _) = ctx.create_key(key_params2, 16).unwrap();\n\n\n\n // Sign with the first key\n\n let signature = ctx\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 69, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn 
load_with_invalid_params() {\n\n let pub_key = vec![\n\n 0x96, 0xDC, 0x72, 0x77, 0x49, 0x82, 0xFD, 0x2D, 0x06, 0x65, 0x8C, 0xE5, 0x3A, 0xCD, 0xED,\n\n 0xBD, 0x50, 0xD7, 0x6F, 0x3B, 0xE5, 0x6A, 0x76, 0xED, 0x3E, 0xD8, 0xF9, 0x93, 0x40, 0x55,\n\n 0x86, 0x6F, 0xBE, 0x76, 0x60, 0xD2, 0x03, 0x23, 0x59, 0x19, 0x8D, 0xFC, 0x51, 0x6A, 0x95,\n\n 0xC8, 0x5D, 0x5A, 0x89, 0x4D, 0xE5, 0xEA, 0x44, 0x78, 0x29, 0x62, 0xDB, 0x3F, 0xF0, 0xF7,\n\n 0x49, 0x15, 0xA5, 0xAE, 0x6D, 0x81, 0x8F, 0x06, 0x7B, 0x0B, 0x50, 0x7A, 0x2F, 0xEB, 0x00,\n\n 0xB6, 0x12, 0xF3, 0x10, 0xAF, 0x4D, 0x4A, 0xA9, 0xD9, 0x81, 0xBB, 0x1E, 0x2B, 0xDF, 0xB9,\n\n 0x33, 0x3D, 0xD6, 0xB7, 0x8D, 0x23, 0x7C, 0x7F, 0xE7, 0x12, 0x48, 0x4F, 0x26, 0x73, 0xAF,\n\n 0x63, 0x51, 0xA9, 0xDB, 0xA4, 0xAB, 0xB7, 0x27, 0x00, 0xD7, 0x1C, 0xFC, 0x2F, 0x61, 0x2A,\n\n 0xB9, 0x5B, 0x66, 0xA0, 0xE0, 0xD8, 0xF3, 0xD9,\n\n ];\n\n\n\n let key_params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let mut ctx = create_ctx();\n\n let _ = ctx\n\n .load_external_public_key(PublicKey::Rsa(pub_key), key_params)\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 70, "score": 186767.42931513462 }, { "content": "#[test]\n\nfn rsa_exponent_create_test() {\n\n let expected_error = Err(Error::WrapperError(WrapperErrorKind::InvalidParam));\n\n // Valid values for RsaExponent are only 0 or a prime number value larger then 2.\n\n assert_eq!(expected_error, RsaExponent::create(1));\n\n\n\n // The specification says that 0 or any prime number larger then 2 should be accepted.\n\n let _ = RsaExponent::create(0).expect(\"Failed to create a RsaExponent from the value 0\");\n\n let _ = RsaExponent::create(5).expect(\"Failed to create a RsaExponent from the value 5\");\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_exponent_tests.rs", "rank": 71, "score": 185583.36251122912 }, { "content": "#[test]\n\nfn load_bad_sized_key() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa1024,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let _ = ctx\n\n .load_external_public_key(PublicKey::Rsa(vec![0xDE, 0xAD, 0xBE, 0xEF]), key_params)\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 72, "score": 184551.42202721586 }, { "content": "#[test]\n\nfn make_cred_params_name() {\n\n // create a Transient key context, generate a key and\n\n // obtain the Make Credential parameters\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n let make_cred_params = ctx.get_make_cred_params(obj, None).unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 73, "score": 184551.42202721586 }, { "content": "#[test]\n\nfn activate_credential_wrong_key() {\n\n // create a Transient key context, generate two keys and\n\n // obtain the Make Credential parameters for the first one\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n // \"Good\" 
key (for which the credential will be generated)\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 74, "score": 184551.42202721586 }, { "content": "#[test]\n\nfn activate_credential_wrong_data() {\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n // \"Good\" key (for which the credential will be generated)\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n\n\n // No data (essentially wrong size)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 75, "score": 184551.42202721586 }, { "content": "#[test]\n\nfn two_signatures_different_digest() {\n\n let mut ctx = create_ctx();\n\n let key_params1 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key1, auth1) = ctx.create_key(key_params1, 16).unwrap();\n\n let key_params2 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key2, auth2) = ctx.create_key(key_params2, 16).unwrap();\n\n let signature1 = ctx\n\n .sign(\n\n key1,\n\n key_params1,\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 76, "score": 184551.42202721586 }, { 
"content": "#[test]\n\nfn test_attest_with_nv_creation_info_into_tpm_type_conversions() {\n\n let expected_index_name =\n\n Name::try_from(vec![0xf0u8; 68]).expect(\"Failed to create index name\");\n\n let expected_offset = 12u16;\n\n let expected_nv_contents =\n\n MaxNvBuffer::try_from(vec![0xfc; 2048]).expect(\"Failed to create nv contents\");\n\n let expected_attest_info = AttestInfo::Nv {\n\n info: TPMS_NV_CERTIFY_INFO {\n\n indexName: expected_index_name.clone().into(),\n\n offset: expected_offset,\n\n nvContents: expected_nv_contents.clone().into(),\n\n }\n\n .try_into()\n\n .expect(\"Failed to convert TPMS_NV_CERTIFY_INFO to NvCreationInfo\"),\n\n };\n\n\n\n let (attest, expected_tpms_attest) =\n\n create_validated_test_parameters(expected_attest_info, AttestationType::Nv);\n\n\n\n if let AttestInfo::Nv { info } = attest.attested() {\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/attest_tests.rs", "rank": 77, "score": 183552.68676484833 }, { "content": "fn get_ek_object_public(context: &mut crate::Context) -> Result<PublicKey> {\n\n let key_handle = ek::create_ek_object(context, AsymmetricAlgorithm::Rsa, None)?;\n\n let (attesting_key_pub, _, _) = context.read_public(key_handle).or_else(|e| {\n\n context.flush_context(key_handle.into())?;\n\n Err(e)\n\n })?;\n\n context.flush_context(key_handle.into())?;\n\n\n\n PublicKey::try_from(attesting_key_pub)\n\n}\n", "file_path": "tss-esapi/src/abstraction/transient/key_attestation.rs", "rank": 78, "score": 182078.51948473224 }, { "content": "#[allow(dead_code)]\n\npub fn setup_logging() {\n\n LOG_INIT.call_once(|| {\n\n env_logger::init();\n\n });\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 79, "score": 181694.7366555589 }, { "content": "#[allow(dead_code)]\n\npub fn get_pcr_policy_digest(\n\n context: &mut Context,\n\n mangle: bool,\n\n do_trial: bool,\n\n) -> (Digest, PolicySession) {\n\n let old_ses = context.sessions();\n\n 
context.clear_sessions();\n\n\n\n // Read the pcr values using pcr_read\n\n let pcr_selection_list = PcrSelectionListBuilder::new()\n\n .with_selection(HashingAlgorithm::Sha256, &[PcrSlot::Slot0, PcrSlot::Slot1])\n\n .build();\n\n\n\n let (_update_counter, pcr_selection_list_out, pcr_data) = context\n\n .pcr_read(&pcr_selection_list)\n\n .map(|(update_counter, read_pcr_selections, read_pcr_digests)| {\n\n (\n\n update_counter,\n\n read_pcr_selections.clone(),\n\n PcrData::create(&read_pcr_selections, &read_pcr_digests)\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 80, "score": 176167.58728605122 }, { "content": "/// Retrieve the Endorsement Key public certificate from the TPM\n\npub fn retrieve_ek_pubcert(context: &mut Context, alg: AsymmetricAlgorithm) -> Result<Vec<u8>> {\n\n let nv_idx = match alg {\n\n AsymmetricAlgorithm::Rsa => RSA_2048_EK_CERTIFICATE_NV_INDEX,\n\n AsymmetricAlgorithm::Ecc => ECC_P256_EK_CERTIFICATE_NV_INDEX,\n\n AsymmetricAlgorithm::Null => {\n\n // TDOD: Figure out what to with Null.\n\n return Err(Error::local_error(WrapperErrorKind::UnsupportedParam));\n\n }\n\n };\n\n\n\n let nv_idx = NvIndexTpmHandle::new(nv_idx).unwrap();\n\n\n\n let nv_auth_handle = TpmHandle::NvIndex(nv_idx);\n\n let nv_auth_handle = context.execute_without_session(|ctx| {\n\n ctx.tr_from_tpm_public(nv_auth_handle)\n\n .map(|v| NvAuth::NvIndex(v.into()))\n\n })?;\n\n\n\n context.execute_with_nullauth_session(|ctx| nv::read_full(ctx, nv_auth_handle, nv_idx))\n\n}\n", "file_path": "tss-esapi/src/abstraction/ek.rs", "rank": 81, "score": 172958.70892368903 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nmod algorithm_attributes_tests;\n\nmod locality_attributes_tests;\n\nmod nv_index_attributes_tests;\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/mod.rs", "rank": 82, "score": 172880.2025688031 }, { "content": "#[allow(dead_code)]\n\npub fn 
signing_key_pub() -> Public {\n\n utils::create_unrestricted_signing_rsa_public(\n\n RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create an unrestricted signing rsa public structure\")\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 83, "score": 172445.98382290086 }, { "content": "#[allow(dead_code)]\n\npub fn decryption_key_pub() -> Public {\n\n utils::create_restricted_decryption_rsa_public(\n\n Cipher::aes_256_cfb()\n\n .try_into()\n\n .expect(\"Failed to create symmetric object\"),\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create a restricted decryption rsa public structure\")\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 84, "score": 172445.98382290086 }, { "content": "mod general_esys_tr_tests;\n\nmod tpm_commands;\n", "file_path": "tss-esapi/tests/integration_tests/context_tests/mod.rs", "rank": 85, "score": 172310.6467529752 }, { "content": "#[allow(dead_code)]\n\npub fn name_conf() -> TctiNameConf {\n\n match env::var(\"TEST_TCTI\") {\n\n Err(_) => TctiNameConf::Mssim(Default::default()),\n\n Ok(tctistr) => TctiNameConf::from_str(&tctistr).expect(\"Error parsing TEST_TCTI\"),\n\n }\n\n}\n\n\n\nmod tcti_context_tests;\n\nmod tcti_info_tests;\n", "file_path": "tss-esapi/tests/integration_tests/tcti_ldr_tests/mod.rs", "rank": 86, "score": 171579.65837432072 }, { "content": "#[allow(dead_code)]\n\npub fn encryption_decryption_key_pub() -> Public {\n\n utils::create_unrestricted_encryption_decryption_rsa_public(\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create an unrestricted encryption decryption rsa public structure\")\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 87, "score": 169828.40316629186 }, { 
"content": "// Copyright 2020 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nuse tss_esapi::{\n\n constants::{\n\n tss::{\n\n TPM2_NT_BITS, TPM2_NT_COUNTER, TPM2_NT_EXTEND, TPM2_NT_ORDINARY, TPM2_NT_PIN_FAIL,\n\n TPM2_NT_PIN_PASS,\n\n },\n\n NvIndexType,\n\n },\n\n tss2_esys::TPM2_NT,\n\n};\n\n\n\nuse std::convert::{From, TryFrom};\n\n\n\n#[test]\n", "file_path": "tss-esapi/tests/integration_tests/constants_tests/nv_index_type_tests.rs", "rank": 88, "score": 168190.13532358815 }, { "content": "/// Create the [Public] structure for an ECC unrestricted signing key.\n\n///\n\n/// * `scheme` - Asymmetric scheme to be used for signing; *must* be an RSA signing scheme\n\n/// * `curve` - identifier of the precise curve to be used with the key\n\npub fn create_unrestricted_signing_ecc_public(\n\n scheme: EccScheme,\n\n curve: EccCurve,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Ecc)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_ecc_parameters(\n\n PublicEccParametersBuilder::new_unrestricted_signing_key(scheme, curve).build()?,\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 89, "score": 167194.06686544043 }, { "content": "/// Create the [Public] structure for an RSA unrestricted signing key.\n\n///\n\n/// * `scheme` - RSA scheme to be used for signing\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_unrestricted_signing_rsa_public(\n\n scheme: RsaScheme,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let 
object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 90, "score": 167192.64434738248 }, { "content": "/// Create the [Public] structure for a restricted decryption key.\n\n///\n\n/// * `symmetric` - Cipher to be used for decrypting children of the key\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_restricted_decryption_rsa_public(\n\n symmetric: SymmetricDefinitionObject,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(true)\n\n .with_sign_encrypt(false)\n\n .with_restricted(true)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 91, "score": 167192.64434738248 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nmod asymmetric_primitives_tests;\n\nmod attached_components_tests;\n\nmod attestation_commands_tests;\n\nmod authenticated_countdown_timer_tests;\n\nmod capability_commands_tests;\n\nmod clocks_and_timers_tests;\n\nmod command_audit_tests;\n\nmod 
context_management_tests;\n\nmod dictionary_attack_functions_tests;\n\nmod duplication_commands_tests;\n\nmod enhanced_authorization_ea_commands_tests;\n\nmod ephemeral_ec_keys_tests;\n\nmod field_upgrade_tests;\n\nmod hash_hmac_event_sequences_tests;\n\nmod hierarchy_commands_tests;\n\nmod integrity_collection_pcr_tests;\n\nmod miscellaneous_management_functions_tests;\n\nmod non_volatile_storage_tests;\n\nmod object_commands_tests;\n\nmod random_number_generator_tests;\n\nmod session_commands_tests;\n\nmod signing_and_signature_verification_tests;\n\nmod startup_tests;\n\nmod symmetric_primitives_tests;\n\nmod testing_tests;\n\nmod vendor_specific_tests;\n", "file_path": "tss-esapi/tests/integration_tests/context_tests/tpm_commands/mod.rs", "rank": 92, "score": 166681.96402198583 }, { "content": "#[test]\n\nfn bindgen_test_layout_TPMS_CONTEXT() {\n\n assert_eq!(\n\n ::std::mem::size_of::<TPMS_CONTEXT>(),\n\n 5208usize,\n\n concat!(\"Size of: \", stringify!(TPMS_CONTEXT))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<TPMS_CONTEXT>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(TPMS_CONTEXT))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<TPMS_CONTEXT>())).sequence as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(TPMS_CONTEXT),\n\n \"::\",\n\n stringify!(sequence)\n\n )\n", "file_path": "tss-esapi-sys/src/bindings/x86_64-unknown-darwin.rs", "rank": 93, "score": 165307.29476588254 }, { "content": "/// Create the [Public] structure for an RSA unrestricted signing key.\n\n///\n\n/// * `scheme` - RSA scheme to be used for signing\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\n/// * `rsa_public_key` - The public part of the RSA key that is going to be used as unique identifier.\n\npub fn create_unrestricted_signing_rsa_public_with_unique(\n\n scheme: RsaScheme,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n 
rsa_public_key: &PublicKeyRsa,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 94, "score": 164281.96447044838 }, { "content": "/// Create the [Public] structure for an unrestricted encryption/decryption key.\n\n///\n\n/// * `symmetric` - Cipher to be used for decrypting children of the key\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_unrestricted_encryption_decryption_rsa_public(\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(true)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n\n PublicRsaParametersBuilder::new()\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 95, "score": 164281.42207349208 }, { "content": "pub fn ensure_tpms_nv_certify_info_equality(\n\n expected: &TPMS_NV_CERTIFY_INFO,\n\n actual: &TPMS_NV_CERTIFY_INFO,\n\n) {\n\n ensure_sized_buffer_field_equality!(expected, actual, indexName, name, TPM2B_NAME);\n\n assert_eq!(\n\n expected.offset, actual.offset,\n\n \"'offset' value in TPMS_NV_CERTIFY_INFO, 
mismatch between actual and expected\",\n\n );\n\n ensure_sized_buffer_field_equality!(expected, actual, nvContents, buffer, TPM2B_MAX_NV_BUFFER);\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/tpms_types_equality_checks.rs", "rank": 96, "score": 163961.89794328736 }, { "content": " ///\n\n /// # Arguments\n\n /// * `set` - `true` indicates that the attribute should have the value SET.\n\n /// `false`indicates that the attribute should have the value CLEAR.\n\n pub fn with_written(mut self, set: bool) -> Self {\n\n self.nv_index_attributes.set_written(set);\n\n self\n\n }\n\n\n\n /// Controls the `platform create` attribute\n\n ///\n\n /// # Arguments\n\n /// * `set` - `true` indicates that the attribute should have the value SET.\n\n /// `false`indicates that the attribute should have the value CLEAR.\n\n pub fn with_platform_create(mut self, set: bool) -> Self {\n\n self.nv_index_attributes.set_platform_create(set);\n\n self\n\n }\n\n\n\n /// Controls the `read stclear` attribute\n", "file_path": "tss-esapi/src/attributes/nv_index.rs", "rank": 97, "score": 163319.55175668417 }, { "content": " pub platform_create, _: 30;\n\n _, set_platform_create: 30;\n\n pub read_stclear, _: 31;\n\n _, set_read_stclear: 31;\n\n}\n\n\n\nimpl NvIndexAttributes {\n\n /// Returns the `NvIndexType` of the `NvIndexAttributes`\n\n pub fn index_type(&self) -> Result<NvIndexType> {\n\n NvIndexType::try_from(self.tss_index_type())\n\n }\n\n\n\n /// Validates the attributes\n\n ///\n\n /// # Details\n\n /// Performs checks on `self` in order to verify\n\n /// that the attributes conforms to the requirements\n\n /// specified in the standard.\n\n ///\n\n /// # Errors\n", "file_path": "tss-esapi/src/attributes/nv_index.rs", "rank": 98, "score": 163316.37883517393 }, { "content": " ///\n\n /// # Arguments\n\n /// * `set` - `true` indicates that the attribute should have the value SET.\n\n /// `false`indicates that the attribute should have the value CLEAR.\n\n pub fn 
with_read_stclear(mut self, set: bool) -> Self {\n\n self.nv_index_attributes.set_read_stclear(set);\n\n self\n\n }\n\n\n\n /// Builds the nv index attributes.\n\n ///\n\n /// # Errors\n\n /// Returns an error if some attributes are missing\n\n /// or are in conflict with each other.\n\n pub fn build(self) -> Result<NvIndexAttributes> {\n\n self.nv_index_attributes.validate()?;\n\n Ok(self.nv_index_attributes)\n\n }\n\n}\n\n\n\nimpl Default for NvIndexAttributesBuilder {\n\n fn default() -> Self {\n\n NvIndexAttributesBuilder::new()\n\n }\n\n}\n", "file_path": "tss-esapi/src/attributes/nv_index.rs", "rank": 99, "score": 163314.73746388135 } ]
Rust
src/syntax/src/statement.rs
isaacazuelos/kurt
f86c3230d9d0cd54b307eda27e3ddbb4c145d1f0
use parser::Parse; use crate::lexer::{Reserved, TokenKind}; use super::*; #[derive(Debug)] pub enum Statement<'a> { Binding(Binding<'a>), Empty(Span), Expression(Expression<'a>), If(IfOnly<'a>), } impl<'a> Syntax for Statement<'a> { fn span(&self) -> Span { match self { Statement::Binding(b) => b.span(), Statement::Empty(s) => *s, Statement::Expression(s) => s.span(), Statement::If(i) => i.span(), } } } impl<'a> Parse<'a> for Statement<'a> { type SyntaxError = SyntaxError; fn parse_with(parser: &mut Parser<'a>) -> SyntaxResult<Statement<'a>> { match parser.peek_kind() { Some(TokenKind::Semicolon) => { Ok(Statement::Empty(parser.next_span())) } Some(TokenKind::Reserved(Reserved::Var | Reserved::Let)) => { Ok(Statement::Binding(parser.parse()?)) } Some(TokenKind::Reserved(Reserved::If)) => { let if_only: IfOnly = parser.parse()?; if parser.peek_kind() == Some(TokenKind::Reserved(Reserved::Else)) { let if_else = if_only.expand_with_else(parser)?; Ok(Statement::Expression(Expression::If(if_else))) } else { Ok(Statement::If(if_only)) } } Some(_) => Ok(Statement::Expression(parser.parse()?)), None => Err(Error::EOF(parser.eof_span())), } } } #[cfg(test)] mod parser_tests { use super::*; #[test] fn parse_expression_literal() { let mut parser = Parser::new("0").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!( literal, Ok(Statement::Expression(Expression::Literal(_))) )); assert!(parser.is_empty()); } #[test] fn parse_empty() { let mut parser = Parser::new(";").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!(literal, Ok(Statement::Empty(_)))); assert!(!parser.is_empty()); } #[test] fn parse_binding() { let mut parser = Parser::new("let x = 1;").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!(literal, Ok(Statement::Binding(_)))); assert!(!parser.is_empty()); } #[test] fn parse_expression_with_semicolon() { let mut parser = Parser::new("0;").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!( 
literal, Ok(Statement::Expression(Expression::Literal(_))) )); assert!(!parser.is_empty()); } #[test] fn parse_if_only() { let mut parser = Parser::new("if true { }").unwrap(); let syntax = parser.parse::<Statement>(); assert!( matches!(syntax, Ok(Statement::If(_))), "expected If statement, but got {:#?}", syntax ); assert!(parser.is_empty()); } #[test] fn parse_if_else() { let mut parser = Parser::new("if true { } else { }").unwrap(); let syntax = parser.parse::<Statement>(); assert!( matches!(syntax, Ok(Statement::Expression(Expression::If(_)))), "expected If expression, but got {:#?}", syntax ); assert!(parser.is_empty()); } }
use parser::Parse; use crate::lexer::{Reserved, TokenKind}; use super::*; #[derive(Debug)] pub enum Statement<'a> { Binding(Binding<'a>), Empty(Span), Expression(Expression<'a>), If(IfOnly<'a>), } impl<'a> Syntax for Statement<'a> {
} impl<'a> Parse<'a> for Statement<'a> { type SyntaxError = SyntaxError; fn parse_with(parser: &mut Parser<'a>) -> SyntaxResult<Statement<'a>> { match parser.peek_kind() { Some(TokenKind::Semicolon) => { Ok(Statement::Empty(parser.next_span())) } Some(TokenKind::Reserved(Reserved::Var | Reserved::Let)) => { Ok(Statement::Binding(parser.parse()?)) } Some(TokenKind::Reserved(Reserved::If)) => { let if_only: IfOnly = parser.parse()?; if parser.peek_kind() == Some(TokenKind::Reserved(Reserved::Else)) { let if_else = if_only.expand_with_else(parser)?; Ok(Statement::Expression(Expression::If(if_else))) } else { Ok(Statement::If(if_only)) } } Some(_) => Ok(Statement::Expression(parser.parse()?)), None => Err(Error::EOF(parser.eof_span())), } } } #[cfg(test)] mod parser_tests { use super::*; #[test] fn parse_expression_literal() { let mut parser = Parser::new("0").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!( literal, Ok(Statement::Expression(Expression::Literal(_))) )); assert!(parser.is_empty()); } #[test] fn parse_empty() { let mut parser = Parser::new(";").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!(literal, Ok(Statement::Empty(_)))); assert!(!parser.is_empty()); } #[test] fn parse_binding() { let mut parser = Parser::new("let x = 1;").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!(literal, Ok(Statement::Binding(_)))); assert!(!parser.is_empty()); } #[test] fn parse_expression_with_semicolon() { let mut parser = Parser::new("0;").unwrap(); let literal = parser.parse::<Statement>(); assert!(matches!( literal, Ok(Statement::Expression(Expression::Literal(_))) )); assert!(!parser.is_empty()); } #[test] fn parse_if_only() { let mut parser = Parser::new("if true { }").unwrap(); let syntax = parser.parse::<Statement>(); assert!( matches!(syntax, Ok(Statement::If(_))), "expected If statement, but got {:#?}", syntax ); assert!(parser.is_empty()); } #[test] fn parse_if_else() { let mut parser = 
Parser::new("if true { } else { }").unwrap(); let syntax = parser.parse::<Statement>(); assert!( matches!(syntax, Ok(Statement::Expression(Expression::If(_)))), "expected If expression, but got {:#?}", syntax ); assert!(parser.is_empty()); } }
fn span(&self) -> Span { match self { Statement::Binding(b) => b.span(), Statement::Empty(s) => *s, Statement::Expression(s) => s.span(), Statement::If(i) => i.span(), } }
function_block-full_function
[]
Rust
component/src/desktop_window.rs
rohankumardubey/makepad
2248d20bd8e1431616acc89846a841abc36dffb7
use makepad_render::*; use crate::desktop_button::*; use crate::window_menu::*; use crate::button_logic::*; live_register!{ use crate::theme::*; DesktopWindow: {{DesktopWindow}} { clear_color: (COLOR_WINDOW_BG) caption_bg: {color: (COLOR_WINDOW_CAPTION)} caption: "Desktop Window", main_view:{}, border_fill: {color: (COLOR_WINDOW_CAPTION)}, inner_view:{ }, caption_layout:{ padding:{t:2.0} align: {fx: 0.5, fy: 0.5}, walk: { width: Width::Filled, height: Height::Fixed(26.), } } caption_view: { layout: { walk: { width: Width::Filled, height: Height::Computed }, } } } } #[derive(Live, LiveHook)] pub struct DesktopWindow { #[rust] pub caption_size: Vec2, pass: Pass, color_texture: Texture, depth_texture: Texture, window: Window, main_view: View, caption_view: View, inner_view: View, caption_layout: Layout, clear_color: Vec4, min_btn: DesktopButton, max_btn: DesktopButton, close_btn: DesktopButton, xr_btn: DesktopButton, fullscreen_btn: DesktopButton, caption_text: DrawText, caption_bg: DrawColor, caption: String, border_fill: DrawColor, #[rust(WindowMenu::new(cx))] pub window_menu: WindowMenu, #[rust(Menu::main(vec![ Menu::sub("App", vec![ ]), ]))] default_menu: Menu, #[rust] pub last_menu: Option<Menu>, #[rust] pub inner_over_chrome: bool, } #[derive(Clone, PartialEq)] pub enum DesktopWindowEvent { EventForOtherWindow, WindowClosed, WindowGeomChange(WindowGeomChangeEvent), None } impl DesktopWindow { pub fn handle_event(&mut self, cx: &mut Cx, event: &mut Event) -> DesktopWindowEvent { if let ButtonAction::WasClicked = self.xr_btn.handle_desktop_button(cx, event) { if self.window.xr_is_presenting(cx) { self.window.xr_stop_presenting(cx); } else { self.window.xr_start_presenting(cx); } } if let ButtonAction::WasClicked = self.fullscreen_btn.handle_desktop_button(cx, event) { if self.window.is_fullscreen(cx) { self.window.normal(cx); } else { self.window.fullscreen(cx); } } if let ButtonAction::WasClicked = self.min_btn.handle_desktop_button(cx, event) { 
self.window.minimize(cx); } if let ButtonAction::WasClicked = self.max_btn.handle_desktop_button(cx, event) { if self.window.is_fullscreen(cx) { self.window.restore(cx); } else { self.window.maximize(cx); } } if let ButtonAction::WasClicked = self.close_btn.handle_desktop_button(cx, event) { self.window.close(cx); } let is_for_other_window = match event { Event::WindowCloseRequested(ev) => ev.window_id != self.window.window_id, Event::WindowClosed(ev) => { if ev.window_id == self.window.window_id { return DesktopWindowEvent::WindowClosed } true } Event::WindowGeomChange(ev) => { if ev.window_id == self.window.window_id { return DesktopWindowEvent::WindowGeomChange(ev.clone()) } true }, Event::WindowDragQuery(dq) => { if dq.window_id == self.window.window_id { if dq.abs.x < self.caption_size.x && dq.abs.y < self.caption_size.y { if dq.abs.x < 50. { dq.response = WindowDragQueryResponse::SysMenu; } else { dq.response = WindowDragQueryResponse::Caption; } } } true } Event::FingerDown(ev) => ev.window_id != self.window.window_id, Event::FingerMove(ev) => ev.window_id != self.window.window_id, Event::FingerHover(ev) => ev.window_id != self.window.window_id, Event::FingerUp(ev) => ev.window_id != self.window.window_id, Event::FingerScroll(ev) => ev.window_id != self.window.window_id, _ => false }; if is_for_other_window { DesktopWindowEvent::EventForOtherWindow } else { DesktopWindowEvent::None } } pub fn begin(&mut self, cx: &mut Cx, menu: Option<&Menu>) -> ViewRedraw { if !self.main_view.view_will_redraw(cx) { return Err(()) } self.window.begin(cx); self.pass.begin(cx); self.pass.add_color_texture(cx, &self.color_texture, PassClearColor::ClearWith(self.clear_color)); self.pass.set_depth_texture(cx, &self.depth_texture, PassClearDepth::ClearWith(1.0)); self.main_view.begin(cx).unwrap(); /*self.caption_view.set_layout(cx, Layout { walk: Walk::wh(Width::Filled, Height::Computed), ..Layout::default() });*/ if self.caption_view.begin(cx).is_ok() { let process_chrome = match 
cx.platform_type { PlatformType::Linux {custom_window_chrome} => custom_window_chrome, _ => true }; if process_chrome { match cx.platform_type { PlatformType::MsWindows | PlatformType::Unknown | PlatformType::Linux {..} => { self.caption_bg.begin(cx, Layout { align: Align {fx: 1.0, fy: 0.0}, walk: Walk::wh(Width::Filled, Height::Computed), ..Default::default() }); if let Some(_menu) = menu { } self.min_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMin); if self.window.is_fullscreen(cx) { self.max_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMaxToggled); } else { self.max_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMax); } self.close_btn.draw_desktop_button(cx, DesktopButtonType::WindowsClose); cx.change_turtle_align_x_cab(0.5); cx.compute_turtle_height(); cx.change_turtle_align_y_cab(0.5); cx.reset_turtle_pos(); cx.move_turtle(50., 0.); self.caption_size = Vec2 {x: cx.get_width_left(), y: cx.get_height_left()}; self.caption_text.draw_walk(cx, &self.caption); self.caption_bg.end(cx); cx.turtle_new_line(); }, PlatformType::OSX => { if let Some(menu) = menu { cx.update_menu(menu); } else { cx.update_menu(&self.default_menu); } self.caption_bg.begin(cx, self.caption_layout); self.caption_size = Vec2 {x: cx.get_width_left(), y: cx.get_height_left()}; self.caption_text.draw_walk(cx, &self.caption); self.caption_bg.end(cx); cx.turtle_new_line(); }, PlatformType::WebBrowser {..} => { if self.window.is_fullscreen(cx) { self.caption_bg.begin(cx, Layout { align: Align {fx: 0.5, fy: 0.5}, walk: Walk::wh(Width::Filled, Height::Fixed(22.)), ..Default::default() }); self.caption_bg.end(cx); cx.turtle_new_line(); } } } } self.caption_view.end(cx); } cx.turtle_new_line(); if self.inner_view.begin(cx).is_ok(){ return Ok(()) } self.end_inner(cx, true); Err(()) } pub fn end(&mut self, cx: &mut Cx) { self.end_inner(cx, false); } fn end_inner(&mut self, cx: &mut Cx, no_inner:bool) { if !no_inner{ self.inner_view.end(cx); } if !cx.platform_type.is_desktop() && 
!self.window.is_fullscreen(cx) { cx.reset_turtle_pos(); cx.move_turtle(cx.get_width_total() - 50.0, 0.); self.fullscreen_btn.draw_desktop_button(cx, DesktopButtonType::Fullscreen); } if self.window.xr_can_present(cx) { cx.reset_turtle_pos(); cx.move_turtle(cx.get_width_total() - 100.0, 0.); self.xr_btn.draw_desktop_button(cx, DesktopButtonType::XRMode); } self.main_view.end(cx); self.pass.end(cx); self.window.end(cx); } }
use makepad_render::*; use crate::desktop_button::*; use crate::window_menu::*; use crate::button_logic::*; live_register!{ use crate::theme::*; DesktopWindow: {{DesktopWindow}} { clear_color: (COLOR_WINDOW_BG) caption_bg: {color: (COLOR_WINDOW_CAPTION)} caption: "Desktop Window", main_view:{}, border_fill: {color: (COLOR_WINDOW_CAPTION)}, inner_view:{ }, caption_layout:{ padding:{t:2.0} align: {fx: 0.5, fy: 0.5}, walk: { width: Width::Filled, height: Height::Fixed(26.), } } caption_view: { layout: { walk: { width: Width::Filled, height: Height::Computed }, } } } } #[derive(Live, LiveHook)] pub struct DesktopWindow { #[rust] pub caption_size: Vec2, pass: Pass, color_texture: Texture, depth_texture: Texture, window: Window, main_view: View, caption_view: View, inner_view: View, caption_layout: Layout, clear_color: Vec4, min_btn: DesktopButton, max_btn: DesktopButton, close_btn: DesktopButton, xr_btn: DesktopButton, fullscreen_btn: DesktopButton, caption_text: DrawText, caption_bg: DrawColor, caption: String, border_fill: DrawColor, #[rust(WindowMenu::new(cx))] pub window_menu: WindowMenu, #[rust(Menu::main(vec![ Menu::sub("App", vec![ ]), ]))] default_menu: Menu, #[rust] pub last_menu: Option<Menu>, #[rust] pub inner_over_chrome: bool, } #[derive(Clone, PartialEq)] pub enum DesktopWindowEvent { EventForOtherWindow, WindowClosed, WindowGeomChange(WindowGeomChangeEvent), None } impl DesktopWindow { pub fn handle_event(&mut self, cx: &mut Cx, event: &mut Event) -> DesktopWindowEvent { if let ButtonAction::WasClicked = self.xr_btn.handle_desktop_button(cx, event) { if self.window.xr_is_presenting(cx) { self.window.xr_stop_presenting(cx); } else { self.window.xr_start_presenting(cx); } } if let ButtonAction::WasClicked = self.fullscreen_btn.handle_desktop_button(cx, event) { if self.window.is_fullscreen(cx) { self.window.normal(cx); } else { self.window.fullscreen(cx); } } if let ButtonAction::WasClicked = self.min_btn.handle_desktop_button(cx, event) { 
self.window.minimize(cx); } if let ButtonAction::WasClicked = self.max_btn.handle_desktop_button(cx, event) { if self.window.is_fullscreen(cx) { self.window.restore(cx); } else { self.window.maximize(cx); } } if let ButtonAction::WasClicked = self.close_btn.handle_desktop_button(cx, event) { self.window.close(cx); } let is_for_other_window = match event { Event::WindowCloseRequested(ev) => ev.window_id != self.window.window_id, Event::WindowClosed(ev) => { if ev.window_id == self.window.window_id { return DesktopWindowEvent::WindowClosed } true } Event::WindowGeomChange(ev) => { if ev.window_id == self.window.window_id { return DesktopWindowEvent::WindowGeomChange(ev.clone()) } true }, Event::WindowDragQuery(dq) => { if dq.window_id == self.window.window_id { if dq.abs.x < self.caption_size.x && dq.abs.y < self.caption_size.y { if dq.abs.x < 50. { dq.response = WindowDragQueryResponse::SysMenu; } else { dq.response = WindowDragQueryResponse::Caption; } } } true } Event::FingerDown(ev) => ev.window_id != self.window.window_id, Event::FingerMove(ev) => ev.window_id != self.window.window_id, Event::FingerHover(ev) => ev.window_id != self.window.window_id, Event::FingerUp(ev) => ev.window_id != self.window.window_id, Event::FingerScroll(ev) => ev.window_id != self.window.window_id, _ => false }; if is_for_other_window { DesktopWindowEvent::EventForOtherWindow } else { DesktopWindowEvent::None } } pub fn begin(&mut self, cx: &mut Cx, menu: Option<&Menu>) -> ViewRedraw { if !self.main_view.view_will_redraw(cx) { return Err(()) } self.window.begin(cx); self.pass.begin(cx); self.pass.add_color_texture(cx, &self.color_texture, PassClearColor::ClearWith(self.clear_color)); self.pass.set_depth_texture(cx, &self.depth_texture, PassClearDepth::ClearWith(1.0)); self.main_view.begin(cx).unwrap(); /*self.caption_view.set_layout(cx, Layout { walk: Walk::wh(Width::Filled, Height::Computed), ..Layout::default() });*/ if self.caption_view.begin(cx).is_ok() { let process_chrome = match 
cx.platform_type { PlatformType::Linux {custom_window_chrome} => custom_window_chrome, _ => true }; if process_chrome { match cx.platform_type { PlatformType::MsWindows | PlatformType::Unknown | PlatformType::Linux {..} => { self.caption_bg.begin(cx, Layout { align: Align {fx: 1.0, fy: 0.0}, walk: Walk::wh(Width::Filled, Height::Computed), ..Default::default() }); if let Some(_menu) = menu { } self.min_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMin); if self.window.is_fullscreen(cx) { self.max_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMaxToggled); } else { self.max_btn.draw_desktop_button(cx, DesktopButtonType::WindowsMax); } self.close_btn.draw_desktop_button(cx, DesktopButtonType::WindowsClose); cx.change_turtle_align_x_cab(0.5); cx.compute_turtle_height(); cx.change_turtle_align_y_cab(0.5); cx.reset_turtle_pos(); cx.move_turtle(50., 0.); self.caption_size = Vec2 {x: cx.get_width_left(), y: cx.get_height_left()}; self.caption_text.draw_walk(cx, &self.caption); self.caption_bg.end(cx); cx.turtle_new_line(); }, PlatformType::OSX => { if let Some(menu) = menu { cx.update_menu(menu); } else { cx.update_menu(&self.default_menu); } self.caption_bg.begin(cx, self.caption_layout); self.caption_size = Vec2 {x: cx.get_width_left(), y: cx.get_height_left()}; self.caption_text.draw_walk(cx, &self.caption); self.caption_bg.end(cx); cx.turtle_new_line(); }, PlatformType::WebBrowser {..} => { if self.window.is_fullscreen(cx) { self.caption_bg.begin(cx, Layout { align: Align {fx: 0.5, fy: 0.5}, walk: Walk::wh(Width::Filled, Height::Fixed(22.)), ..Default::default() }); self.caption_bg.end(cx); cx.turtle_new_line(); } } } } self.caption_view.end(cx); } cx.turtle_new_line(); if self.inner_view.begin(cx).is_ok(){ return Ok(()) } self.end_inner(cx, true); Err(()) } pub fn end(&mut self, cx: &mut Cx) { self.end_inner(cx, false); } fn end_inner(&mut self, cx: &mut Cx, no_inner:bool) { if !no_inner{ self.inner_view.end(cx); } if !cx.platform_type.is_desktop() && 
!self.window.is_fullscreen(cx) { cx.reset_turtle_pos(); cx.move_turtle(cx.get_width_total() - 50.0, 0.); self.fullscreen_btn.draw_desktop_button(cx, DesktopButtonType::Fullscreen); } if self.window.xr_can_present(cx) { cx.reset_turtle_pos(); cx.move_turtle(cx.get_width_total() - 100.0, 0.); self.xr_btn.draw_deskto
}
p_button(cx, DesktopButtonType::XRMode); } self.main_view.end(cx); self.pass.end(cx); self.window.end(cx); }
function_block-function_prefixed
[ { "content": "pub fn new_cxthread(&mut self) -> CxThread {\n\n CxThread {cx: self as *mut _ as u64}\n\n }\n\n \n\n pub fn get_mapped_texture_user_data(&mut self, texture: &Texture) -> Option<usize> {\n\n if let Some(texture_id) = texture.texture_id {\n\n let cxtexture = &self.textures[texture_id];\n\n if let Ok(mapped) = cxtexture.platform.mapped.lock() {\n\n return mapped.textures[mapped.read % MAPPED_TEXTURE_BUFFER_COUNT].user_data;\n\n }\n\n }\n\n None\n\n }\n\n \n\nimpl CxThread {\n\n \n\n pub fn lock_mapped_texture_f32(&mut self, texture: &Texture, user_data: usize) -> Option<&mut [f32]> {\n\n if let Some(texture_id) = texture.texture_id {\n\n let cx = unsafe {&mut *(self.cx as *mut Cx)};\n\n let cxtexture = &mut cx.textures[texture_id];\n", "file_path": "render/src/platform/mswindows/d3d11.rs", "rank": 0, "score": 333122.62759222183 }, { "content": "pub fn live_register(cx:&mut Cx){\n\n crate::theme::live_register(cx);\n\n crate::frame::live_register(cx);\n\n crate::fold_button::live_register(cx);\n\n crate::color_picker::live_register(cx);\n\n crate::scroll_shadow::live_register(cx);\n\n crate::button::live_register(cx);\n\n crate::desktop_button::live_register(cx);\n\n crate::desktop_window::live_register(cx);\n\n crate::bare_window::live_register(cx);\n\n crate::window_menu::live_register(cx);\n\n crate::fold_list::live_register(cx);\n\n crate::scroll_view::live_register(cx);\n\n crate::scroll_bar::live_register(cx);\n\n crate::file_tree::live_register(cx);\n\n crate::splitter::live_register(cx);\n\n crate::tab_close_button::live_register(cx);\n\n crate::tab::live_register(cx);\n\n crate::tab_bar::live_register(cx);\n\n crate::dock::live_register(cx);\n\n crate::frame_registry::live_register(cx);\n\n}\n", "file_path": "component/src/lib.rs", "rank": 1, "score": 327715.4305116886 }, { "content": "pub fn live_register(cx: &mut Cx){\n\n crate::design_editor::inline_widget::inline_color_picker::live_register(cx);\n\n 
crate::design_editor::inline_widget::inline_registry::live_register(cx);\n\n crate::design_editor::live_editor::live_register(cx);\n\n}\n", "file_path": "studio/src/design_editor/mod.rs", "rank": 2, "score": 318569.5656506693 }, { "content": "pub fn from_ptr_impl<CB>(cx: &mut Cx, live_ptr: LivePtr, cb: CB)\n\nwhere CB: FnOnce(&mut Cx, LiveFileId, usize, &[LiveNode]) -> usize {\n\n let live_registry_rc = cx.live_registry.clone();\n\n let live_registry = live_registry_rc.borrow();\n\n let doc = live_registry.ptr_to_doc(live_ptr);\n\n let next_index = cb(cx, live_ptr.file_id, live_ptr.index as usize, &doc.nodes);\n\n if next_index <= live_ptr.index as usize + 2 {\n\n cx.apply_error_empty_object(live_error_origin!(), live_ptr.index as usize, &doc.nodes);\n\n }\n\n}\n\n\n", "file_path": "render/src/live_traits.rs", "rank": 3, "score": 305687.9115912078 }, { "content": "fn register_factory(cx: &mut Cx) {\n\n struct Factory();\n\n impl InlineWidgetFactory for Factory {\n\n fn new(&self, cx: &mut Cx) -> Box<dyn InlineWidget> {\n\n Box::new(InlineColorPicker::new(cx))\n\n }\n\n \n\n fn can_edit_value(&self, live_registry: &LiveRegistry, bind: InlineEditBind) -> CanEdit {\n\n let node = live_registry.ptr_to_node(bind.live_ptr);\n\n match &node.value {\n\n LiveValue::Color(_) => {\n\n return CanEdit::Yes(100.0)\n\n }\n\n LiveValue::DSL {..} => {\n\n let token = live_registry.token_id_to_token(bind.live_token_id);\n\n if token.is_color(){\n\n return CanEdit::Yes(100.0)\n\n } \n\n }\n\n _ => ()\n", "file_path": "studio/src/design_editor/inline_widget/inline_color_picker.rs", "rank": 4, "score": 284767.72211786633 }, { "content": "pub fn rect_contains_with_margin(rect: &Rect, pos: Vec2, margin: &Option<Margin>) -> bool {\n\n if let Some(margin) = margin {\n\n return\n\n pos.x >= rect.pos.x - margin.l\n\n && pos.x <= rect.pos.x + rect.size.x + margin.r\n\n && pos.y >= rect.pos.y - margin.t\n\n && pos.y <= rect.pos.y + rect.size.y + margin.b;\n\n }\n\n else {\n\n return 
rect.contains(pos);\n\n }\n\n}\n\n\n\nimpl Event {\n\n \n\n \n\n pub fn hits(&mut self, cx: &mut Cx, area: Area) -> HitEvent {\n\n self.hits_with_options(cx, area, HitOptions::default())\n\n }\n\n \n", "file_path": "render/src/event/finger.rs", "rank": 5, "score": 284382.4891081415 }, { "content": "// this function parses tokens into a source-equal whitespaced output string\n\nfn token_parser_to_whitespace_matching_string(parser: &mut TokenParser, span: Span) -> (String, Vec<TokenStream>) {\n\n \n\n let mut s = String::new();\n\n let mut live_types = Vec::new();\n\n \n\n tp_to_str(parser, span, &mut s, &mut live_types, &mut None);\n\n \n\n return (s, live_types);\n\n \n\n #[derive(Clone, Copy)]\n\n struct Lc {\n\n line: usize,\n\n column: usize\n\n }\n\n \n\n impl Lc {\n\n fn _next_char(self) -> Self {\n\n Self {line: self.line, column: self.column + 1}\n\n }\n\n }\n", "file_path": "render/derive_live/src/live_register_macro.rs", "rank": 6, "score": 281055.93327861856 }, { "content": "pub fn vec2(x: f32, y: f32) -> Vec2 {Vec2 {x, y}}\n", "file_path": "render/math/src/math.rs", "rank": 7, "score": 271243.05203914957 }, { "content": "pub fn get_cocoa_window(this: &Object) -> &mut CocoaWindow {\n\n unsafe {\n\n let ptr: *mut c_void = *this.get_ivar(\"cocoa_window_ptr\");\n\n &mut *(ptr as *mut CocoaWindow)\n\n }\n\n}", "file_path": "render/src/platform/apple/cocoa_window.rs", "rank": 8, "score": 268104.2154984415 }, { "content": "#[cfg(not(feature = \"nightly\"))]\n\nfn tokenparser_to_string(parser: &mut TokenParser, _span:Span, out: &mut String, live_types:&mut Vec<TokenStream>, last_end:&mut Option<Lc>){\n\n while !parser.eat_eot(){\n\n let span = parser.span().unwrap();\n\n if let Some(delim) = parser.open_group(){\n\n // if delim is { and the next one is also { write out a type index\n\n parse_type_ident(parser, out, live_types);\n\n let (s,e) = delim_to_pair(delim);\n\n out.push(s);\n\n tokenparser_to_string(parser, span, out, live_types, last_end);\n\n 
out.push(e);\n\n }\n\n else{\n\n if let Some(tt) = &parser.current{\n\n out.push_str(&tt.to_string());\n\n }\n\n parser.advance();\n\n }\n\n }\n\n}\n\n*/\n", "file_path": "render/derive_live/src/live_register_macro.rs", "rank": 9, "score": 255745.06532108755 }, { "content": "pub fn generate_cons_fn(backend_writer:&dyn BackendWriter, string: &mut String, ty_lit: TyLit, param_tys: &[Ty]) {\n\n let mut cons_name = format!(\"consfn_{}\", ty_lit);\n\n for param_ty in param_tys {\n\n write!(cons_name, \"_{}\", param_ty).unwrap();\n\n }\n\n if !backend_writer.use_cons_fn(&cons_name) {\n\n return\n\n }\n\n \n\n backend_writer.write_ty_lit(string, ty_lit);\n\n write!(string, \" {}(\", cons_name).unwrap();\n\n let mut sep = \"\";\n\n if param_tys.len() == 1 {\n\n backend_writer.write_var_decl(string, sep, false, false, &Ident(id!(x)), &param_tys[0]);\n\n } else {\n\n for (index, param_ty) in param_tys.iter().enumerate() {\n\n write!(string, \"{}\", sep).unwrap();\n\n backend_writer.write_var_decl(string, sep, false, false,&DisplaConstructorArg(index), param_ty);\n\n sep = \", \";\n\n }\n", "file_path": "render/shader_compiler/src/generate.rs", "rank": 10, "score": 252021.1628643467 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCreateWindowEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCreateWindowEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XCreateWindowEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCreateWindowEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCreateWindowEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCreateWindowEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCreateWindowEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 11, "score": 251949.43241024215 }, { "content": "#[test]\n\nfn bindgen_test_layout_XDestroyWindowEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XDestroyWindowEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XDestroyWindowEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XDestroyWindowEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XDestroyWindowEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XDestroyWindowEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XDestroyWindowEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 12, "score": 251949.43241024215 }, { "content": "#[test]\n\nfn bindgen_test_layout_XDestroyWindowEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XDestroyWindowEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XDestroyWindowEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XDestroyWindowEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XDestroyWindowEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XDestroyWindowEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XDestroyWindowEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 13, "score": 251949.43241024215 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCreateWindowEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCreateWindowEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XCreateWindowEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCreateWindowEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCreateWindowEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCreateWindowEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCreateWindowEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 14, "score": 
251949.43241024215 }, { "content": "pub fn vec2(x:f32, y:f32)->Vec2{\n\n Vec2{x:x, y:y}\n\n}*/\n\n\n\n#[derive(Clone, Copy, Default, PartialEq, Debug)]\n\npub struct Vec3 {\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32\n\n}\n\n\n\nimpl Vec3 {\n\n \n\n pub fn from_lerp(a: Vec3, b: Vec3, f: f32) -> Vec3 {\n\n Vec3 {\n\n x: (b.x - a.x) * f + a.x,\n\n y: (b.y - a.y) * f + a.y,\n\n z: (b.z - a.z) * f + a.z\n\n }\n\n }\n", "file_path": "render/math/src/math.rs", "rank": 15, "score": 250946.62369055295 }, { "content": "pub fn nsstring_to_string(string: ObjcId) -> String {\n\n unsafe {\n\n let utf8_string: *const std::os::raw::c_uchar = msg_send![string, UTF8String];\n\n let utf8_len: usize = msg_send![string, lengthOfBytesUsingEncoding: UTF8_ENCODING];\n\n let slice = std::slice::from_raw_parts(\n\n utf8_string,\n\n utf8_len,\n\n );\n\n std::str::from_utf8_unchecked(slice).to_owned()\n\n }\n\n}\n\n\n", "file_path": "render/src/platform/apple/frameworks.rs", "rank": 16, "score": 248133.6945395624 }, { "content": "pub fn vec4(x: f32, y: f32, z: f32, w: f32) -> Vec4 {Vec4 {x, y, z, w}}\n\n\n\nconst TORAD: f32 = 0.017453292519943295;\n\nconst TODEG: f32 = 57.295779513082321;\n\n\n\n/*\n", "file_path": "render/math/src/math.rs", "rank": 17, "score": 243261.80343511447 }, { "content": "pub fn vec4(x:f32, y:f32, z:f32, w:f32)->Vec4{\n\n Vec4{x:x, y:y, z:z, w:w}\n\n}*/\n\n\n\n\n\nimpl Mat4 {\n\n pub fn identity() -> Mat4 {\n\n return Mat4 {v: [\n\n 1.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 1.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 1.0,\n\n 0.0,\n", "file_path": "render/math/src/math.rs", "rank": 18, "score": 233919.22812368994 }, { "content": "pub fn keycode_to_menu_key(keycode: KeyCode, shift: bool) -> &'static str {\n\n if !shift {\n\n match keycode {\n\n KeyCode::Backtick => \"`\",\n\n KeyCode::Key0 => \"0\",\n\n KeyCode::Key1 => \"1\",\n\n KeyCode::Key2 => \"2\",\n\n KeyCode::Key3 => \"3\",\n\n KeyCode::Key4 => \"4\",\n\n KeyCode::Key5 => \"5\",\n\n KeyCode::Key6 => 
\"6\",\n\n KeyCode::Key7 => \"7\",\n\n KeyCode::Key8 => \"8\",\n\n KeyCode::Key9 => \"9\",\n\n KeyCode::Minus => \"-\",\n\n KeyCode::Equals => \"=\",\n\n \n\n KeyCode::KeyQ => \"q\",\n\n KeyCode::KeyW => \"w\",\n\n KeyCode::KeyE => \"e\",\n", "file_path": "render/src/platform/apple/cocoa_util.rs", "rank": 19, "score": 226103.92843268986 }, { "content": "pub fn get_event_char(event: ObjcId) -> char {\n\n unsafe {\n\n let characters: ObjcId = msg_send![event, characters];\n\n if characters == nil {\n\n return '\\0'\n\n }\n\n let chars = nsstring_to_string(characters);\n\n \n\n if chars.len() == 0 {\n\n return '\\0'\n\n }\n\n chars.chars().next().unwrap()\n\n }\n\n}\n\n\n", "file_path": "render/src/platform/apple/cocoa_util.rs", "rank": 20, "score": 223668.48351293968 }, { "content": "pub fn get_all_metal_devices() -> Vec<ObjcId> {\n\n #[cfg(target_os = \"ios\")]\n\n {\n\n MTLCreateSystemDefaultDevice().into_iter().collect()\n\n }\n\n #[cfg(not(target_os = \"ios\"))]\n\n unsafe {\n\n let array = MTLCopyAllDevices();\n\n let count: u64 = msg_send![array, count];\n\n let ret = (0..count)\n\n .map( | i | msg_send![array, objectAtIndex: i])\n\n // The elements of this array are references---we convert them to owned references\n\n // (which just means that we increment the reference count here, and it is\n\n // decremented in the `Drop` impl for `Device`)\n\n .map( | device: *mut Object | msg_send![device, retain])\n\n .collect();\n\n let () = msg_send![array, release];\n\n ret\n\n }\n\n}\n\n\n\n\n", "file_path": "render/src/platform/apple/frameworks.rs", "rank": 21, "score": 221462.52611328114 }, { "content": "pub fn live_eval(live_registry: &LiveRegistry, start: usize, index: &mut usize, nodes: &[LiveNode], err:&mut Option<&mut dyn LiveEvalError>) -> LiveEval {\n\n match &nodes[*index].value {\n\n LiveValue::Float(v) => {\n\n *index += 1;\n\n return LiveEval::Float(*v);\n\n }\n\n LiveValue::Int(v) => {\n\n *index += 1;\n\n return LiveEval::Int(*v);\n\n }\n\n 
LiveValue::Vec2(v) => {\n\n *index += 1;\n\n return LiveEval::Vec2(*v);\n\n }\n\n LiveValue::Vec3(v) => {\n\n *index += 1;\n\n return LiveEval::Vec3(*v);\n\n }\n\n LiveValue::Vec4(v) => {\n\n *index += 1;\n", "file_path": "render/live_compiler/src/live_eval.rs", "rank": 22, "score": 218510.80145465583 }, { "content": "pub fn base64_encode(input: &[u8]) -> String {\n\n let mut out = String::new();\n\n let mut rem: usize = 0;\n\n let mut step = 0;\n\n for i in 0..input.len() {\n\n let inp = input[i] as usize;\n\n if step == 0 {\n\n out.push(BASE64_TABLE[inp >> 2] as char);\n\n rem = inp & 3;\n\n step += 1;\n\n }\n\n else if step == 1 {\n\n out.push(BASE64_TABLE[rem << 4 | inp >> 4] as char);\n\n rem = inp & 0xf;\n\n step += 1;\n\n }\n\n else if step == 2 {\n\n out.push(BASE64_TABLE[rem << 2 | inp >> 6] as char);\n\n out.push(BASE64_TABLE[inp & 0x3f] as char);\n\n step = 0;\n", "file_path": "tools/webserver/http/src/digest.rs", "rank": 23, "score": 218453.548171059 }, { "content": "pub fn get_event_key_modifier(event: ObjcId) -> KeyModifiers {\n\n let flags: u64 = unsafe {msg_send![event, modifierFlags]};\n\n KeyModifiers {\n\n shift: flags & NSEventModifierFlags::NSShiftKeyMask as u64 != 0,\n\n control: flags & NSEventModifierFlags::NSControlKeyMask as u64 != 0,\n\n alt: flags & NSEventModifierFlags::NSAlternateKeyMask as u64 != 0,\n\n logo: flags & NSEventModifierFlags::NSCommandKeyMask as u64 != 0,\n\n }\n\n}\n\n\n", "file_path": "render/src/platform/apple/cocoa_util.rs", "rank": 24, "score": 218213.8875883996 }, { "content": "pub fn parse_url_path(url: &str) -> Option<(String, Option<String>)> {\n\n \n\n // find the end_of_name skipping everything else\n\n let end_of_name = url.find(' ');\n\n if end_of_name.is_none() {\n\n return None;\n\n }\n\n let end_of_name = end_of_name.unwrap();\n\n let mut search = None;\n\n let end_of_name = if let Some(q) = url.find('?') {\n\n search = Some(url[q..].to_string());\n\n end_of_name.min(q)\n\n }else {end_of_name};\n\n \n\n 
let mut url = url[0..end_of_name].to_string();\n\n \n\n if url.ends_with(\"/\") {\n\n url.push_str(\"index.html\");\n\n }\n\n \n", "file_path": "tools/webserver/http/src/httputil.rs", "rank": 25, "score": 217642.91185887723 }, { "content": "#[test]\n\nfn bindgen_test_layout_XColor() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XColor>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(XColor))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XColor>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XColor))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XColor>())).pixel as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XColor),\n\n \"::\",\n\n stringify!(pixel)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 26, "score": 217158.44902363286 }, { "content": "#[test]\n\nfn bindgen_test_layout_XColor() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XColor>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(XColor))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XColor>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XColor))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XColor>())).pixel as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XColor),\n\n \"::\",\n\n stringify!(pixel)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 27, "score": 217158.44902363286 }, { "content": "#[test]\n\nfn bindgen_test_layout_XAnyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XAnyEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XAnyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XAnyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XAnyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XAnyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XAnyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", 
"file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 28, "score": 217073.6345795329 }, { "content": "#[test]\n\nfn bindgen_test_layout__XEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<_XEvent>(),\n\n 192usize,\n\n concat!(\"Size of: \", stringify!(_XEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<_XEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(_XEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<_XEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(_XEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 29, "score": 217073.6345795329 }, { "content": "#[test]\n\nfn bindgen_test_layout_XAnyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XAnyEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XAnyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XAnyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XAnyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XAnyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XAnyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 30, "score": 217073.6345795329 }, { "content": "#[test]\n\nfn bindgen_test_layout__XEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<_XEvent>(),\n\n 192usize,\n\n concat!(\"Size of: \", stringify!(_XEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<_XEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(_XEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<_XEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(_XEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 31, "score": 217073.6345795329 }, { "content": "pub fn 
get_event_keycode(event: ObjcId) -> Option<KeyCode> {\n\n let scan_code: std::os::raw::c_ushort = unsafe {\n\n msg_send![event, keyCode]\n\n };\n\n \n\n Some(match scan_code {\n\n 0x00 => KeyCode::KeyA,\n\n 0x01 => KeyCode::KeyS,\n\n 0x02 => KeyCode::KeyD,\n\n 0x03 => KeyCode::KeyF,\n\n 0x04 => KeyCode::KeyH,\n\n 0x05 => KeyCode::KeyG,\n\n 0x06 => KeyCode::KeyZ,\n\n 0x07 => KeyCode::KeyX,\n\n 0x08 => KeyCode::KeyC,\n\n 0x09 => KeyCode::KeyV,\n\n //0x0a => World 1,\n\n 0x0b => KeyCode::KeyB,\n\n 0x0c => KeyCode::KeyQ,\n\n 0x0d => KeyCode::KeyW,\n", "file_path": "render/src/platform/apple/cocoa_util.rs", "rank": 32, "score": 212741.75453848485 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCharStruct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCharStruct>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(XCharStruct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCharStruct>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(XCharStruct))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCharStruct>())).lbearing as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCharStruct),\n\n \"::\",\n\n stringify!(lbearing)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 33, "score": 212724.23091190046 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCharStruct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCharStruct>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(XCharStruct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCharStruct>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(XCharStruct))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCharStruct>())).lbearing as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCharStruct),\n\n \"::\",\n\n stringify!(lbearing)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 34, "score": 212724.23091190046 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_XFontStruct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XFontStruct>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XFontStruct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XFontStruct>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XFontStruct))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XFontStruct>())).ext_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XFontStruct),\n\n \"::\",\n\n stringify!(ext_data)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 35, "score": 212724.23091190046 }, { "content": "#[test]\n\nfn bindgen_test_layout_XFontStruct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XFontStruct>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XFontStruct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XFontStruct>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XFontStruct))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XFontStruct>())).ext_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XFontStruct),\n\n \"::\",\n\n stringify!(ext_data)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 36, "score": 212724.23091190046 }, { "content": "#[test]\n\nfn bindgen_test_layout_XWindowAttributes() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XWindowAttributes>(),\n\n 136usize,\n\n concat!(\"Size of: \", stringify!(XWindowAttributes))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XWindowAttributes>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XWindowAttributes))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XWindowAttributes>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XWindowAttributes),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 37, "score": 212712.99575445545 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_XWindowAttributes() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XWindowAttributes>(),\n\n 136usize,\n\n concat!(\"Size of: \", stringify!(XWindowAttributes))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XWindowAttributes>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XWindowAttributes))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XWindowAttributes>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XWindowAttributes),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 38, "score": 212712.99575445545 }, { "content": "#[test]\n\nfn bindgen_test_layout_XWindowChanges() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XWindowChanges>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XWindowChanges))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XWindowChanges>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XWindowChanges))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XWindowChanges>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XWindowChanges),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 39, "score": 212712.99575445545 }, { "content": "#[test]\n\nfn bindgen_test_layout_XWindowChanges() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XWindowChanges>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XWindowChanges))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XWindowChanges>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XWindowChanges))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XWindowChanges>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XWindowChanges),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 40, "score": 212712.99575445545 }, { "content": 
"#[test]\n\nfn bindgen_test_layout_XSelectionEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSelectionEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XSelectionEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSelectionEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSelectionEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XSelectionEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSelectionEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 41, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMappingEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMappingEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XMappingEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMappingEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMappingEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMappingEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMappingEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 42, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMappingEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMappingEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XMappingEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMappingEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMappingEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMappingEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMappingEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 43, "score": 212671.04880770162 }, { "content": 
"#[test]\n\nfn bindgen_test_layout_XMapEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XMapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 44, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGravityEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGravityEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XGravityEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGravityEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGravityEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGravityEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGravityEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 45, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XErrorEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XErrorEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XErrorEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XErrorEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XErrorEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XErrorEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XErrorEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 46, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMotionEvent() {\n\n 
assert_eq!(\n\n ::std::mem::size_of::<XMotionEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XMotionEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMotionEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMotionEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMotionEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMotionEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 47, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XNoExposeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XNoExposeEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XNoExposeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XNoExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XNoExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XNoExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XNoExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 48, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XUnmapEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XUnmapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XUnmapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XUnmapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XUnmapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XUnmapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XUnmapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 49, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XExposeEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XExposeEvent>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(XExposeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 50, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XButtonEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XButtonEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XButtonEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XButtonEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XButtonEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XButtonEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XButtonEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 51, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGenericEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGenericEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XGenericEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGenericEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGenericEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGenericEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGenericEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 52, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XKeymapEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XKeymapEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XKeymapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XKeymapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XKeymapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XKeymapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XKeymapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 53, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XPropertyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XPropertyEvent>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(XPropertyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XPropertyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XPropertyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XPropertyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XPropertyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 54, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XNoExposeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XNoExposeEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XNoExposeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XNoExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XNoExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XNoExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XNoExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 55, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XVisibilityEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XVisibilityEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XVisibilityEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XVisibilityEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XVisibilityEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XVisibilityEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XVisibilityEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 56, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XReparentEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XReparentEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XReparentEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XReparentEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XReparentEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XReparentEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XReparentEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 57, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMotionEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMotionEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XMotionEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMotionEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMotionEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMotionEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMotionEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 58, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGravityEvent() {\n\n 
assert_eq!(\n\n ::std::mem::size_of::<XGravityEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XGravityEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGravityEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGravityEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGravityEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGravityEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 59, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCrossingEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCrossingEvent>(),\n\n 104usize,\n\n concat!(\"Size of: \", stringify!(XCrossingEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCrossingEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCrossingEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCrossingEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCrossingEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 60, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XSelectionEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSelectionEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XSelectionEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSelectionEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSelectionEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XSelectionEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSelectionEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 61, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_XVisibilityEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XVisibilityEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XVisibilityEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XVisibilityEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XVisibilityEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XVisibilityEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XVisibilityEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 62, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCirculateEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCirculateEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XCirculateEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCirculateEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCirculateEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCirculateEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCirculateEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 63, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XUnmapEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XUnmapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XUnmapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XUnmapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XUnmapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XUnmapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XUnmapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 64, "score": 212671.04880770162 }, { "content": 
"#[test]\n\nfn bindgen_test_layout_XCirculateEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCirculateEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XCirculateEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCirculateEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCirculateEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCirculateEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCirculateEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 65, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XConfigureEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XConfigureEvent>(),\n\n 88usize,\n\n concat!(\"Size of: \", stringify!(XConfigureEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XConfigureEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XConfigureEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XConfigureEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XConfigureEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 66, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XKeymapEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XKeymapEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XKeymapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XKeymapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XKeymapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XKeymapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XKeymapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 67, "score": 212671.04880770162 }, { 
"content": "#[test]\n\nfn bindgen_test_layout_XReparentEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XReparentEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XReparentEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XReparentEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XReparentEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XReparentEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XReparentEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 68, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XPropertyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XPropertyEvent>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(XPropertyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XPropertyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XPropertyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XPropertyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XPropertyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 69, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XKeyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XKeyEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XKeyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XKeyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XKeyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XKeyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XKeyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 70, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_XConfigureEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XConfigureEvent>(),\n\n 88usize,\n\n concat!(\"Size of: \", stringify!(XConfigureEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XConfigureEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XConfigureEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XConfigureEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XConfigureEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 71, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XErrorEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XErrorEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XErrorEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XErrorEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XErrorEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XErrorEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XErrorEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 72, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCrossingEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCrossingEvent>(),\n\n 104usize,\n\n concat!(\"Size of: \", stringify!(XCrossingEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCrossingEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCrossingEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCrossingEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCrossingEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 73, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_XButtonEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XButtonEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XButtonEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XButtonEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XButtonEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XButtonEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XButtonEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 74, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XExposeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XExposeEvent>(),\n\n 64usize,\n\n concat!(\"Size of: \", stringify!(XExposeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 75, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XKeyEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XKeyEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XKeyEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XKeyEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XKeyEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XKeyEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XKeyEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 76, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XColormapEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XColormapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XColormapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XColormapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XColormapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XColormapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XColormapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 77, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGenericEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGenericEvent>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(XGenericEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGenericEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGenericEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGenericEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGenericEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 78, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMapEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XMapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 79, "score": 212671.04880770162 }, { "content": "#[test]\n\nfn bindgen_test_layout_XColormapEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XColormapEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XColormapEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XColormapEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XColormapEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XColormapEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XColormapEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 80, "score": 212671.04880770162 }, { "content": "pub fn error(err: &str) -> TokenStream {\n\n let mut tb = TokenBuilder::new();\n\n tb.add(\"compile_error ! (\").string(err).add(\") ;\");\n\n tb.end()\n\n}\n\n\n", "file_path": "render/micro_serde/derive/src/macro_lib.rs", "rank": 81, "score": 212057.18377395475 }, { "content": "#[test]\n\nfn bindgen_test_layout_XSetWindowAttributes() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSetWindowAttributes>(),\n\n 112usize,\n\n concat!(\"Size of: \", stringify!(XSetWindowAttributes))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSetWindowAttributes>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSetWindowAttributes))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<XSetWindowAttributes>())).background_pixmap as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSetWindowAttributes),\n\n \"::\",\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 82, "score": 208517.435303071 }, { "content": "#[test]\n\nfn bindgen_test_layout_XSetWindowAttributes() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSetWindowAttributes>(),\n\n 112usize,\n\n concat!(\"Size of: \", stringify!(XSetWindowAttributes))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSetWindowAttributes>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSetWindowAttributes))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n 
&(*(::std::ptr::null::<XSetWindowAttributes>())).background_pixmap as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSetWindowAttributes),\n\n \"::\",\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 83, "score": 208517.435303071 }, { "content": "#[test]\n\nfn bindgen_test_layout_XFocusChangeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XFocusChangeEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XFocusChangeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XFocusChangeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XFocusChangeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XFocusChangeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XFocusChangeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 84, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XFocusChangeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XFocusChangeEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XFocusChangeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XFocusChangeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XFocusChangeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XFocusChangeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XFocusChangeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 85, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XResizeRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XResizeRequestEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XResizeRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XResizeRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", 
stringify!(XResizeRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XResizeRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XResizeRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 86, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMapRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMapRequestEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XMapRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMapRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMapRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMapRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMapRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 87, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XClientMessageEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XClientMessageEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XClientMessageEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XClientMessageEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XClientMessageEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XClientMessageEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XClientMessageEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 88, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGraphicsExposeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGraphicsExposeEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XGraphicsExposeEvent))\n\n );\n\n assert_eq!(\n\n 
::std::mem::align_of::<XGraphicsExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGraphicsExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGraphicsExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGraphicsExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 89, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XConfigureRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XConfigureRequestEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XConfigureRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XConfigureRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XConfigureRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XConfigureRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XConfigureRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 90, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XMapRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XMapRequestEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XMapRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XMapRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XMapRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XMapRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XMapRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 91, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCirculateRequestEvent() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<XCirculateRequestEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XCirculateRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCirculateRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCirculateRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCirculateRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCirculateRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 92, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XSelectionRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSelectionRequestEvent>(),\n\n 80usize,\n\n concat!(\"Size of: \", stringify!(XSelectionRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSelectionRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSelectionRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XSelectionRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSelectionRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 93, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XCirculateRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XCirculateRequestEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XCirculateRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XCirculateRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XCirculateRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XCirculateRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XCirculateRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": 
"render/bind/glx-sys/src/bindings.rs", "rank": 94, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XConfigureRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XConfigureRequestEvent>(),\n\n 96usize,\n\n concat!(\"Size of: \", stringify!(XConfigureRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XConfigureRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XConfigureRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XConfigureRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XConfigureRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 95, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XResizeRequestEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XResizeRequestEvent>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(XResizeRequestEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XResizeRequestEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XResizeRequestEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XResizeRequestEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XResizeRequestEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 96, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGenericEventCookie() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGenericEventCookie>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XGenericEventCookie))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGenericEventCookie>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGenericEventCookie))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGenericEventCookie>())).type_ as *const _ as usize },\n\n 0usize,\n\n 
concat!(\n\n \"Offset of field: \",\n\n stringify!(XGenericEventCookie),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 97, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XSelectionClearEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XSelectionClearEvent>(),\n\n 56usize,\n\n concat!(\"Size of: \", stringify!(XSelectionClearEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XSelectionClearEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XSelectionClearEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XSelectionClearEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XSelectionClearEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/x11-sys/src/bindings.rs", "rank": 98, "score": 208476.479324881 }, { "content": "#[test]\n\nfn bindgen_test_layout_XGraphicsExposeEvent() {\n\n assert_eq!(\n\n ::std::mem::size_of::<XGraphicsExposeEvent>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(XGraphicsExposeEvent))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<XGraphicsExposeEvent>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(XGraphicsExposeEvent))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<XGraphicsExposeEvent>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(XGraphicsExposeEvent),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "render/bind/glx-sys/src/bindings.rs", "rank": 99, "score": 208476.479324881 } ]
Rust
src/main.rs
jeff-lund/Molecule
c37aeffce87abea007a56b82ed0d1cd474cb8530
#![allow(dead_code)] #![allow(unused_assignments)] #![allow(unused_variables)] mod utility; use utility::*; mod atoms; use atoms::*; use std::env::args; use std::fs::File; use std::io::prelude::*; const DEBUG: bool = false; const MAX_GENERATIONS: u32 = 200; fn pprint (molecule: &Molecule, atoms: &Vec<&str>, peaks: &Vec<f32>) { for a in atoms { print!("|{} ", a); } println!("|"); for row in molecule.structure.genrows() { println!("{}", row); } println!(""); println!("Fitness: {}", molecule.fitness); print!("Carbon Assignments: {:?}", molecule.kind[0]); for entry in molecule.kind.iter().skip(1) { print!(", {:?}", entry); } println!(""); print!("Chemical Shifts: {}", molecule.chemical_shifts[0]); for entry in molecule.chemical_shifts.iter().skip(1) { print!(", {}", entry); } println!(""); print!("Experimental Chemical Shifts: {}", peaks[0]); for entry in peaks.iter().skip(1) { print!(", {}", entry); } println!(""); } fn main() -> std::io::Result<()> { let f = args().nth(1).expect("No file argument given"); let mut file = File::open(f)?; let mut buffer = String::new(); file.read_to_string(&mut buffer)?; let mut b = buffer.lines(); let chemical_formula = parse_elemental_analysis(b.next().expect("invalid file")); let peaks = parse_peaks(b.next().expect("invalid file")); let ihd = compute_ihd(&chemical_formula); let bonds = get_bonds(&chemical_formula); let atoms = get_atoms(&chemical_formula); if symmetrical_carbons(&chemical_formula, &peaks) { eprintln!("There must be a chemical shift for each carbon atom in the chemical formula"); std::process::exit(0); } if DEBUG == false { let mut population: Vec<Molecule> = Vec::new(); let mut pop = 0; while pop < POPULATION { let molecule = create_test_molecule(&atoms, bonds); if connected(&molecule.structure) && check_bonds(&molecule.structure, &atoms) { population.push(molecule); pop += 1; } } let mut best_molecule: Molecule = Molecule::new(Structure::zeros((1,1))); for gen in 0..MAX_GENERATIONS { println!("Generation {}", gen); 
for molecule in population.iter_mut() { molecule.assign_carbons(&atoms); molecule.compute_shifts(&atoms); molecule.fitness(&peaks); } best_molecule = best(&population); if best_molecule.fitness == 0.0 { break; } population = generate_children(population, &atoms, bonds) } println!("Best fit found"); pprint(&best_molecule, &atoms, &peaks); } if DEBUG == true { println!("********************DEBUG*************************************"); println!("{:?}", atoms); println!("IHD {}", ihd); println!("{:?}", chemical_formula); println!("{:?}", peaks); if symmetrical_carbons(&chemical_formula, &peaks) { println!("Symmetric carbons present"); } println!("Bonds Assigned: {}", bonds); println!("********************END DEBUG*********************************"); let mut tot = 0; let mut fcon = 0; let mut fbond = 0; let mut failed_both = 0; let mut pop = 0; let mut population = Vec::new(); while pop < POPULATION { let m = create_test_molecule(&atoms, bonds); let mut bond_flag = 0; let mut con_flag = 0; tot += 1; if !connected(&m.structure) { fcon += 1; con_flag = 1; } if !check_bonds(&m.structure, &atoms) { fbond += 1; bond_flag = 1; } if con_flag == 0 && bond_flag == 0 { population.push(m); pop += 1; } if con_flag == 1 && bond_flag == 1 { failed_both += 1; } } println!("Added {} molecules to population. Total molecules created: {}", pop, tot); println!("Failed bond check: {}", fbond); println!("Failed connected: {}", fcon); println!("Failed both: {}", failed_both); for i in 0..10 { population[i].assign_carbons(&atoms); println!("{:?}", population[i].kind); } } Ok(()) }
#![allow(dead_code)] #![allow(unused_assignments)] #![allow(unused_variables)] mod utility; use utility::*; mod atoms; use atoms::*; use std::env::args; use std::fs::File; use std::io::prelude::*; const DEBUG: bool = false; const MAX_GENERATIONS: u32 = 200; fn pprint (molecule: &Molecule, atoms: &Vec<&str>, peaks: &Vec<f32>) { for a in atoms { print!("|{} ", a); } println!("|"); for row in molecule.structure.genrows() { println!("{}", row); } println!(""); println!("Fitness: {}", molecule.fitness); print!("Carbon Assignments: {:?}", molecule.kind[0]); for entry in molecule.kind.iter().skip(1) { print!(", {:?}", entry); } println!(""); print!("Chemical Shifts: {}", molecule.chemical_shifts[0]); for entry in molecule.chemical_shifts.iter().skip(1) { print!(", {}", entry); } println!("");
fn main() -> std::io::Result<()> { let f = args().nth(1).expect("No file argument given"); let mut file = File::open(f)?; let mut buffer = String::new(); file.read_to_string(&mut buffer)?; let mut b = buffer.lines(); let chemical_formula = parse_elemental_analysis(b.next().expect("invalid file")); let peaks = parse_peaks(b.next().expect("invalid file")); let ihd = compute_ihd(&chemical_formula); let bonds = get_bonds(&chemical_formula); let atoms = get_atoms(&chemical_formula); if symmetrical_carbons(&chemical_formula, &peaks) { eprintln!("There must be a chemical shift for each carbon atom in the chemical formula"); std::process::exit(0); } if DEBUG == false { let mut population: Vec<Molecule> = Vec::new(); let mut pop = 0; while pop < POPULATION { let molecule = create_test_molecule(&atoms, bonds); if connected(&molecule.structure) && check_bonds(&molecule.structure, &atoms) { population.push(molecule); pop += 1; } } let mut best_molecule: Molecule = Molecule::new(Structure::zeros((1,1))); for gen in 0..MAX_GENERATIONS { println!("Generation {}", gen); for molecule in population.iter_mut() { molecule.assign_carbons(&atoms); molecule.compute_shifts(&atoms); molecule.fitness(&peaks); } best_molecule = best(&population); if best_molecule.fitness == 0.0 { break; } population = generate_children(population, &atoms, bonds) } println!("Best fit found"); pprint(&best_molecule, &atoms, &peaks); } if DEBUG == true { println!("********************DEBUG*************************************"); println!("{:?}", atoms); println!("IHD {}", ihd); println!("{:?}", chemical_formula); println!("{:?}", peaks); if symmetrical_carbons(&chemical_formula, &peaks) { println!("Symmetric carbons present"); } println!("Bonds Assigned: {}", bonds); println!("********************END DEBUG*********************************"); let mut tot = 0; let mut fcon = 0; let mut fbond = 0; let mut failed_both = 0; let mut pop = 0; let mut population = Vec::new(); while pop < POPULATION { let m = 
create_test_molecule(&atoms, bonds); let mut bond_flag = 0; let mut con_flag = 0; tot += 1; if !connected(&m.structure) { fcon += 1; con_flag = 1; } if !check_bonds(&m.structure, &atoms) { fbond += 1; bond_flag = 1; } if con_flag == 0 && bond_flag == 0 { population.push(m); pop += 1; } if con_flag == 1 && bond_flag == 1 { failed_both += 1; } } println!("Added {} molecules to population. Total molecules created: {}", pop, tot); println!("Failed bond check: {}", fbond); println!("Failed connected: {}", fcon); println!("Failed both: {}", failed_both); for i in 0..10 { population[i].assign_carbons(&atoms); println!("{:?}", population[i].kind); } } Ok(()) }
print!("Experimental Chemical Shifts: {}", peaks[0]); for entry in peaks.iter().skip(1) { print!(", {}", entry); } println!(""); }
function_block-function_prefix_line
[ { "content": "/// Validate the overall correctness of a chromosome\n\nfn validate_chromosome(chromosome: &Chromosome, atoms: &Vec<&str>, num_bonds: u32) -> bool {\n\n // check proper number of bonds exist\n\n let bonds: u32 = chromosome.iter().sum();\n\n if bonds != num_bonds {\n\n return false;\n\n }\n\n // check to degree of each atom for bounds and connectedness of the moleculer structure\n\n let temp = chromosome_to_structure(chromosome);\n\n check_bonds(&temp, atoms) && connected(&temp)\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 1, "score": 88312.74678009405 }, { "content": "/// Randomly creates sample molecule with no guarantee of validity\n\n/// A chromosome is the concatenation of the values from the upper\n\n/// triangle exluding the diagonal\n\n// TODO This algorithm is super inefficient find better way to generate graph\n\npub fn create_test_molecule(atoms: &Vec<&str>, bonds: u32) -> Molecule {\n\n let mut rng = thread_rng();\n\n let l = atoms.len();\n\n let num_bonds = bonds;\n\n let chromosome_length: usize = (l * l - l)/2;\n\n let mut chromosome: Chromosome = vec![0; chromosome_length];\n\n\n\n let mut r: usize;\n\n for _ in 0..num_bonds {\n\n r = rng.gen_range(0, chromosome_length);\n\n // can't have more than a triple bond to any atom\n\n while chromosome[r] > 3 {\n\n r = rng.gen_range(0, chromosome_length);\n\n }\n\n chromosome[r] += 1;\n\n }\n\n Molecule::new(chromosome_to_structure(&chromosome))\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 2, "score": 85061.68139103566 }, { "content": "/// recombine two parents to form a child chromosome\n\n// TODO alter recombination probabilitoes based off of relative fitness\n\n// TODO alter RP iteritively throughout generations\n\n// TODO could send in RP and MP to iter alter thoughout gens\n\npub fn recombine(p1: &Molecule, p2: &Molecule, atoms: &Vec<&str>, num_bonds: u32) -> Molecule {\n\n let mut rng = thread_rng();\n\n let mut rand;\n\n let mut child: Chromosome = Vec::new();\n\n let chrom0 
= structure_to_chromosome(&p1.structure);\n\n let chrom1 = structure_to_chromosome(&p2.structure);\n\n let mut runs = 0;\n\n loop {\n\n runs += 1;\n\n if runs > 1000000 {\n\n panic!(\"Recombine stuck in loop\");\n\n }\n\n for i in 0..chrom1.len() {\n\n rand = rng.gen_range(0, 2);\n\n match rand {\n\n 0 => child.push(chrom0[i]),\n\n 1 => child.push(chrom1[i]),\n\n _ => panic!(\"recombine: generated rand out of range\"),\n\n }\n\n }\n", "file_path": "src/atoms.rs", "rank": 3, "score": 80041.2536632778 }, { "content": "/// Returns the maximum number of bonds a given element can have\n\nfn match_element(element: &str) -> u32 {\n\n match element {\n\n \"C\" => 4,\n\n \"O\" => 2,\n\n \"N\" => 3,\n\n \"Cl\" | \"Br\" | \"H\" => 1,\n\n _ => panic!(\"unknown element found\")\n\n }\n\n}\n", "file_path": "src/atoms.rs", "rank": 4, "score": 71982.95125894404 }, { "content": "// Check if a moleculer structure is a connected graph using BFS\n\n// components = 1 implies all vertices/atoms are in a single connected component\n\npub fn connected(structure: &Structure) -> bool {\n\n let mut components = 0;\n\n let len = s_len(structure);\n\n let mut marks: Vec<usize> = vec![0; len];\n\n let mut processing: Vec<usize> = Vec::new();\n\n let mut v;\n\n for i in 0..len {\n\n if marks[i] == 0 {\n\n components += 1;\n\n processing.push(marks[i]);\n\n while !processing.is_empty() {\n\n v = processing.remove(0);\n\n marks[v] += components;\n\n for j in 0..len {\n\n if marks[j] == 0 && structure[[v, j]] > 0 {\n\n processing.push(j);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n components == 1\n\n}\n", "file_path": "src/atoms.rs", "rank": 5, "score": 70631.80033221458 }, { "content": "/// Returns true if there is a peak in the input chemical shift peak data for each carbon in the chemical formula\n\npub fn symmetrical_carbons(chemical_formula: &HashMap<&str, i32>, chemical_peaks: &Vec<f32>) -> bool {\n\n let ncarbons = chemical_formula.get(\"C\").expect(\"No carbons present in formula\");\n\n let length 
= chemical_peaks.len() as i32;\n\n *ncarbons != length\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 6, "score": 68664.44489430371 }, { "content": "#[test]\n\nfn test_parse_peaks() {\n\n assert_eq!(parse_peaks(\"122.2,73.8, 10.0\"), vec![122.2, 73.8, 10.0]);\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 7, "score": 67295.92459270412 }, { "content": "// checks that each atom in a molecule is not exceeding its maximum possible bonds\n\n// C: 4 | O: 2 | N: 3 | Br: 1 | Cl: 1\n\n// TODO try to enumerate rows and ditch row_index\n\npub fn check_bonds(structure: &Structure, atoms: &Vec<&str>) -> bool {\n\n let mut row_index = 0;\n\n let mut count;\n\n for row in structure.genrows() {\n\n count = row.iter().fold(0, |acc, x| acc + x);\n\n if count > match_element(atoms[row_index]) {\n\n return false;\n\n }\n\n row_index += 1;\n\n }\n\n true\n\n}\n", "file_path": "src/atoms.rs", "rank": 8, "score": 64300.38510022595 }, { "content": "/// Collects input chemical shift peak values into vector\n\npub fn parse_peaks(peaks: &str) -> Vec<f32> {\n\n let mut ret: Vec<f32> = Vec::new();\n\n let buf = peaks.split(',');\n\n for p in buf {\n\n ret.push(p.trim().parse::<f32>().expect(\"peaks incorrectly formatted, expects floats\"));\n\n }\n\n ret.sort_by(|x, y| y.partial_cmp(x).unwrap());\n\n ret\n\n}\n", "file_path": "src/utility.rs", "rank": 9, "score": 62001.88679174093 }, { "content": "#[test]\n\nfn test_assign_carbons() {\n\n let mut test = Molecule::new(chromosome_to_structure(&vec![1,2,1,0,0,0,1,0,0,0]));\n\n let atoms = vec![\"C\",\"C\",\"O\",\"O\",\"N\"];\n\n test.assign_carbons(&atoms);\n\n assert_eq!(test.kind, vec![CarboxylicAcid, CN]);\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 10, "score": 61241.888177930545 }, { "content": "/// Mutates a random single bond in a molecule\n\nfn mutate_child(chromosome: &mut Chromosome, atoms: &Vec<&str>, num_bonds: u32) {\n\n let mut rng = thread_rng();\n\n let l = chromosome.len();\n\n let mut gene_1;\n\n let mut 
gene_2;\n\n let mut runs= 0;\n\n loop {\n\n runs += 1;\n\n if runs > 10_000 {\n\n break;\n\n }\n\n gene_1 = rng.gen_range(0, l);\n\n gene_2 = rng.gen_range(0, l);\n\n if gene_1 != gene_2 {\n\n if chromosome[gene_1] == 0 { continue; }\n\n chromosome[gene_1] -= 1;\n\n chromosome[gene_2] += 1;\n\n if validate_chromosome(chromosome, atoms, num_bonds) {\n\n break;\n\n } else {\n\n chromosome[gene_1] += 1;\n\n chromosome[gene_2] -= 1;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/atoms.rs", "rank": 11, "score": 60064.12212757417 }, { "content": "/// Returns assigned bonds\n\n/// When building matrices only heavy atoms are assigned, hydrogen is ignored\n\n/// The total bonds are needed to check the final structure has exactly the number of\n\n/// open bonds to fill in with hydrogens\n\n/// Total bonds = (4 * carbon + 2 * oxygen + 3 * nitrogen + hydrogen + halogens) / 2\n\n/// assigned bonds = total bonds - hydrogen\n\npub fn get_bonds(chemical_formula: &HashMap<&str, i32>) -> u32 {\n\n let mut total_bonds = 0;\n\n match chemical_formula.get(\"C\") {\n\n Some(n) => total_bonds += *n as u32 * 4,\n\n None => (),\n\n }\n\n match chemical_formula.get(\"O\") {\n\n Some(n) => total_bonds += *n as u32 * 2,\n\n None => (),\n\n }\n\n match chemical_formula.get(\"N\") {\n\n Some(n) => total_bonds += *n as u32 * 3,\n\n None => (),\n\n }\n\n match chemical_formula.get(\"Cl\") {\n\n Some(n) => total_bonds += *n as u32,\n\n None => (),\n\n }\n\n match chemical_formula.get(\"Br\") {\n\n Some(n) => total_bonds += *n as u32,\n\n None => (),\n\n }\n\n let h = match chemical_formula.get(\"H\") {\n\n Some(n) => *n as u32,\n\n None => 0,\n\n };\n\n total_bonds = (total_bonds + h) / 2;\n\n let assigned_bonds = total_bonds - h;\n\n assigned_bonds\n\n}\n", "file_path": "src/atoms.rs", "rank": 12, "score": 58732.044650983895 }, { "content": "/// Finds the molecule from a given population with the lowest fitness\n\n/// and returns it as the best fit in the generation.\n\npub fn best(population: 
&Vec<Molecule>) -> Molecule {\n\n let mut min = population[0].fitness;\n\n let mut best = 0;\n\n for i in 1..population.len() {\n\n if population[i].fitness < min {\n\n min = population[i].fitness;\n\n best = i;\n\n }\n\n }\n\n population[best].clone()\n\n}\n", "file_path": "src/atoms.rs", "rank": 13, "score": 58258.72958623864 }, { "content": "fn is_benzene(edges: &Vec<(usize, usize)>, func_groups: &Vec<FunctionalGroup>) -> bool {\n\n if edges.len() != 6 {\n\n return false;\n\n }\n\n for (x, y) in edges {\n\n if func_groups[*x] != Alkene || func_groups[*y] != Alkene {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n", "file_path": "src/atoms.rs", "rank": 14, "score": 55444.74185366287 }, { "content": "/// Retruns the length of the molecule matrix derived from this chromosome\n\n/// Derived from the quadratic equation\n\nfn molecule_len(chromosome: &Chromosome) -> usize {\n\n ((1.0 + (1.0 + 8.0 * chromosome.len() as f32).sqrt()) / 2.0) as usize\n\n}\n", "file_path": "src/atoms.rs", "rank": 15, "score": 54056.31178380272 }, { "content": "/// Correct a chromosome such that it posesses the correct number of bonds\n\nfn correct_child(child: &mut Chromosome, parent0: &Chromosome, parent1: &Chromosome, num_bonds: u32) {\n\n let mut rng = thread_rng();\n\n let mut r;\n\n let len = child.len();\n\n // correct number of bonds\n\n let mut current_bonds: u32 = child.iter().sum();\n\n let mut runs = 0;\n\n while current_bonds != num_bonds {\n\n runs += 1;\n\n if runs > 100_000 {\n\n panic!(\"correct child stuck in loop\");\n\n }\n\n // delete existing bond\n\n if current_bonds > num_bonds {\n\n r = rng.gen_range(0, len);\n\n while child[r] == 0 {\n\n r = rng.gen_range(0, len);\n\n }\n\n current_bonds -= child[r];\n\n child[r] = 0;\n", "file_path": "src/atoms.rs", "rank": 16, "score": 52961.85350241723 }, { "content": "/// Generates new child generation from parent population\n\n/// Keeps best half of parent population and recombines parents to form children\n\n/// Returns child 
population\n\npub fn generate_children(mut population:Vec<Molecule>,\n\n atoms: &Vec<&str>, num_bonds: u32)\n\n -> Vec<Molecule> {\n\n let mut children: Vec<Molecule> = Vec::new();\n\n let mut rng = thread_rng();\n\n // sort population\n\n population.sort_by(|a, b| (&a.fitness).partial_cmp(&b.fitness).unwrap_or(Less));\n\n // Take top half\n\n population.truncate(POPULATION/2);\n\n // Shuffle parents\n\n rng.shuffle(&mut population);\n\n // Create 2 children per pair\n\n while !population.is_empty() {\n\n let mol1 = population.pop().expect(\"odd number of molecules in population\");\n\n let mol2 = population.pop().expect(\"odd number of molecules in population\");\n\n let child1 = recombine(&mol1, &mol2, atoms, num_bonds);\n\n let child2 = recombine(&mol1, &mol2, atoms, num_bonds);\n\n children.push(child1);\n\n children.push(child2);\n\n children.push(mol1);\n\n children.push(mol2);\n\n }\n\n children\n\n}\n", "file_path": "src/atoms.rs", "rank": 17, "score": 49659.90512664813 }, { "content": "#[test]\n\nfn test_ihd() {\n\n let caffeine: HashMap<&str, i32> = [(\"C\", 8), (\"H\", 10),\n\n (\"N\", 4), (\"O\", 2)].iter().cloned().collect();\n\n assert_eq!(compute_ihd(&caffeine), 6);\n\n let acetic_acid: HashMap<&str, i32> = [(\"C\", 2), (\"H\", 4),\n\n (\"O\", 2)].iter().cloned().collect();\n\n assert_eq!(compute_ihd(&acetic_acid), 1);\n\n}\n", "file_path": "src/utility.rs", "rank": 18, "score": 48082.62808749495 }, { "content": "#[test]\n\nfn test_parse_elemental_analysis() {\n\n let answer: HashMap<&str, i32> = [(\"C\", 4), (\"H\", 4), (\"O\", 2), (\"Cl\", 1), (\"Br\", 2), (\"N\", 2)].iter().cloned().collect();\n\n assert_eq!(parse_elemental_analysis(\"C4H4O2Cl1Br2N2\"), answer);\n\n}\n", "file_path": "src/utility.rs", "rank": 19, "score": 44345.651475548606 }, { "content": "#[test]\n\nfn test_connected() {\n\n // acyclic\n\n assert!(connected(&chromosome_to_structure(&vec![1,0,0,2,1,0])));\n\n // cyclic\n\n 
assert!(connected(&chromosome_to_structure(&vec![1,0,0,1,1,0,0,1,0,1])));\n\n}\n", "file_path": "src/atoms.rs", "rank": 20, "score": 41467.994529816286 }, { "content": "#[test]\n\nfn test_is_benzene() {\n\n let edges = vec![(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (0, 5)];\n\n let func_groups = vec![Alkene,Alkene,Alkene,Alkene,Alkene,Alkene];\n\n assert!(is_benzene(&edges, &func_groups));\n\n}\n", "file_path": "src/atoms.rs", "rank": 21, "score": 41467.994529816286 }, { "content": "#[test]\n\nfn test_edges() {\n\n assert_eq!(edges(&chromosome_to_structure(&vec![1,1,1])), vec![(0,1), (0,2), (1,2)]);\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 22, "score": 41467.994529816286 }, { "content": "/// Creates vector with individual heavy atoms in C - O - N - Cl - Br order.\n\n/// Hydrogen atoms are not included in this vector\n\n// Rework ugly copy paste code\n\npub fn get_atoms(chemical_formula: &HashMap<&str, i32>) -> Vec<&'static str> {\n\n let mut v: Vec<&str> = Vec::new();\n\n match chemical_formula.get(\"C\") {\n\n Some(n) => { for _ in 0..*n { v.push(\"C\"); }},\n\n None => (),\n\n }\n\n match chemical_formula.get(\"O\") {\n\n Some(n) => { for _ in 0..*n { v.push(\"O\"); }},\n\n None => (),\n\n }\n\n match chemical_formula.get(\"N\") {\n\n Some(n) => { for _ in 0..*n { v.push(\"N\");}},\n\n None => (),\n\n }\n\n match chemical_formula.get(\"Cl\") {\n\n Some(n) => {for _ in 0..*n { v.push(\"Cl\"); }},\n\n None => (),\n\n }\n\n match chemical_formula.get(\"Br\") {\n\n Some(n) => { for _ in 0..*n { v.push(\"Br\"); }},\n\n None => (),\n\n }\n\n v\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 23, "score": 40792.148972546885 }, { "content": "#[test]\n\nfn test_mol_len() {\n\n assert_eq!(molecule_len(&vec![0,0,0,0,0,0,0,0,0,0]), 5);\n\n}\n", "file_path": "src/atoms.rs", "rank": 24, "score": 39835.37391833991 }, { "content": "/// Gets all adjacent carbon nodes from a base node\n\nfn get_adjacent_carbons(\n\n base_node: usize, edges: &Vec<(usize, usize)>, atoms: 
&Vec<&str>)\n\n-> Vec<usize> {\n\n let mut ret = Vec::new();\n\n for (x, y) in edges {\n\n if *x == base_node && atoms[*y] == \"C\" && !ret.contains(y) {\n\n ret.push(*y);\n\n } else if *y == base_node && atoms[*x] == \"C\" && !ret.contains(x) {\n\n ret.push(*x);\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 25, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn test_build_tree() {\n\n let chrom = vec![1,1,0,0,0,0,0,0\n\n ,0,1,1,0,0,0,0,\n\n 0,0,1,0,0,1,\n\n 0,0,0,0,0,\n\n 0,1,1,0,\n\n 0,0,0,\n\n 0,0,\n\n 0];\n\n let atoms = vec![\"C\",\"C\",\"C\",\"C\",\"C\",\"C\",\"C\",\"O\",\"O\"];\n\n println!(\"{:?}\", chromosome_to_structure(&chrom));\n\n let test = build_tree(0, &chromosome_to_structure(&chrom), &atoms);\n\n let answer = Tree { alpha: vec![1, 2], beta: vec![3, 4, 5],\n\n gamma: vec![6], delta: Vec::new(), epsilon: Vec::new() };\n\n let test2 = build_tree(4, &chromosome_to_structure(&chrom), &atoms);\n\n let answer2 = Tree{ alpha: vec![1, 6], beta: vec![0, 3], gamma: vec![2], delta: vec![5], epsilon: Vec::new()};\n\n assert_eq!(test, answer);\n\n assert_eq!(test2, answer2);\n\n}\n", "file_path": "src/atoms.rs", "rank": 26, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn test_rings_present() {\n\n let chrom1: Chromosome = vec![1,0,0,0,1,0,0,1,1,0];\n\n let test1 = chromosome_to_structure(&chrom1);\n\n let chrom2: Chromosome = vec![1,0,0,1,1,1,1,0,0,0,0,0,0,0,1];\n\n let test2 = chromosome_to_structure(&chrom2);\n\n assert_eq!(rings_present(&test1), None);\n\n assert_eq!(rings_present(&test2), Some(vec![(0,4),(0,5),(4,5)]));\n\n}\n", "file_path": "src/atoms.rs", "rank": 27, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn test_reduce_structure() {\n\n assert_eq!(reduce_structure(&chromosome_to_structure(&vec![2,3,4,2,2,2,2,0,0,0])),\n\n chromosome_to_structure(&vec![1,1,1,1,1,1,1,0,0,0]));\n\n}\n", "file_path": "src/atoms.rs", "rank": 28, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn 
test_generate_children() {\n\n let atoms = vec![\"C\", \"C\",\"C\",\"C\",\"C\",\"O\",\"Cl\"];\n\n let bonds = 7;\n\n let mut population = Vec::new();\n\n let mut pop = 0;\n\n let mut runs = 0;\n\n let mut con_cnt = 0;\n\n let mut bnd_cnt = 0;\n\n while pop < POPULATION {\n\n let con: bool;\n\n let bnd: bool;\n\n runs += 1;\n\n if runs > 100000 {\n\n println!(\"Failed bonds: {}\", bnd_cnt);\n\n println!(\"Failed connection: {}\", con_cnt);\n\n panic!(\"creation stuck in loop\");\n\n }\n\n let mol = create_test_molecule(&atoms, bonds);\n\n bnd = check_bonds(&mol.structure, &atoms);\n\n con = connected(&mol.structure);\n", "file_path": "src/atoms.rs", "rank": 29, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn test_elucidate_rings() {\n\n let edges = vec![(1,2), (1,3), (2, 3), (5, 6), (5, 8),(6, 7), (7, 8)];\n\n let answer = vec![vec![1, 2, 3], vec![5, 6,7, 8]];\n\n assert_eq!(elucidate_rings(&edges), answer);\n\n}\n", "file_path": "src/atoms.rs", "rank": 30, "score": 39835.37391833991 }, { "content": "#[test]\n\nfn test_check_bonds_good() {\n\n let atoms = vec![\"C\", \"O\", \"Cl\"];\n\n let test = chromosome_to_structure(&vec![2,1,0]);\n\n assert!(check_bonds(&test, &atoms));\n\n\n\n}\n", "file_path": "src/atoms.rs", "rank": 31, "score": 38357.34241121759 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_check_bonds_panic() {\n\n let atoms = vec![\"C\", \"O\", \"Cl\"];\n\n let test = chromosome_to_structure(&vec![2,1,1]);\n\n assert!(check_bonds(&test, &atoms));\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 32, "score": 38357.34241121759 }, { "content": "/// Returns the length of a Structure.\n\n/// As all Structures are square matrices either dimension can be returned\n\nfn s_len(s: &Structure) -> usize {\n\n s.dim().0\n\n}\n", "file_path": "src/atoms.rs", "rank": 33, "score": 37896.01976671112 }, { "content": "/// Reduce moleculer structure to eliminate double/triple bonds\n\nfn reduce_structure(structure: &Structure) -> Structure {\n\n let len 
= s_len(structure);\n\n let mut reduced = Structure::zeros((len, len));\n\n for i in 0..len {\n\n for j in 0..len {\n\n if structure[[i,j]] > 0 {\n\n reduced[[i, j]] = 1;\n\n }\n\n }\n\n }\n\n reduced\n\n}\n", "file_path": "src/atoms.rs", "rank": 34, "score": 35076.0154973668 }, { "content": "// PROBABLY DONT NEED THIS\n\n// Computes the index of gen deficiency to find level of unsaturation\n\n// 1 degree of unsaturation = 1 ring or double bond in final structure\n\n// Higher IHD's are more computationally intensive\n\n// IHD = (2C + 2 + N - H - X) / 2 where X is halogens\n\npub fn compute_ihd(elements: &HashMap<&str, i32>) -> i32 {\n\n let mut ihd: i32 = 2;\n\n for (k, v) in elements.iter() {\n\n match k.as_ref() {\n\n \"C\" => ihd += 2 * v,\n\n \"H\" => ihd -= v,\n\n \"N\" => ihd += v,\n\n \"O\" => continue,\n\n \"Cl\" => ihd -= v,\n\n \"Br\" => ihd -= v,\n\n _ => panic!(\"Unrecognized element in chemical formula\"),\n\n }\n\n }\n\n ihd/2\n\n}\n", "file_path": "src/utility.rs", "rank": 35, "score": 34340.642850026416 }, { "content": "/// Builds a tree of attached carbon atoms for each layer of chemical shift search\n\n/// atoms refered to by index in atoms vec\n\nfn build_tree(start: usize, structure: &Structure, atoms: &Vec<&str>)\n\n-> Tree {\n\n let mut edges = edges(&structure);\n\n // START component vectors\n\n let alpha: Vec<usize>;\n\n let mut beta: Vec<usize> = Vec::new();\n\n let mut gamma: Vec<usize> = Vec::new();\n\n let mut delta: Vec<usize> = Vec::new();\n\n let mut epsilon: Vec<usize> = Vec::new();\n\n // END component vectors\n\n\n\n alpha = get_adjacent_carbons(start, &edges, atoms);\n\n // pull out any edge that contains the root\n\n edges.retain(|(x, y)| *x != start && *y != start);\n\n // beta layer\n\n for node in alpha.iter() {\n\n let mut temp = get_adjacent_carbons(*node, &edges, atoms);\n\n beta.append(&mut temp);\n\n edges.retain(|(x, y)| x != node && y != node);\n\n }\n", "file_path": "src/atoms.rs", "rank": 36, "score": 
34132.92411494664 }, { "content": "// creates a chromosome vector from a molecule matrix\n\npub fn structure_to_chromosome(structure: &Structure) -> Chromosome {\n\n let mut chromosome = Chromosome::new();\n\n let len = s_len(structure);\n\n for i in 0..len {\n\n for j in 0..len {\n\n if j <= i { continue; }\n\n chromosome.push(structure[[i, j]]);\n\n }\n\n }\n\n chromosome\n\n}\n", "file_path": "src/atoms.rs", "rank": 37, "score": 33382.04751208359 }, { "content": "/// Transforms a chromsome into its matrix representation\n\npub fn chromosome_to_structure(chrom: &Chromosome) -> Structure {\n\n let length = molecule_len(chrom);\n\n let mut structure = Structure::zeros((length, length));\n\n let mut vec_index = 0;\n\n for i in 0..length {\n\n for j in 0..length {\n\n if j <= i { continue; }\n\n structure[[i, j]] = chrom[vec_index];\n\n structure[[j, i]] = chrom[vec_index];\n\n vec_index += 1;\n\n }\n\n }\n\n structure\n\n}\n", "file_path": "src/atoms.rs", "rank": 38, "score": 33379.47635067287 }, { "content": "// Parses elemental analysis into a HashMap for easy access\n\n// to the elements of the chemical formula\n\n// Requires explicit counting for all atoms in formula, i.e. C2H6O1 not C2H6O\n\npub fn parse_elemental_analysis(formula: &str) -> HashMap<&str, i32> {\n\n let mut chemical_formula = HashMap::new();\n\n let valid_elements: HashSet<_> = [\"C\", \"H\", \"O\", \"N\", \"Cl\", \"Br\"].iter().cloned().collect();\n\n let mut elements: Vec<&str> = formula.split(char::is_numeric).collect();\n\n elements.retain(|e| e != &\"\");\n\n for e in &elements {\n\n if !valid_elements.contains(e) {\n\n panic!(\"Invalid symbol in chemical formula\")\n\n }\n\n }\n\n let mut quantities: Vec<&str> = formula.split(char::is_alphabetic).collect();\n\n quantities.retain(|e| e != &\"\");\n\n if elements.len() != quantities.len() {\n\n panic!(\"elements and quantities do not match. 
Invalid chemical formula\");\n\n }\n\n for (elem, quant) in elements.iter().zip(quantities.iter()) {\n\n chemical_formula.insert(\n\n *elem,\n\n quant.parse::<i32>().expect(\"Not a number\"),\n\n );\n\n }\n\n chemical_formula\n\n}\n", "file_path": "src/utility.rs", "rank": 39, "score": 33277.15380826524 }, { "content": "/// Transforms an adjacency matrix into an edge list\n\n/// The lower triangle is excluded as it is a mirror of the upper triangle\n\nfn edges(structure: &Structure) -> (Vec<(usize, usize)>) {\n\n let mut ret: Vec<(usize, usize)> = Vec::new();\n\n let len = s_len(structure);\n\n for i in 0..len {\n\n for j in 0.. len {\n\n // only grab edges in the upper triangle\n\n if j <= i { continue; }\n\n for _ in 0..structure[[i, j]] {\n\n ret.push((i, j));\n\n }\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "src/atoms.rs", "rank": 40, "score": 32974.28917104891 }, { "content": "/// Detects if rings are present in molecule\n\n/// Returns vector containing edges in ring if cycle is present\n\n/// Returns None if no rings(cycles) are presen\n\n/// Nodes contains nodes that are still possibilities for existing in a ring\n\n/// Singletons contains nodes that are not in a ring\n\n// BUG this doesn't actually work for how rings_present returns values\n\n// TODO need to find hamiltonian paths within cycle\n\nfn rings_present(structure: &Structure) -> Option<Vec<(usize, usize)>> {\n\n let len = s_len(structure);\n\n let mut nodes: Vec<usize> = Vec::new();\n\n for i in 0..len { // TODO find better way to initialize this\n\n nodes.push(i as usize);\n\n }\n\n let mut degree: Vec<usize> = vec![0; len];\n\n let mut singletons: Vec<usize> = Vec::new();\n\n let mut edge_list = edges(&structure);\n\n\n\n while !nodes.is_empty() {\n\n //find degree of each nodes\n\n for (node1, node2) in edge_list.iter() {\n\n degree[*node1] += 1;\n\n degree[*node2] += 1;\n\n }\n\n if !degree.contains(&1) {\n\n return Some(edge_list);\n\n }\n\n // Add nodes with degree of 1 to singtons vec, 
theyre not in the ring\n", "file_path": "src/atoms.rs", "rank": 41, "score": 30457.06720913072 }, { "content": "/// Finds the number of rings within a molcule and elucidates specific members of each ring\n\nfn elucidate_rings(edges: &Vec<(usize, usize)>) -> Vec<Vec<usize>> {\n\n let mut components = 0;\n\n let mut nodes = Vec::new();\n\n let mut ret = Vec::new();\n\n for (a, b) in edges.iter() {\n\n nodes.push(*a);\n\n nodes.push(*b);\n\n }\n\n nodes.sort();\n\n nodes.dedup();\n\n\n\n let len = nodes.len();\n\n let mut marks: Vec<usize> = vec![0; len];\n\n let mut processing: Vec<usize> = Vec::new();\n\n let mut processed = HashSet::new();\n\n let mut v;\n\n for i in 0..len {\n\n if marks[i] == 0 {\n\n components += 1;\n\n processing.push(nodes[i]);\n", "file_path": "src/atoms.rs", "rank": 42, "score": 29184.02945124207 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\n\n\n// Parses elemental analysis into a HashMap for easy access\n\n// to the elements of the chemical formula\n\n// Requires explicit counting for all atoms in formula, i.e. 
C2H6O1 not C2H6O\n", "file_path": "src/utility.rs", "rank": 43, "score": 24433.482999905034 }, { "content": "extern crate ndarray;\n\nextern crate rand;\n\nuse atoms::rand::prelude::*;\n\nuse atoms::ndarray::prelude::*;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::cmp::Ordering::Less;\n\npub const POPULATION: usize = 64; // POPULATION must be even\n\npub const MUTATION_PROBABILITY: f64 = 0.10;\n\npub type Structure = Array2<u32>;\n\npub type Chromosome = Vec<u32>;\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum FunctionalGroup {\n\n CH3,\n\n CH2,\n\n CH,\n\n C,\n\n CCl,\n\n CBr,\n", "file_path": "src/atoms.rs", "rank": 45, "score": 17049.303392826034 }, { "content": " }\n\n // Calculates fitness of a molecule by taking RMSD of chemical shifts of carbon atoms\n\n // fitness = sqrt( 1/N sum((chem_shift_calc - chem_shift_exp)^2))\n\n // Input: chemical peaks data\n\n pub fn fitness(&mut self, experimental: &Vec<f32>) {\n\n assert_eq!(self.chemical_shifts.len(), experimental.len(), \"Peaks and chemical shifts not aligned.\");\n\n let zipped = self.chemical_shifts.iter().zip(experimental.iter());\n\n self.fitness = (1.0/experimental.len() as f32) * (zipped.fold(0.0, |acc, (calc, exp)| acc + (calc-exp).powi(2))).sqrt();\n\n }\n\n /// Assigns functional groups to each carbon.\n\n // this needs to be broken up and cleaned up\n\n pub fn assign_carbons(&mut self, atoms: &Vec<&str>) {\n\n let num_carbons = atoms.iter().filter(|&c| *c == \"C\").count();\n\n let edge_list = edges(&self.structure);\n\n for index in 0..num_carbons {\n\n let mut primary_edges = HashSet::new();\n\n let mut hydrogen_count;\n\n let mut dupes = HashSet::new();\n\n let mut alcohol = false;\n\n // get primary edges\n", "file_path": "src/atoms.rs", "rank": 46, "score": 17049.151417470915 }, { "content": " panic!(\"No molecular assignment made\");\n\n }\n\n }\n\n /// Computes chemical shift of each carbon in the structure\n\n /// See README for details about 
implemtation\n\n pub fn compute_shifts(&mut self, atoms: &Vec<&str>) {\n\n self.chemical_shifts.clear();\n\n let steric_corrects = arr2(&[\n\n [0.0, 0.0, -1.1, -3.4],\n\n [0.0, 0.0, -2.5, -7.5],\n\n [0.0, -3.7, -8.5, -10.0],\n\n [0.0, -8.4, -10.0, -12.5]]);\n\n let num_carbons = atoms.iter().filter(|&c| *c == \"C\").count();\n\n let edges = edges(&self.structure);\n\n // START delta-C assignment\n\n for i in 0..num_carbons {\n\n let mut shift = 0.0;\n\n let mut linear = true;\n\n let mut alkane = false;\n\n let mut aromatic = false;\n", "file_path": "src/atoms.rs", "rank": 47, "score": 17048.404324313862 }, { "content": " // Linear and branched alkane\n\n if linear {\n\n let tree = build_tree(i, &self.structure, atoms);\n\n shift = -2.3\n\n + 9.1 * tree.alpha.len() as f32\n\n + 9.4 * tree.beta.len() as f32\n\n - 2.5 * tree.gamma.len() as f32\n\n + 0.3 * tree.delta.len() as f32\n\n + 0.1 * tree.epsilon.len() as f32;\n\n let observed = tree.alpha.len();\n\n // adjust based on steric corrections\n\n for node in tree.alpha {\n\n let degree = get_adjacent_carbons(node, &edges, atoms).len();\n\n shift += steric_corrects[[observed-1, degree-1]];\n\n }\n\n // Add substiuent effects\n\n }\n\n // linear and branched alkenes\n\n // aromatic rings\n\n self.chemical_shifts.push(shift);\n\n }\n\n // END delta-C assignment\n\n }\n\n // END impls for Molecule\n\n}\n\n#[test]\n", "file_path": "src/atoms.rs", "rank": 48, "score": 17047.833779197317 }, { "content": " pub kind: Vec<FunctionalGroup>,\n\n pub chemical_shifts: Vec<f32>,\n\n pub fitness: f32,\n\n}\n\n#[derive(Debug, PartialEq)]\n\npub struct Tree {\n\n alpha: Vec<usize>,\n\n beta: Vec<usize>,\n\n gamma: Vec<usize>,\n\n delta: Vec<usize>,\n\n epsilon: Vec<usize>,\n\n}\n\n\n\n// fitness init is ugly\n\nimpl Molecule {\n\n /// Creates new Molecule from a given structure. 
Kind and chemical shift vectors are initialized empty\n\n pub fn new(structure: Structure) -> Self {\n\n let kind: Vec<FunctionalGroup> = Vec::new();\n\n let chemical_shifts: Vec<f32> = Vec::new();\n\n Molecule {structure, kind, chemical_shifts, fitness: -999.9}\n", "file_path": "src/atoms.rs", "rank": 49, "score": 17047.436285681037 }, { "content": " correct_child(&mut child, &chrom0, &chrom1, num_bonds);\n\n if rng.gen_bool(MUTATION_PROBABILITY) {\n\n mutate_child(&mut child, atoms, num_bonds)\n\n }\n\n if validate_chromosome(&child, atoms, num_bonds) {\n\n break;\n\n }\n\n child.clear();\n\n }\n\n Molecule::new(chromosome_to_structure(&child))\n\n}\n", "file_path": "src/atoms.rs", "rank": 50, "score": 17047.43158643531 }, { "content": " CN,\n\n CO,\n\n Alkene,\n\n Alkyne,\n\n // Aromatic, -- Need better ring detection and maybe huckels rule fn to add this\n\n Amide,\n\n Imine,\n\n CarboxylicAcid,\n\n Ester,\n\n Aldehyde,\n\n Ketone,\n\n AcylChloride,\n\n AcylBromide,\n\n Cyanide,\n\n}\n\nuse FunctionalGroup::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Molecule {\n\n pub structure: Structure,\n", "file_path": "src/atoms.rs", "rank": 51, "score": 17046.77046288895 }, { "content": " 2 => self.kind.push(CH2),\n\n 1 => self.kind.push(CH),\n\n 0 => self.kind.push(C),\n\n _ => panic!(\"assign_bonds: Too many hydrogens!\"),\n\n }\n\n continue;\n\n }\n\n if secondary_atoms.contains(&\"Cl\") {\n\n self.kind.push(CCl);\n\n continue;\n\n } else if secondary_atoms.contains(&\"Br\") {\n\n self.kind.push(CBr);\n\n continue;\n\n } else if secondary_atoms.contains(&\"O\") {\n\n self.kind.push(CO);\n\n continue;\n\n } else if secondary_atoms.contains(&\"N\") {\n\n self.kind.push(CN);\n\n continue;\n\n }\n", "file_path": "src/atoms.rs", "rank": 52, "score": 17046.04019535281 }, { "content": " for (n1, n2) in edge_list.iter() {\n\n if *n1 == index {\n\n if !primary_edges.insert((*n1, *n2)) {\n\n dupes.insert(*n2);\n\n }\n\n } else if *n2 == index {\n\n if 
!primary_edges.insert((*n2, *n1)) {\n\n dupes.insert(*n1);\n\n }\n\n }\n\n }\n\n // remove anything in dupes vec from primary edges, should only contain single bonded atoms\n\n primary_edges.retain(|&(_x, y)| !dupes.contains(&y));\n\n hydrogen_count = 4 - primary_edges.len() - (dupes.len() * 2);\n\n let mut secondary_atoms = Vec::new();\n\n for (a, b) in primary_edges.iter() {\n\n secondary_atoms.push(atoms[*b]);\n\n if atoms[*b] == \"O\" {\n\n alcohol = edge_list.iter().filter(|(x, y)| (x != a && y == b) || (x == b && y != a)).count() == 0;\n\n }\n", "file_path": "src/atoms.rs", "rank": 53, "score": 17044.7898834594 }, { "content": " // match carbonyl\n\n if carbonyl {\n\n // carboxylic acid or ester or amide\n\n if alcohol {\n\n self.kind.push(CarboxylicAcid);\n\n continue;\n\n }\n\n for (_a, b) in primary_edges.iter() {\n\n if atoms[*b] == \"O\" {\n\n self.kind.push(Ester);\n\n continue;\n\n } else if atoms[*b] == \"N\" {\n\n self.kind.push(Amide);\n\n continue;\n\n }\n\n }\n\n // ketone or aldehyde or acyl halide\n\n if hydrogen_count == 0 {\n\n for (_a, b) in primary_edges.iter() {\n\n if atoms[*b] == \"Cl\" {\n", "file_path": "src/atoms.rs", "rank": 54, "score": 17044.758040040615 }, { "content": " }\n\n // assign if double bonds present\n\n if !dupes.is_empty() {\n\n let mut carbonyl = false;\n\n let mut alkene = false;\n\n let mut alkyne = false;\n\n let mut imine = false;\n\n let mut cyanide = false;\n\n for d in dupes.iter() {\n\n match atoms[*d] {\n\n \"O\" => carbonyl = true,\n\n \"N\" => match imine {\n\n true => cyanide = true,\n\n false => imine = true,\n\n }\n\n \"C\" => match alkene {\n\n true => alkyne = true,\n\n false => alkene = true,\n\n }\n\n _ => panic!(\"unexpected element in duplicate matching\"),\n", "file_path": "src/atoms.rs", "rank": 55, "score": 17044.738620558917 }, { "content": " if !bnd {\n\n bnd_cnt += 1;\n\n }\n\n if !con {\n\n con_cnt += 1;\n\n }\n\n if bnd && con {\n\n population.push(mol);\n\n pop += 1;\n\n }\n\n }\n\n let 
new_pop = generate_children(population, &atoms, bonds);\n\n assert_eq!(new_pop.len(), POPULATION);\n\n for p in new_pop {\n\n assert!(connected(&p.structure));\n\n assert!(check_bonds(&p.structure, &atoms));\n\n }\n\n}\n\n\n", "file_path": "src/atoms.rs", "rank": 56, "score": 17044.467934141638 }, { "content": " self.kind.push(AcylChloride);\n\n continue;\n\n } else if atoms[*b] == \"Br\" {\n\n self.kind.push(AcylBromide);\n\n continue;\n\n }\n\n }\n\n self.kind.push(Ketone);\n\n continue;\n\n } else {\n\n self.kind.push(Aldehyde);\n\n continue;\n\n }\n\n }\n\n }\n\n // if only single bonds present\n\n // only bonded to carbons\n\n if secondary_atoms.iter().filter(|a| *a != &\"C\").count() == 0 {\n\n match hydrogen_count {\n\n 3 => self.kind.push(CH3),\n", "file_path": "src/atoms.rs", "rank": 57, "score": 17044.42427997842 }, { "content": " // gamma layer\n\n for node in beta.iter() {\n\n let mut temp = get_adjacent_carbons(*node, &edges, atoms);\n\n gamma.append(&mut temp);\n\n edges.retain(|(x, y)| x != node && y != node);\n\n }\n\n // delta level\n\n for node in gamma.iter() {\n\n let mut temp = get_adjacent_carbons(*node, &edges, atoms);\n\n delta.append(&mut temp);\n\n edges.retain(|(x, y)| x != node && y != node);\n\n }\n\n //epsilon level\n\n for node in delta.iter() {\n\n let mut temp = get_adjacent_carbons(*node, &edges, atoms);\n\n epsilon.append(&mut temp);\n\n edges.retain(|(x, y)| x != node && y != node);\n\n }\n\n\n\n Tree { alpha, beta, gamma, delta, epsilon }\n\n}\n", "file_path": "src/atoms.rs", "rank": 58, "score": 17044.29179864035 }, { "content": " }\n\n // add a bond from parent chromosome to child\n\n else if current_bonds < num_bonds {\n\n match rng.gen_range(0, 2) {\n\n 0 => {\n\n r = rng.gen_range(0, len);\n\n while parent0[r] <= child[r] {\n\n r = rng.gen_range(0, len);\n\n }\n\n current_bonds += parent0[r] - child[r];\n\n child[r] = parent0[r];\n\n }\n\n 1=> {\n\n r = rng.gen_range(0, len);\n\n while parent1[r] <= child[r] {\n\n r = 
rng.gen_range(0, len);\n\n }\n\n current_bonds += parent1[r] - child[r];\n\n child[r] = parent1[r];\n\n }\n\n _ => panic!(\"this isn't a thing\"),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/atoms.rs", "rank": 59, "score": 17042.486484512567 }, { "content": " }\n\n }\n\n // match triple bonds\n\n if cyanide {\n\n self.kind.push(Cyanide);\n\n continue;\n\n }\n\n if alkyne {\n\n self.kind.push(Alkyne);\n\n continue;\n\n }\n\n // match imine - could go more specific if needed\n\n if imine {\n\n self.kind.push(Imine);\n\n continue;\n\n }\n\n if alkene {\n\n self.kind.push(Alkene);\n\n continue;\n\n }\n", "file_path": "src/atoms.rs", "rank": 60, "score": 17042.486484512567 }, { "content": " while !processing.is_empty() {\n\n v = processing.pop().unwrap();\n\n if processed.contains(&v) {\n\n continue;\n\n }\n\n processed.insert(v);\n\n marks[nodes.iter().position(|&x| x == v).unwrap()] += components;\n\n //find adjacent edges\n\n for (n1, n2) in edges.iter() {\n\n if *n1 == v && !processed.contains(n2) {\n\n processing.push(*n2);\n\n } else if *n2 == v && !processed.contains(n1) {\n\n processing.push(*n1);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n for i in 1..components+1 {\n\n let mut temp = Vec::new();\n\n for index in 0..len {\n\n if marks[index] == i {\n\n temp.push(nodes[index]);\n\n }\n\n }\n\n ret.push(temp);\n\n }\n\n ret\n\n}\n", "file_path": "src/atoms.rs", "rank": 61, "score": 17042.486484512567 }, { "content": " for d in degree.iter().enumerate() { // TODO - can probably make this for loop to an iter\n\n if *d.1 == 1 {\n\n singletons.push(d.0);\n\n }\n\n }\n\n nodes.retain(|node1| !singletons.contains(node1));\n\n edge_list.retain(|(node1,node2)| !singletons.contains(node1) && !singletons.contains(node2));\n\n degree = vec![0; len];\n\n }\n\n None\n\n}\n", "file_path": "src/atoms.rs", "rank": 62, "score": 17042.486484512567 }, { "content": "# Molecule\n\nA C13 NMR interpreter written in Rust. 
From an input file containing a chemical formula and chemical shifts a genetic algorithm is used to find the interpreted structure.\n\n\n\nInspired by the Genius genetic algorithim. \n\n[Genius: A genetic algorithm for automated structure elucidation from (super13)C NMR specrta. Jens Meiler, Will Martin.](https://pubs.acs.org/doi/abs/10.1021/ja0109388)\n\n\n\n\n\n## Getting Started\n\nMolecule uses the [standard cargo building process](https://doc.rust-lang.org/cargo/guide/working-on-an-existing-project.html).\n\n```sh\n\ngit clone https://github.com/jeff-lund/Molecule.git\n\ncd Molecule\n\ncargo build\n\n```\n\n\n\nMolecule is run from the command line on a file. Samples files are included in the\n\n`test_files` directory.\n\n```\n\ncargo run test_files/short/acetic_acid.txt\n\n```\n\n\n\nFile Input is expected to be two lines. The first containing the chemical formula and the second csv floats for each peak in the C13 spectra including symmetrical peaks.\n\n```\n\nC2H4O2\n\n162.0, 51.0\n\n```\n\n\n\nCurrently chemical formulas must explicitly list the number for each element, C2H4O\n\nwill not recognize the lone Oxygen.\n\nElements are limited to C, H, O, N, Cl, Br\n\n\n\nUnit tests are run with `Cargo test`\n\n\n\nThe output best fit molecule is represented as an adjacency matrix\n\nwith the row/columns matched with the element in the `atoms` array at the corresponding index. \n\nHydrogens are filled in where appropriate and not explicitly referred to in the adjacency matrix. 
\n\nFor Acetic Acid `C2H4O2`: \n\n| |C(0) |C(1) |O(2) |O(3) | \n\n|----|:---:|:---:|:---:|----:| \n\n|C(0)| 0 | 1 | 2 | 1 | \n\n|C(1)| 1 | 0 | 0 | 0 | \n\n|O(2)| 2 | 0 | 0 | 0 | \n\n|O(3)| 1 | 0 | 0 | 0 | \n\n\n\nMolecular Representation:\n\n```\n\n H O(2)\n\n | ||\n\nH--C(1)--C(0)--O(3)--H\n\n | \n\n H\n\n```\n\n\n", "file_path": "README.md", "rank": 63, "score": 14935.32820923775 }, { "content": "## Chemical Shift Calculation\n\nFor a molecule chain ...C&#949;-C&#948;-C&#947;-C&#946;-C&#945;-**C**-C&#945;-C&#946;-C&#947;-C&#948;-C&#949;... \n\nThe chemical shift of **C** for alkanes: &#948;C = -2.3 + 9.1&#945; + 9.4&#946; - 2.5&#947; + 0.3&#948; + 0.1&#949; + &#931; (steric corrections) ppm\n\n\n\n### Steric corrections\n\n| Carbon Atom Observed | Primary | Secondary | Tertiary | Quaternary |\n\n|----------------------|:-------:|:---------:|:--------:|-----------:|\n\n| Primary | 0 | 0 | -1.1 | -3.4 |\n\n| Secondary | 0 | 0 | -2.5 | -7.5 |\n\n| Tertiary | 0 | -3.7 | -8.5 | -10.0 |\n\n| Quaternary | 0 | -8.4 | -10.0 | -12.5 |\n\n\n\n&#948;C is further affected by substituent effects too numerous to list here. \n\nMore information with more tables than anyone could possibly hope for [here](https://www.chem.wisc.edu/areas/reich/nmr/c13-data/cdata.htm)\n\n\n\nReference: Introduction to Spectroscopy 4th ed, Pavia, Lampman, Kriz, Vyvyan. Appendix 8.\n\n\n\n\n\n\n\nThis program is licensed under the \"MIT License\". Please see the file LICENSE in\n\nthe source distribution of this software for license terms.\n\n\n\nCopyright (c) 2018 Jeff Lund\n", "file_path": "README.md", "rank": 64, "score": 14933.743123905435 } ]
Rust
src/hotkey_config.rs
Ynscription/livesplit-core
1027a92faaf8eeb14ce8be8a32ce8d3f202bc664
#![allow(clippy::trivially_copy_pass_by_ref)] use crate::hotkey::KeyCode; use crate::platform::prelude::*; use crate::settings::{Field, SettingsDescription, Value}; use serde::{Deserialize, Serialize}; #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(default)] pub struct HotkeyConfig { pub split: Option<KeyCode>, pub reset: Option<KeyCode>, pub undo: Option<KeyCode>, pub skip: Option<KeyCode>, pub pause: Option<KeyCode>, pub undo_all_pauses: Option<KeyCode>, pub previous_comparison: Option<KeyCode>, pub next_comparison: Option<KeyCode>, pub toggle_timing_method: Option<KeyCode>, } #[cfg(any(windows, target_os = "linux"))] impl Default for HotkeyConfig { fn default() -> Self { use crate::hotkey::KeyCode::*; Self { split: Some(NumPad1), reset: Some(NumPad3), undo: Some(NumPad8), skip: Some(NumPad2), pause: Some(NumPad5), undo_all_pauses: None, previous_comparison: Some(NumPad4), next_comparison: Some(NumPad6), toggle_timing_method: None, } } } #[cfg(all(target_arch = "wasm32", target_os = "unknown"))] impl Default for HotkeyConfig { fn default() -> Self { use crate::hotkey::KeyCode::*; Self { split: Some(Numpad1), reset: Some(Numpad3), undo: Some(Numpad8), skip: Some(Numpad2), pause: Some(Numpad5), undo_all_pauses: None, previous_comparison: Some(Numpad4), next_comparison: Some(Numpad6), toggle_timing_method: None, } } } #[cfg(not(any( windows, target_os = "linux", all(target_arch = "wasm32", target_os = "unknown"), )))] impl Default for HotkeyConfig { fn default() -> Self { Self { split: Some(KeyCode), reset: Some(KeyCode), undo: Some(KeyCode), skip: Some(KeyCode), pause: Some(KeyCode), undo_all_pauses: None, previous_comparison: Some(KeyCode), next_comparison: Some(KeyCode), toggle_timing_method: None, } } } impl HotkeyConfig { pub fn settings_description(&self) -> SettingsDescription { SettingsDescription::with_fields(vec![ Field::new("Start / Split".into(), self.split.into()), Field::new("Reset".into(), self.reset.into()), 
Field::new("Undo Split".into(), self.undo.into()), Field::new("Skip Split".into(), self.skip.into()), Field::new("Pause".into(), self.pause.into()), Field::new("Undo All Pauses".into(), self.undo_all_pauses.into()), Field::new( "Previous Comparison".into(), self.previous_comparison.into(), ), Field::new("Next Comparison".into(), self.next_comparison.into()), Field::new( "Toggle Timing Method".into(), self.toggle_timing_method.into(), ), ]) } pub fn set_value(&mut self, index: usize, value: Value) -> Result<(), ()> { let value: Option<KeyCode> = value.into(); if value.is_some() { let any = [ self.split, self.reset, self.undo, self.skip, self.pause, self.undo_all_pauses, self.previous_comparison, self.next_comparison, self.toggle_timing_method, ] .iter() .enumerate() .filter(|&(i, _)| i != index) .any(|(_, &v)| v == value); if any { return Err(()); } } match index { 0 => self.split = value, 1 => self.reset = value, 2 => self.undo = value, 3 => self.skip = value, 4 => self.pause = value, 5 => self.undo_all_pauses = value, 6 => self.previous_comparison = value, 7 => self.next_comparison = value, 8 => self.toggle_timing_method = value, _ => panic!("Unsupported Setting Index"), } Ok(()) } #[cfg(feature = "std")] pub fn from_json<R>(reader: R) -> serde_json::Result<Self> where R: std::io::Read, { serde_json::from_reader(reader) } #[cfg(feature = "std")] pub fn write_json<W>(&self, writer: W) -> serde_json::Result<()> where W: std::io::Write, { serde_json::to_writer(writer, self) } }
#![allow(clippy::trivially_copy_pass_by_ref)] use crate::hotkey::KeyCode; use crate::platform::prelude::*; use crate::settings::{Field, SettingsDescription, Value}; use serde::{Deserialize, Serialize}; #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(default)] pub struct HotkeyConfig { pub split: Option<KeyCode>, pub reset: Option<KeyCode>, pub undo: Option<KeyCode>, pub skip: Option<KeyCode>, pub pause: Option<KeyCode>, pub undo_all_pauses: Option<KeyCode>, pub previous_comparison: Option<KeyCode>, pub next_comparison: Option<KeyCode>, pub toggle_timing_method: Option<KeyCode>, } #[cfg(any(windows, target_os = "linux"))] impl Default for HotkeyConfig { fn default() -> Self { use crate::hotkey::KeyCode::*; Self { split: Some(NumPad1), reset: Some(NumPad3), undo: Some(NumPad8), skip: Some
on = value, 8 => self.toggle_timing_method = value, _ => panic!("Unsupported Setting Index"), } Ok(()) } #[cfg(feature = "std")] pub fn from_json<R>(reader: R) -> serde_json::Result<Self> where R: std::io::Read, { serde_json::from_reader(reader) } #[cfg(feature = "std")] pub fn write_json<W>(&self, writer: W) -> serde_json::Result<()> where W: std::io::Write, { serde_json::to_writer(writer, self) } }
(NumPad2), pause: Some(NumPad5), undo_all_pauses: None, previous_comparison: Some(NumPad4), next_comparison: Some(NumPad6), toggle_timing_method: None, } } } #[cfg(all(target_arch = "wasm32", target_os = "unknown"))] impl Default for HotkeyConfig { fn default() -> Self { use crate::hotkey::KeyCode::*; Self { split: Some(Numpad1), reset: Some(Numpad3), undo: Some(Numpad8), skip: Some(Numpad2), pause: Some(Numpad5), undo_all_pauses: None, previous_comparison: Some(Numpad4), next_comparison: Some(Numpad6), toggle_timing_method: None, } } } #[cfg(not(any( windows, target_os = "linux", all(target_arch = "wasm32", target_os = "unknown"), )))] impl Default for HotkeyConfig { fn default() -> Self { Self { split: Some(KeyCode), reset: Some(KeyCode), undo: Some(KeyCode), skip: Some(KeyCode), pause: Some(KeyCode), undo_all_pauses: None, previous_comparison: Some(KeyCode), next_comparison: Some(KeyCode), toggle_timing_method: None, } } } impl HotkeyConfig { pub fn settings_description(&self) -> SettingsDescription { SettingsDescription::with_fields(vec![ Field::new("Start / Split".into(), self.split.into()), Field::new("Reset".into(), self.reset.into()), Field::new("Undo Split".into(), self.undo.into()), Field::new("Skip Split".into(), self.skip.into()), Field::new("Pause".into(), self.pause.into()), Field::new("Undo All Pauses".into(), self.undo_all_pauses.into()), Field::new( "Previous Comparison".into(), self.previous_comparison.into(), ), Field::new("Next Comparison".into(), self.next_comparison.into()), Field::new( "Toggle Timing Method".into(), self.toggle_timing_method.into(), ), ]) } pub fn set_value(&mut self, index: usize, value: Value) -> Result<(), ()> { let value: Option<KeyCode> = value.into(); if value.is_some() { let any = [ self.split, self.reset, self.undo, self.skip, self.pause, self.undo_all_pauses, self.previous_comparison, self.next_comparison, self.toggle_timing_method, ] .iter() .enumerate() .filter(|&(i, _)| i != index) .any(|(_, &v)| v == value); if 
any { return Err(()); } } match index { 0 => self.split = value, 1 => self.reset = value, 2 => self.undo = value, 3 => self.skip = value, 4 => self.pause = value, 5 => self.undo_all_pauses = value, 6 => self.previous_comparison = value, 7 => self.next_comparis
random
[ { "content": "/// Registers a clock as the global handler for providing the high precision\n\n/// time stamps on a `no_std` target.\n\npub fn register_clock(clock: impl Clock) {\n\n let clock: Box<dyn Clock> = Box::new(clock);\n\n let clock = Box::new(clock);\n\n // FIXME: This isn't entirely clean as this should really be\n\n // compare_and_swap, but we can't do that on every platform.\n\n if !CLOCK.load(atomic::Ordering::SeqCst).is_null() {\n\n panic!(\"The clock has already been registered\");\n\n }\n\n CLOCK.store(Box::into_raw(clock), atomic::Ordering::SeqCst);\n\n}\n\n\n\n#[derive(Copy, Clone, PartialOrd, PartialEq, Ord, Eq, Debug)]\n\npub struct Instant(Duration);\n\n\n\nimpl Instant {\n\n /// Accesses the current point in time.\n\n pub fn now() -> Self {\n\n let clock = CLOCK.load(atomic::Ordering::SeqCst);\n\n if clock.is_null() {\n\n panic!(\"No clock registered\");\n", "file_path": "src/platform/no_std/time.rs", "rank": 0, "score": 196075.33485758526 }, { "content": "pub fn state(\n\n column: &ColumnSettings,\n\n timer: &Timer,\n\n layout_settings: &GeneralLayoutSettings,\n\n segment: &Segment,\n\n segment_index: usize,\n\n current_split: Option<usize>,\n\n method: TimingMethod,\n\n) -> ColumnState {\n\n let method = column.timing_method.unwrap_or_else(|| method);\n\n let resolved_comparison = comparison::resolve(&column.comparison_override, timer);\n\n let comparison = comparison::or_current(resolved_comparison, timer);\n\n\n\n let update_value = column_update_value(\n\n column,\n\n timer,\n\n segment,\n\n segment_index,\n\n current_split,\n\n method,\n", "file_path": "src/component/splits/column.rs", "rank": 1, "score": 195277.11138944852 }, { "content": "/// Chooses a split color from the Layout Settings based on the current run.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `time_difference`: The delta that you want to find a color for.\n\n/// - `segment_index`: The split number that is associated with this delta.\n\n/// - 
`show_segment_deltas`: Can show ahead gaining and behind losing colors if\n\n/// true.\n\n/// - `show_best_segments`: Can show the best segment color if true.\n\n/// - `comparison`: The comparison that you are comparing this delta to.\n\n/// - `method`: The timing method of this delta.\n\n///\n\n/// Returns the chosen color.\n\npub fn split_color(\n\n timer: &Timer,\n\n time_difference: Option<TimeSpan>,\n\n segment_index: usize,\n\n show_segment_deltas: bool,\n\n show_best_segments: bool,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> SemanticColor {\n\n if show_best_segments && check_best_segment(timer, segment_index, method) {\n\n SemanticColor::BestSegment\n\n } else if let Some(time_difference) = time_difference {\n\n let last_delta = segment_index\n\n .checked_sub(1)\n\n .and_then(|n| last_delta(timer.run(), n, comparison, method));\n\n if time_difference < TimeSpan::zero() {\n\n if show_segment_deltas && last_delta.map_or(false, |d| time_difference > d) {\n\n SemanticColor::AheadLosingTime\n\n } else {\n\n SemanticColor::AheadGainingTime\n", "file_path": "src/analysis/state_helper.rs", "rank": 2, "score": 191060.43778001948 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut split_gradient_builder = GradientBuilder::<GradientKind>::with_tags(\n\n b\"CurrentSplitTopColor\",\n\n b\"CurrentSplitBottomColor\",\n\n b\"CurrentSplitGradient\",\n\n );\n\n let mut background_builder = GradientBuilder::<ListGradientKind>::new_gradient_type();\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if let Some(tag) = split_gradient_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"VisualSplitCount\" {\n", "file_path": "src/layout/parser/splits.rs", "rank": 3, "score": 185734.02735771987 }, { "content": "pub fn default_layout<'i: 'fd, 'fd>(\n\n font: ScaledFont<'i, 'fd>,\n\n text: &'i str,\n\n [x, y]: Pos,\n\n) -> impl Iterator<Item = PositionedGlyph<'fd>> + Clone + 'i {\n\n font.font.layout(text, font.scale, point(x, y))\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 4, "score": 173948.69965945225 }, { "content": "pub fn fill_builder(mesh: &mut Mesh) -> impl FillGeometryBuilder + '_ {\n\n vertex_builder::<_, _, (), _>(&mut mesh.buffers, |p: Point, _: FillAttributes<'_>| {\n\n Vertex {\n\n x: p.x,\n\n y: p.y,\n\n u: 0.0,\n\n v: 0.0,\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/rendering/mesh.rs", "rank": 5, "score": 172626.87856137526 }, { "content": "pub fn basic_builder(mesh: &mut Mesh) -> impl BasicGeometryBuilder + '_ {\n\n vertex_builder::<_, _, (), _>(&mut mesh.buffers, |p: Point| Vertex {\n\n x: p.x,\n\n y: p.y,\n\n u: 0.0,\n\n v: 0.0,\n\n })\n\n}\n\n\n", "file_path": "src/rendering/mesh.rs", "rank": 6, "score": 172626.87856137526 }, { "content": "pub fn stroke_builder(mesh: &mut Mesh) -> impl StrokeGeometryBuilder + '_ {\n\n vertex_builder::<_, _, (), _>(\n\n &mut mesh.buffers,\n\n |p: Point, _: StrokeAttributes<'_, '_>| Vertex {\n\n x: p.x,\n\n y: p.y,\n\n u: 0.0,\n\n v: 0.0,\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/rendering/mesh.rs", "rank": 7, "score": 172626.87856137526 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Pause {\n\n /// Ended At is the date and time at which the pause was ended, specified in RFC 3339 format.\n\n #[serde(rename = \"endedAt\")]\n\n ended_at: Option<String>,\n\n /// Started At is the date and time at which the pause was started, specified in RFC 3339\n\n /// format.\n\n #[serde(rename = \"startedAt\")]\n\n started_at: String,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 8, "score": 171900.44072841734 }, { 
"content": "#[derive(Deserialize)]\n\nstruct Split {\n\n title: Option<String>,\n\n time: Option<TimeSpan>,\n\n best_time: Option<TimeSpan>,\n\n best_segment: Option<TimeSpan>,\n\n}\n\n\n", "file_path": "src/run/parser/urn.rs", "rank": 9, "score": 171654.61991080845 }, { "content": "#[derive(Deserialize)]\n\nstruct Split {\n\n title: Option<String>,\n\n last_split: Option<i32>,\n\n}\n\n\n", "file_path": "src/run/parser/worstrun.rs", "rank": 10, "score": 171654.61991080845 }, { "content": "#[derive(Deserialize)]\n\nstruct Split {\n\n name: String,\n\n pb_split: Option<f64>,\n\n split_best: Option<f64>,\n\n}\n\n\n", "file_path": "src/run/parser/splitty.rs", "rank": 11, "score": 171654.61991080845 }, { "content": "pub fn run_with_splits(timer: &mut Timer, splits: &[f64]) {\n\n start_run(timer);\n\n\n\n for &split in splits {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 12, "score": 169544.35083812912 }, { "content": "#[derive(Deserialize)]\n\nstruct Split {\n\n time: Option<TimeSpan>,\n\n}\n\n\n", "file_path": "src/run/parser/flitter/mod.rs", "rank": 13, "score": 168617.27630410955 }, { "content": "#[allow(non_snake_case)]\n\n#[derive(Deserialize)]\n\nstruct Split {\n\n Map: String,\n\n Name: Option<String>,\n\n Ticks: Option<u64>,\n\n BestSegment: Option<u64>,\n\n}\n\n\n", "file_path": "src/run/parser/source_live_timer.rs", "rank": 14, "score": 165776.90480847762 }, { "content": "pub fn measure<'fd>(layout: impl IntoIterator<Item = PositionedGlyph<'fd>>) -> f32 {\n\n let mut first = None;\n\n layout\n\n .into_iter()\n\n .inspect(|g| {\n\n first.get_or_insert_with(|| g.position().x);\n\n })\n\n .last()\n\n .map_or(0.0, |g| {\n\n g.position().x + g.unpositioned().h_metrics().advance_width - first.unwrap()\n\n })\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 15, "score": 162886.33443439682 }, { "content": "#[test]\n\nfn default() {\n\n let 
mut run = tests_helper::create_run(&[\"A\", \"B\", \"C\", \"D\"]);\n\n run.set_game_name(\"Some Game Name\");\n\n run.set_category_name(\"Some Category Name\");\n\n run.set_attempt_count(1337);\n\n let mut timer = Timer::new(run).unwrap();\n\n let mut layout = Layout::default_layout();\n\n\n\n tests_helper::start_run(&mut timer);\n\n tests_helper::make_progress_run_with_splits_opt(&mut timer, &[Some(5.0), None, Some(10.0)]);\n\n\n\n let state = layout.state(&timer);\n\n\n\n check(&state, \"luCCVRJIPLE=\", \"default\");\n\n}\n\n\n", "file_path": "src/rendering/software/tests.rs", "rank": 16, "score": 162774.65010288416 }, { "content": "pub fn run_with_splits_opt(timer: &mut Timer, splits: &[Option<f64>]) {\n\n start_run(timer);\n\n make_progress_run_with_splits_opt(timer, splits);\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 17, "score": 161021.96093066438 }, { "content": "/// Creates a list of all the Comparison Generators that are active by default.\n\n/// Which comparison generators are in this list may change in future versions.\n\npub fn default_generators() -> Vec<Box<dyn ComparisonGenerator>> {\n\n vec![\n\n Box::new(BestSegments),\n\n Box::new(BestSplitTimes),\n\n Box::new(AverageSegments),\n\n Box::new(MedianSegments),\n\n Box::new(WorstSegments),\n\n Box::new(BalancedPB),\n\n Box::new(LatestRun),\n\n Box::new(None),\n\n ]\n\n}\n\n\n", "file_path": "src/comparison/mod.rs", "rank": 18, "score": 160023.96627460324 }, { "content": "fn column_update_value(\n\n column: &ColumnSettings,\n\n timer: &Timer,\n\n segment: &Segment,\n\n segment_index: usize,\n\n current_split: Option<usize>,\n\n method: TimingMethod,\n\n comparison: &str,\n\n) -> Option<(Option<TimeSpan>, SemanticColor, ColumnFormatter)> {\n\n use self::{ColumnUpdateTrigger::*, ColumnUpdateWith::*};\n\n\n\n if current_split < Some(segment_index) {\n\n // Didn't reach the segment yet.\n\n return None;\n\n }\n\n\n\n let is_current_split = current_split == 
Some(segment_index);\n\n\n\n if is_current_split {\n\n if column.update_trigger == OnEndingSegment {\n", "file_path": "src/component/splits/column.rs", "rank": 19, "score": 158057.4722072857 }, { "content": "/// Same as run_with_splits_opt, but progresses an already active attempt and\n\n/// doesn't reset it. Useful for checking intermediate states.\n\npub fn make_progress_run_with_splits_opt(timer: &mut Timer, splits: &[Option<f64>]) {\n\n for &split in splits {\n\n if let Some(split) = split {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n } else {\n\n timer.skip_split();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 20, "score": 155684.17443220058 }, { "content": "/// Gets the last non-live delta in the run starting from `segment_index`.\n\n///\n\n/// - `run`: The current run.\n\n/// - `segment_index`: The split number to start checking deltas from.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the last non-live delta or None if there have been no deltas yet.\n\npub fn last_delta(\n\n run: &Run,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n find_previous_non_empty_split_and_comparison_time(\n\n &run.segments()[..=segment_index],\n\n comparison,\n\n method,\n\n )\n\n .map(|(split_time, comparison_time)| split_time - comparison_time)\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 21, "score": 149013.344294418 }, { "content": "/// Populates the segments with a goal comparison for the timing method\n\n/// specified. Every other timing method is left untouched. The segment history\n\n/// is used to generate comparison times such that they end up with the goal\n\n/// time specified. The values are stored in the comparison with the name\n\n/// provided. 
Only the range between the sum of the best segments and the sum of\n\n/// the worst segments is supported. Every other goal time is capped within that\n\n/// range.\n\npub fn generate_for_timing_method(\n\n segments: &mut [Segment],\n\n method: TimingMethod,\n\n goal_time: TimeSpan,\n\n comparison: &str,\n\n) {\n\n let mut all_weighted_segment_times = vec![Vec::new(); segments.len()];\n\n let mut time_span_buf = Vec::with_capacity(segments.len());\n\n\n\n generate_for_timing_method_with_buf(\n\n segments,\n\n method,\n\n Some(goal_time),\n\n comparison,\n\n &mut time_span_buf,\n\n &mut all_weighted_segment_times,\n\n );\n\n}\n\n\n", "file_path": "src/comparison/goal.rs", "rank": 22, "score": 149012.8609465557 }, { "content": "/// Calculates how much time could be saved on the given segment with the given\n\n/// comparison. This information is based on the best segments. Considering the\n\n/// best segments don't represent theoretically perfect segment times, this\n\n/// information is only an approximation of how much time can actually be saved.\n\n/// If the parameter `live` is set to `true`, then the segment time of the\n\n/// current attempt is used if it gets longer than the segment time of the\n\n/// segment the possible time save is calculated for. So the possible time save\n\n/// shrinks towards zero as time goes on. 
The time returned by this function can\n\n/// never be below zero.\n\npub fn calculate(\n\n timer: &Timer,\n\n segment_index: usize,\n\n comparison: &str,\n\n live: bool,\n\n) -> Option<TimeSpan> {\n\n let segments = timer.run().segments();\n\n let method = timer.current_timing_method();\n\n let mut prev_time = TimeSpan::zero();\n\n let segment = timer.run().segment(segment_index);\n\n let mut best_segments = segment.best_segment_time()[method];\n\n\n\n for segment in segments[..segment_index].iter().rev() {\n\n if let Some(best_segments) = &mut best_segments {\n\n if let Some(split_time) = segment.comparison(comparison)[method] {\n\n prev_time = split_time;\n\n break;\n\n } else if let Some(best_segment) = segment.best_segment_time()[method] {\n\n *best_segments += best_segment;\n\n }\n", "file_path": "src/analysis/possible_time_save.rs", "rank": 23, "score": 149008.9399153476 }, { "content": "#[allow(clippy::needless_range_loop)]\n\npub fn calculate(\n\n segments: &[Segment],\n\n predictions: &mut [Option<Prediction>],\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n predictions[0] = Some(Prediction::default());\n\n let end_index = segments.len();\n\n for segment_index in 0..end_index {\n\n populate_predictions(\n\n segments,\n\n predictions[segment_index],\n\n segment_index,\n\n predictions,\n\n use_current_run,\n\n method,\n\n );\n\n }\n\n Some(predictions[end_index]?.time)\n\n}\n", "file_path": "src/analysis/sum_of_segments/worst.rs", "rank": 24, "score": 149006.12895454443 }, { "content": "#[allow(clippy::needless_range_loop)]\n\npub fn calculate(\n\n segments: &[Segment],\n\n predictions: &mut [Option<Prediction>],\n\n simple_calculation: bool,\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n predictions[0] = Some(Prediction::default());\n\n let end_index = segments.len();\n\n for segment_index in 0..end_index {\n\n populate_predictions(\n\n segments,\n\n predictions[segment_index],\n\n 
segment_index,\n\n predictions,\n\n simple_calculation,\n\n use_current_run,\n\n method,\n\n );\n\n }\n\n Some(predictions[end_index]?.time)\n\n}\n", "file_path": "src/analysis/sum_of_segments/best.rs", "rank": 25, "score": 149006.12895454443 }, { "content": "#[test]\n\nfn not_when_undoing_all_pauses_without_an_attempt() {\n\n let mut timer = timer();\n\n timer.undo_all_pauses();\n\n assert!(!timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 26, "score": 147502.94341521894 }, { "content": "#[test]\n\nfn not_when_skipping_a_split_without_an_attempt() {\n\n let mut timer = timer();\n\n timer.skip_split();\n\n assert!(!timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 27, "score": 146967.78479672802 }, { "content": "#[test]\n\nfn not_when_undoing_a_split_without_an_attempt() {\n\n let mut timer = timer();\n\n timer.undo_split();\n\n assert!(!timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 28, "score": 146965.5618018698 }, { "content": "#[test]\n\nfn when_starting_and_resetting_with_update_splits() {\n\n let mut timer = timer();\n\n timer.start();\n\n timer.reset(true);\n\n assert!(timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 29, "score": 146955.48044794757 }, { "content": "#[test]\n\nfn monotically_increasing_split_times_after_resetting() {\n\n let mut timer = timer();\n\n\n\n let (first, second, third) = (\n\n TimeSpan::from_seconds(5.0),\n\n TimeSpan::from_seconds(15.0),\n\n TimeSpan::from_seconds(10.0),\n\n );\n\n run_with_splits(\n\n &mut timer,\n\n &[\n\n first.total_seconds(),\n\n second.total_seconds(),\n\n third.total_seconds(),\n\n ],\n\n );\n\n\n\n let run = timer.into_run(true);\n\n\n\n // The first segment's time should be unchanged.\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 30, 
"score": 146955.48044794757 }, { "content": "pub fn rectangle() -> Mesh {\n\n let mut buffers = VertexBuffers::new();\n\n\n\n buffers.vertices = vec![\n\n Vertex {\n\n x: 0.0,\n\n y: 0.0,\n\n u: 0.0,\n\n v: 0.0,\n\n },\n\n Vertex {\n\n x: 1.0,\n\n y: 0.0,\n\n u: 1.0,\n\n v: 0.0,\n\n },\n\n Vertex {\n\n x: 1.0,\n\n y: 1.0,\n\n u: 1.0,\n", "file_path": "src/rendering/mesh.rs", "rank": 31, "score": 146493.08136546175 }, { "content": "pub fn ellipsis<'fd>(\n\n layout: impl IntoIterator<Item = PositionedGlyph<'fd>>,\n\n mut max_x: f32,\n\n font: ScaledFont<'_, 'fd>,\n\n) -> impl Iterator<Item = PositionedGlyph<'fd>> {\n\n let ellipsis = font.font.glyph(Codepoint('…' as u32)).scaled(font.scale);\n\n let ellipsis_width = ellipsis.h_metrics().advance_width;\n\n\n\n let mut glyphs = layout.into_iter().collect::<SmallVec<[_; 32]>>();\n\n\n\n let mut positioned_ellipsis = None;\n\n while let Some(glyph) = glyphs.last() {\n\n if glyph.position().x + glyph.unpositioned().h_metrics().advance_width > max_x {\n\n if positioned_ellipsis.is_none() {\n\n max_x -= ellipsis_width;\n\n }\n\n positioned_ellipsis = Some(ellipsis.clone().positioned(glyph.position()));\n\n glyphs.pop();\n\n } else {\n\n break;\n\n }\n\n }\n\n if let Some(ellipsis) = positioned_ellipsis {\n\n glyphs.push(ellipsis);\n\n }\n\n\n\n glyphs.into_iter()\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 32, "score": 146493.08136546175 }, { "content": "pub fn centered<'fd>(\n\n layout: impl IntoIterator<Item = PositionedGlyph<'fd>> + Clone,\n\n min_x: f32,\n\n) -> impl Iterator<Item = PositionedGlyph<'fd>> {\n\n let mut offset = measure(layout.clone()) / 2.0;\n\n let mut is_first = true;\n\n layout.into_iter().map(move |g| {\n\n let pos = g.position();\n\n if is_first {\n\n if pos.x - offset < min_x {\n\n offset = pos.x - min_x;\n\n }\n\n is_first = false;\n\n }\n\n g.into_unpositioned()\n\n .positioned(point(pos.x - offset, pos.y))\n\n })\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 33, 
"score": 146493.08136546175 }, { "content": "/// Gets the amount of time lost or gained on a certain split, using the live\n\n/// segment delta if the split is not completed yet.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split for which the delta is calculated.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the segment delta for a certain split, returning the live segment\n\n/// delta if the split is not completed yet.\n\npub fn live_segment_delta(\n\n timer: &Timer,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_delta(\n\n timer.run(),\n\n segment_index,\n\n timer.current_time()[method]?,\n\n comparison,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 34, "score": 146384.31780835698 }, { "content": "/// Gets the length of the last segment that leads up to a certain split, using\n\n/// the live segment time if the split is not completed yet.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `segment_index`, returning\n\n/// the live segment time if the split is not completed yet.\n\npub fn live_segment_time(\n\n timer: &Timer,\n\n segment_index: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time(\n\n timer.run(),\n\n segment_index,\n\n timer.current_time()[method]?,\n\n method,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 35, "score": 146384.0914673065 }, { "content": "/// Gets the amount of time lost or gained on a certain split.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split for which the delta is 
calculated.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the segment delta for a certain split, returning None if the split\n\n/// is not completed yet.\n\npub fn previous_segment_delta(\n\n timer: &Timer,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_delta(\n\n timer.run(),\n\n segment_index,\n\n timer.run().segment(segment_index).split_time()[method]?,\n\n comparison,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 36, "score": 146383.3197870009 }, { "content": "/// Gets the length of the last segment that leads up to a certain split.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `segment_index`, returning\n\n/// None if the split is not completed yet.\n\npub fn previous_segment_time(\n\n timer: &Timer,\n\n segment_index: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time(\n\n timer.run(),\n\n segment_index,\n\n timer.run().segment(segment_index).split_time()[method]?,\n\n method,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 37, "score": 146383.0107240774 }, { "content": "/// Follows a path starting from a certain segment in a certain attempt to the\n\n/// next split that didn't get skipped. Returns the index of the segment after\n\n/// the segment that has the next split time and a sum of the combined segment\n\n/// times and the current time provided. 
If the tracked attempt ends before a\n\n/// split time is found, the index returned is 0.\n\npub fn track_branch(\n\n segments: &[Segment],\n\n current_time: Option<TimeSpan>,\n\n segment_index: usize,\n\n run_index: i32,\n\n method: TimingMethod,\n\n) -> (usize, Time) {\n\n for (segment_index, segment) in segments.iter().enumerate().skip(segment_index) {\n\n if let Some(cur_time) = segment.segment_history().get(run_index) {\n\n if let Some(cur_time) = cur_time[method] {\n\n return (\n\n segment_index + 1,\n\n Time::new().with_timing_method(method, current_time.map(|t| cur_time + t)),\n\n );\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n (0, Time::default())\n\n}\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 38, "score": 146382.97501438466 }, { "content": "/// Checks whether the live segment should now be shown.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `split_delta`: Specifies whether to return a split delta\n\n/// rather than a segment delta and to start showing the live\n\n/// segment once you are behind.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the current live delta.\n\npub fn check_live_delta(\n\n timer: &Timer,\n\n split_delta: bool,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n if timer.current_phase() == TimerPhase::Running || timer.current_phase() == TimerPhase::Paused {\n\n let current_split = timer\n\n .current_split()\n\n .unwrap()\n\n .comparison_timing_method(comparison, method);\n\n let current_time = timer.current_time()[method];\n\n let segment_index = timer.current_split_index().unwrap();\n\n let current_segment = live_segment_time(timer, segment_index, method);\n\n let best_segment = timer.run().segment(segment_index).best_segment_time()[method];\n\n let best_segment_delta =\n\n live_segment_delta(timer, segment_index, best_segments::NAME, method);\n\n let comparison_delta = 
live_segment_delta(timer, segment_index, comparison, method);\n\n\n\n if split_delta && current_time > current_split\n", "file_path": "src/analysis/state_helper.rs", "rank": 39, "score": 146382.15879746893 }, { "content": "/// Renders the layout state provided into an image of the selected resolution.\n\n/// The `samples_sqrt` argument is the square root of the amount of samples used\n\n/// for anti aliasing the final image. Note that this is software rendered and\n\n/// thus will be much slower than rendering on the GPU.\n\npub fn render_anti_aliased(\n\n state: &LayoutState,\n\n [width, height]: [usize; 2],\n\n samples_sqrt: usize,\n\n) -> RgbaImage {\n\n let image = render(state, [width * samples_sqrt, height * samples_sqrt]);\n\n image::imageops::thumbnail(&image, width as u32, height as u32)\n\n}\n", "file_path": "src/rendering/software/mod.rs", "rank": 40, "score": 146377.6228648782 }, { "content": "/// Calculates the Sum of Worst Segments for the timing method provided. This is\n\n/// the slowest time possible to complete a run of a category, based on\n\n/// information collected from all the previous attempts. This obviously isn't\n\n/// really the worst possible time, but may be useful information regardless.\n\n/// If there's an active attempt, you can choose to take it into account as\n\n/// well.\n\npub fn calculate_worst(\n\n segments: &[Segment],\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n let mut predictions = vec![None; segments.len() + 1];\n\n worst::calculate(segments, &mut predictions, use_current_run, method)\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 41, "score": 146377.33221134334 }, { "content": "/// Calculates the Sum of Best Segments for the timing method provided. This is\n\n/// the fastest time possible to complete a run of a category, based on\n\n/// information collected from all the previous attempts. 
This often matches up\n\n/// with the sum of the best segment times of all the segments, but that may not\n\n/// always be the case, as skipped segments may introduce combined segments that\n\n/// may be faster than the actual sum of their best segment times. The name is\n\n/// therefore a bit misleading, but sticks around for historical reasons. You\n\n/// can choose to do a simple calculation instead, which excludes the Segment\n\n/// History from the calculation process. If there's an active attempt, you can\n\n/// choose to take it into account as well.\n\npub fn calculate_best(\n\n segments: &[Segment],\n\n simple_calculation: bool,\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n let mut predictions = vec![None; segments.len() + 1];\n\n best::calculate(\n\n segments,\n\n &mut predictions,\n\n simple_calculation,\n\n use_current_run,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 42, "score": 146376.51833904383 }, { "content": "#[test]\n\npub fn sum_of_best() {\n\n let mut timer = create_timer(&[\"A\", \"B\", \"C\"]);\n\n\n\n run_with_splits_opt(&mut timer, &[Some(5.0), Some(20.0), Some(60.0)]);\n\n let mut predictions = [None; 4];\n\n best::calculate(\n\n timer.run().segments(),\n\n &mut predictions,\n\n false,\n\n false,\n\n TimingMethod::GameTime,\n\n );\n\n assert(\n\n &timer,\n\n predictions,\n\n [(5.0, 0, true), (20.0, 1, true), (60.0, 2, true)],\n\n );\n\n\n\n run_with_splits_opt(&mut timer, &[None, Some(10.0), None]);\n\n predictions = [None; 4];\n", "file_path": "src/analysis/sum_of_segments/tests.rs", "rank": 43, "score": 146373.764913139 }, { "content": "pub fn measure_default_layout(font: ScaledFont<'_, '_>, text: &str) -> f32 {\n\n measure(default_layout(font, text, [0.0; 2]))\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 44, "score": 146024.5338831372 }, { "content": "/// Attempts to parse a ShitSplit splits file.\n\npub fn parse<R: BufRead>(source: R) -> 
Result<Run> {\n\n let mut lines = source.lines();\n\n\n\n let line = lines.next().context(Empty)?.context(ReadTitleLine)?;\n\n\n\n let mut splits = line.split('|');\n\n let category_name = splits.next().context(ExpectedCategoryName)?;\n\n if !category_name.starts_with('#') {\n\n return Err(Error::ExpectedCategoryName);\n\n }\n\n\n\n let mut run = Run::new();\n\n\n\n run.set_category_name(&category_name[1..]);\n\n run.set_attempt_count(\n\n splits\n\n .next()\n\n .context(ExpectedAttemptCount)?\n\n .parse()\n\n .context(ParseAttemptCount)?,\n", "file_path": "src/run/parser/shit_split.rs", "rank": 45, "score": 145080.44323782856 }, { "content": "/// Calculates the comparison's segment time of the segment with the timing\n\n/// method specified. This is not calculating the current attempt's segment\n\n/// times.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the provided `segment_index` is greater than or equal to\n\n/// `run.len()`.\n\npub fn comparison_single_segment_time(\n\n run: &Run,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n if comparison == best_segments::NAME {\n\n return run.segment(segment_index).best_segment_time()[method];\n\n }\n\n\n\n if segment_index == 0 {\n\n run.segment(segment_index).comparison(comparison)[method]\n\n } else {\n\n let current_comparison_time = run.segment(segment_index).comparison(comparison)[method]?;\n\n\n\n let previous_comparison_time =\n\n run.segment(segment_index - 1).comparison(comparison)[method]?;\n\n\n\n Some(current_comparison_time - previous_comparison_time)\n\n }\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 46, "score": 143901.98482505122 }, { "content": "/// Calculates the comparison's segment time of the segment with the timing\n\n/// method specified, combining segments if the segment before it is empty.\n\n/// This is not calculating the current attempt's segment times.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the provided 
`segment_index` is greater than or equal to\n\n/// `run.len()`.\n\npub fn comparison_combined_segment_time(\n\n run: &Run,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n if comparison == best_segments::NAME {\n\n return run.segment(segment_index).best_segment_time()[method];\n\n }\n\n\n\n let current_comparison_time = run.segment(segment_index).comparison(comparison)[method]?;\n\n\n\n let previous_comparison_time = find_previous_non_empty_comparison_time(\n\n &run.segments()[..segment_index],\n\n comparison,\n\n method,\n\n )\n\n .unwrap_or_default();\n\n\n\n Some(current_comparison_time - previous_comparison_time)\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 47, "score": 143901.98482505122 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_color = false;\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"TimerHeight\" {\n\n text_parsed(reader, tag.into_buf(), |v| settings.height = v)\n\n } else if tag.name() == b\"TimerFormat\" {\n\n // Version >= 1.5\n\n timer_format(reader, tag.into_buf(), |d, a| {\n\n settings.digits_format = d;\n", "file_path": "src/layout/parser/timer.rs", "rank": 48, "score": 143683.95900260785 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n let (mut left_center, mut right) = (String::new(), String::new());\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| {\n\n settings.left_center_color = Some(c)\n\n })\n\n } else if tag.name() == b\"OverrideTextColor\" {\n", "file_path": "src/layout/parser/text.rs", "rank": 49, "score": 143683.95900260785 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_title_color = false;\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"ShowGameName\" {\n\n parse_bool(reader, tag.into_buf(), |b| settings.show_game_name = b)\n\n } else if tag.name() == b\"ShowCategoryName\" {\n\n parse_bool(reader, tag.into_buf(), |b| settings.show_category_name = b)\n\n } else if tag.name() == b\"ShowAttemptCount\" {\n\n parse_bool(reader, tag.into_buf(), |b| settings.show_attempt_count = b)\n", "file_path": "src/layout/parser/title.rs", "rank": 50, "score": 143683.95900260785 }, { "content": "pub fn reencode_children<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n target_buf: &mut Vec<u8>,\n\n) -> Result<(), Error>\n\nwhere\n\n R: BufRead,\n\n{\n\n reader.expand_empty_elements(false);\n\n let mut writer = Writer::new(target_buf);\n\n let mut depth = 0;\n\n loop {\n\n buf.clear();\n\n match reader.read_event(buf)? {\n\n Event::Start(start) => {\n\n depth += 1;\n\n writer.write_event(Event::Start(start))?;\n\n }\n\n Event::End(end) => {\n\n if depth == 0 {\n", "file_path": "src/xml_util.rs", "rank": 51, "score": 143683.95900260785 }, { "content": "pub fn dynamic_align<'fd>(\n\n layout: impl IntoIterator<Item = PositionedGlyph<'fd>> + Clone,\n\n align: f32,\n\n min_x: f32,\n\n) -> impl Iterator<Item = PositionedGlyph<'fd>> {\n\n let mut offset = align * measure(layout.clone());\n\n let mut is_first = true;\n\n layout.into_iter().map(move |g| {\n\n let pos = g.position();\n\n if is_first {\n\n if pos.x - offset < min_x {\n\n offset = pos.x - min_x;\n\n }\n\n is_first = false;\n\n }\n\n g.into_unpositioned()\n\n .positioned(point(pos.x - offset, pos.y))\n\n })\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 52, "score": 143683.95900260785 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_label = false;\n\n\n\n parse_children(reader, 
buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"Accuracy\" {\n\n accuracy(reader, tag.into_buf(), |a| settings.accuracy = a)\n", "file_path": "src/layout/parser/delta.rs", "rank": 53, "score": 143683.95900260785 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if tag.name() == b\"Height\" {\n\n text_parsed(reader, tag.into_buf(), |v| settings.height = v)\n\n } else if tag.name() == b\"BehindGraphColor\" {\n\n color(reader, tag.into_buf(), |c| {\n\n settings.behind_background_color = c\n\n })\n\n } else if tag.name() == b\"AheadGraphColor\" {\n\n color(reader, tag.into_buf(), |c| {\n\n settings.ahead_background_color = c\n", "file_path": "src/layout/parser/graph.rs", "rank": 54, "score": 143683.95900260785 }, { "content": "#[test]\n\nfn when_starting_and_resetting_without_update_splits() {\n\n let mut timer = timer();\n\n timer.start();\n\n timer.reset(false);\n\n assert!(timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 55, "score": 143654.66621414496 }, { "content": "/// Attempts to parse a generic Splits I/O splits file.\n\npub fn parse<R: Read>(source: R) -> Result<(Run, String)> {\n\n let splits: Splits = from_reader(source).context(Json)?;\n\n\n\n let mut run = Run::new();\n\n\n\n if let Some(game) = splits.game {\n\n run.set_game_name(game.longname);\n\n }\n\n if let Some(category) = splits.category {\n\n run.set_category_name(category.longname);\n\n }\n\n if let Some(attempts) = 
splits.attempts {\n\n if let Some(total) = attempts.total {\n\n run.set_attempt_count(total);\n\n }\n\n for attempt in attempts.histories.into_iter().flatten() {\n\n run.add_attempt_with_index(\n\n attempt.duration.into(),\n\n attempt.attempt_number as i32,\n\n None,\n", "file_path": "src/run/parser/splits_io.rs", "rank": 56, "score": 142747.01212461 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"TimeColor\" {\n\n color(reader, tag.into_buf(), |c| settings.value_color = Some(c))\n", "file_path": "src/layout/parser/total_playtime.rs", "rank": 57, "score": 141051.59496120235 }, { "content": "pub fn align_right_and_measure<'fd>(\n\n layout: impl IntoIterator<Item = PositionedGlyph<'fd>> + Clone,\n\n) -> (impl Iterator<Item = PositionedGlyph<'fd>>, f32) {\n\n let width = measure(layout.clone());\n\n let layout = layout.into_iter().map(move |g| {\n\n let pos = g.position();\n\n g.into_unpositioned()\n\n .positioned(point(pos.x - width, pos.y))\n\n });\n\n (layout, width)\n\n}\n\n\n", "file_path": "src/rendering/font.rs", "rank": 58, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = 
GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"TimeColor\" {\n\n color(reader, tag.into_buf(), |c| settings.value_color = Some(c))\n", "file_path": "src/layout/parser/sum_of_best.rs", "rank": 59, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_label = false;\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"DeltaAccuracy\" {\n\n accuracy(reader, tag.into_buf(), |v| settings.accuracy = v)\n", "file_path": "src/layout/parser/previous_segment.rs", "rank": 60, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut settings = component.settings().clone();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut timer_override_color = false;\n\n let (mut total_height, mut segment_timer_ratio) = (65u32, 0.4);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"Height\" {\n\n text_parsed(reader, tag.into_buf(), |v| total_height = v)\n\n } else if tag.name() == b\"SegmentTimerSizeRatio\" {\n\n text_parsed(reader, tag.into_buf(), |v: u32| {\n\n segment_timer_ratio = v as f32 / 100.0\n", "file_path": "src/layout/parser/detailed_timer.rs", "rank": 61, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n\n\n parse_children::<_, _, Error>(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"SpaceHeight\" {\n\n text_parsed(reader, tag.into_buf(), |h| settings.size = h)\n\n } else {\n\n // FIXME:\n\n // SpaceWidth\n\n end_tag(reader, tag.into_buf())\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n })?;\n\n\n\n settings.background = background_builder.build();\n\n\n\n Ok(())\n\n}\n", "file_path": "src/layout/parser/blank_space.rs", "rank": 62, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"TimeColor\" {\n\n color(reader, tag.into_buf(), |c| settings.value_color = Some(c))\n", "file_path": "src/layout/parser/current_comparison.rs", "rank": 63, "score": 141051.59496120235 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"TimeColor\" {\n\n color(reader, tag.into_buf(), |c| settings.value_color = Some(c))\n", "file_path": "src/layout/parser/current_pace.rs", "rank": 64, "score": 141051.59496120235 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Splits {\n\n /// Schema Version specifies which version of the Splits I/O JSON Schema is being used. This\n\n /// schema specifies only v1.0.0.\n\n #[serde(rename = \"_schemaVersion\")]\n\n _schemaversion: String,\n\n /// Attempts contains historical information about previous runs by this runner in this\n\n /// category.\n\n attempts: Option<Attempts>,\n\n /// Category specifies information about the category being run.\n\n category: Option<Category>,\n\n /// Ended At is the date and time at which the run was ended, specified in RFC 3339 format.\n\n #[serde(rename = \"endedAt\")]\n\n ended_at: Option<String>,\n\n /// Game specifies information about the game being run.\n\n game: Option<Game>,\n\n /// Image URL is the location of an image associated with this run. 
Often this is a screenshot\n\n /// of the timer at run completion, but can be anything the runner wants displayed alongside\n\n /// the run.\n\n #[serde(rename = \"imageURL\")]\n\n image_url: Option<String>,\n", "file_path": "src/run/parser/splits_io.rs", "rank": 65, "score": 139994.03879011387 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let (mut override_label, mut override_value) = (false, false);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"TimeColor\" {\n\n color(reader, tag.into_buf(), |c| settings.value_color = Some(c))\n", "file_path": "src/layout/parser/possible_time_save.rs", "rank": 66, "score": 138579.81487311464 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n writeln!(\n\n writer,\n\n \"{}{}\",\n\n r#\"// tslint:disable\n\nlet wasm: WebAssembly.ResultObject | null = null;\n\n\n\ndeclare namespace WebAssembly {\n", "file_path": "capi/bind_gen/src/wasm.rs", "rank": 67, "score": 134059.39811968687 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n write!(\n\n writer,\n\n r#\"\"use strict\";\n\n// tslint:disable\n\nimport ffi = require('ffi');\n\nimport fs = require('fs');\n\nimport ref = require('ref');\n\n\n\n{}\n\n\n\nconst liveSplitCoreNative = 
ffi.Library('livesplit_core', {{\"#,\n\n typescript::HEADER\n\n )?;\n\n } else {\n", "file_path": "capi/bind_gen/src/node.rs", "rank": 68, "score": 134059.39811968687 }, { "content": "/// Attempts to parse a FaceSplit splits file. In addition to the source to\n\n/// parse, you need to specify if additional files for the icons should be\n\n/// loaded from the file system. If you are using livesplit-core in a\n\n/// server-like environment, set this to `false`. Only client-side applications\n\n/// should set this to `true`.\n\npub fn parse<R: BufRead>(source: R, load_icons: bool) -> Result<Run> {\n\n let mut run = Run::new();\n\n let mut icon_buf = Vec::new();\n\n let mut lines = source.lines();\n\n\n\n run.set_category_name(lines.next().context(ExpectedTitle)?.context(ReadTitle)?);\n\n\n\n let goal = lines.next().context(ExpectedGoal)?.context(ReadGoal)?;\n\n if !goal.trim_start().is_empty() {\n\n run.metadata_mut()\n\n .custom_variable_mut(\"Goal\")\n\n .permanent()\n\n .set_value(goal);\n\n }\n\n\n\n run.set_attempt_count(\n\n lines\n\n .next()\n\n .context(ExpectedAttemptCount)?\n\n .context(ReadAttemptCount)?\n", "file_path": "src/run/parser/face_split.rs", "rank": 69, "score": 133124.1340488196 }, { "content": "pub fn layout_numbers<'i: 'fd, 'fd>(\n\n font: ScaledFont<'i, 'fd>,\n\n text: &'i str,\n\n [mut x, y]: Pos,\n\n) -> impl Iterator<Item = PositionedGlyph<'fd>> + Clone + 'i {\n\n let scale = font.scale;\n\n\n\n let mut digits = [GlyphId(0); 10];\n\n let mut digit_width = 0.0;\n\n for (digit, glyph) in digits.iter_mut().enumerate() {\n\n let the_glyph = font.font.glyph(Codepoint(digit as u32 + u32::from(b'0')));\n\n\n\n *glyph = the_glyph.id();\n\n\n\n let width = the_glyph.scaled(scale).h_metrics().advance_width;\n\n if width > digit_width {\n\n digit_width = width;\n\n }\n\n }\n\n\n", "file_path": "src/rendering/font.rs", "rank": 70, "score": 132644.03580538277 }, { "content": "pub fn render<'fd, B: Backend>(\n\n layout: impl IntoIterator<Item = 
PositionedGlyph<'fd>>,\n\n [top, bottom]: [Color; 2],\n\n font: &Font<'_>,\n\n glyph_cache: &mut GlyphCache<B::Mesh>,\n\n transform: &Transform,\n\n backend: &mut B,\n\n) -> Option<PositionedGlyph<'fd>> {\n\n let top = decode_color(&top);\n\n let bottom = decode_color(&bottom);\n\n let colors = [top, top, bottom, bottom];\n\n\n\n let mut last_glyph = None;\n\n for glyph in layout {\n\n let glyph_mesh = glyph_cache.lookup_or_insert(font, glyph.id(), backend);\n\n let pos = glyph.position();\n\n let scale = glyph.scale();\n\n last_glyph = Some(glyph);\n\n\n\n let transform = transform\n\n .pre_translate([pos.x, pos.y].into())\n\n .pre_scale(scale.x, scale.y);\n\n\n\n backend.render_mesh(glyph_mesh, transform, colors, None);\n\n }\n\n\n\n last_glyph\n\n}\n", "file_path": "src/rendering/font.rs", "rank": 71, "score": 132644.03580538277 }, { "content": "/// Shortens a comparison name. If the name of the comparison matches one of the\n\n/// comparison generators, the short name of that comparison generator is\n\n/// returned. 
Otherwise the comparison name is returned without being shortened.\n\n/// Additional shortening logic for other comparison names may happen in the\n\n/// future.\n\npub fn shorten(comparison: &str) -> &str {\n\n match comparison {\n\n personal_best::NAME => personal_best::SHORT_NAME,\n\n world_record::NAME => world_record::SHORT_NAME,\n\n average_segments::NAME => average_segments::SHORT_NAME,\n\n median_segments::NAME => median_segments::SHORT_NAME,\n\n balanced_pb::NAME => balanced_pb::SHORT_NAME,\n\n best_segments::NAME => best_segments::SHORT_NAME,\n\n best_split_times::NAME => best_split_times::SHORT_NAME,\n\n latest_run::NAME => latest_run::SHORT_NAME,\n\n none::NAME => none::SHORT_NAME,\n\n worst_segments::NAME => worst_segments::SHORT_NAME,\n\n c => c,\n\n }\n\n}\n\n\n", "file_path": "src/comparison/mod.rs", "rank": 72, "score": 132644.03580538277 }, { "content": "pub fn utc_now() -> DateTime<Utc> {\n\n let clock = CLOCK.load(atomic::Ordering::SeqCst);\n\n if clock.is_null() {\n\n panic!(\"No clock registered\");\n\n }\n\n let clock = unsafe { &*clock };\n\n clock.date_now()\n\n}\n", "file_path": "src/platform/no_std/time.rs", "rank": 73, "score": 131733.94259897282 }, { "content": "pub fn utc_now() -> DateTime<Utc> {\n\n Utc::now()\n\n}\n", "file_path": "src/platform/normal/mod.rs", "rank": 74, "score": 131733.94259897282 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n writeln!(\n\n writer,\n\n \"{}{}\",\n\n r#\"// tslint:disable\n\nimport * as wasm from \"./livesplit_core_bg\";\n\n\n\ndeclare class TextEncoder {\n\n constructor(label?: string, options?: TextEncoding.TextEncoderOptions);\n\n encoding: string;\n\n encode(input?: string, options?: TextEncoding.TextEncodeOptions): Uint8Array;\n\n}\n\n\n\ndeclare class TextDecoder {\n\n constructor(utfLabel?: string, options?: TextEncoding.TextDecoderOptions)\n", "file_path": 
"capi/bind_gen/src/wasm_bindgen.rs", "rank": 75, "score": 131733.94259897282 }, { "content": "fn check_column_state(state: &State, state_index: usize, expected_values: Values) {\n\n let actual_values = state\n\n .splits\n\n .iter()\n\n .map(|split| split.columns[0].value.as_str())\n\n .collect::<Vec<_>>();\n\n let actual_colors = state\n\n .splits\n\n .iter()\n\n .map(|split| split.columns[0].semantic_color)\n\n .collect::<Vec<_>>();\n\n let actual_state = (actual_values, actual_colors);\n\n let (expected_values, expected_colors) = &expected_values[state_index];\n\n let expected_state = (expected_values.to_vec(), expected_colors.to_vec());\n\n assert_eq!(actual_state, expected_state, \"State index: {}\", state_index);\n\n}\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 76, "score": 131134.96078405358 }, { "content": "#[derive(Deserialize)]\n\nstruct Splits {\n\n title: Option<String>,\n\n attempt_count: Option<u32>,\n\n start_delay: Option<TimeSpan>,\n\n splits: Option<Vec<Split>>,\n\n}\n\n\n", "file_path": "src/run/parser/urn.rs", "rank": 77, "score": 130851.09183990018 }, { "content": "#[derive(Deserialize)]\n\nstruct Splits {\n\n run_name: String,\n\n start_delay: f64,\n\n run_count: u32,\n\n splits: Vec<Split>,\n\n timer_type: u8,\n\n}\n\n\n", "file_path": "src/run/parser/splitty.rs", "rank": 78, "score": 130851.09183990018 }, { "content": "#[derive(Deserialize)]\n\nstruct Splits {\n\n game: Option<String>,\n\n category: Option<String>,\n\n record_time: Option<i32>,\n\n // best_time: Option<i32>, Completely unused, even by worstrun\n\n initial_delay: Option<i32>,\n\n splits: Option<Vec<Split>>,\n\n}\n\n\n", "file_path": "src/run/parser/worstrun.rs", "rank": 79, "score": 130851.09183990018 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Splits {\n\n /// The Game Information about this run\n\n game: GameInfo,\n\n /// The delay of how much time the timer should wait when starting a new run in 
milliseconds\n\n start_delay: Option<i64>,\n\n /// An array of segments which are associated to these splits\n\n segments: Vec<SplitterinoSegment>,\n\n // /// The timing-method which is used for the splits\n\n // timing: SplitterinoTimingMethod,\n\n}\n\n\n\n/// Timing methods which can be used for segment times\n", "file_path": "src/run/parser/splitterino.rs", "rank": 80, "score": 130850.73707455996 }, { "content": "pub fn create_run(names: &[&str]) -> Run {\n\n let mut run = Run::new();\n\n for &name in names {\n\n run.push_segment(Segment::new(name));\n\n }\n\n run\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 81, "score": 130172.255717295 }, { "content": "pub fn span(seconds: f64) -> TimeSpan {\n\n TimeSpan::from_seconds(seconds)\n\n}\n", "file_path": "src/tests_helper.rs", "rank": 82, "score": 130172.255717295 }, { "content": "pub fn start_run(timer: &mut Timer) {\n\n timer.set_current_timing_method(TimingMethod::GameTime);\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n timer.set_game_time(TimeSpan::zero());\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 83, "score": 130172.255717295 }, { "content": "pub fn create_timer(names: &[&str]) -> Timer {\n\n Timer::new(create_run(names)).unwrap()\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 84, "score": 130172.255717295 }, { "content": "pub fn parse_base<R, F, E>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n tag: &[u8],\n\n mut f: F,\n\n) -> Result<(), E>\n\nwhere\n\n R: BufRead,\n\n F: FnMut(&mut Reader<R>, Tag<'_>) -> Result<(), E>,\n\n E: From<Error>,\n\n{\n\n unsafe {\n\n let ptr_buf: *mut Vec<u8> = buf;\n\n loop {\n\n buf.clear();\n\n match reader\n\n .read_event(buf)\n\n .map_err(|error| Error::Xml { error })?\n\n {\n\n Event::Start(start) => {\n", "file_path": "src/xml_util.rs", "rank": 85, "score": 130172.255717295 }, { "content": "pub fn utc_now() -> DateTime<Utc> {\n\n Utc::now()\n\n}\n", "file_path": 
"src/platform/wasm/web/mod.rs", "rank": 86, "score": 129542.19198128107 }, { "content": "pub fn utc_now() -> DateTime<Utc> {\n\n let unix = SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .unwrap();\n\n DateTime::from_utc(\n\n NaiveDateTime::from_timestamp(unix.as_secs() as _, unix.subsec_nanos()),\n\n Utc,\n\n )\n\n}\n", "file_path": "src/platform/wasm/wasi/mod.rs", "rank": 87, "score": 129542.19198128107 }, { "content": "pub fn utc_now() -> DateTime<Utc> {\n\n unsafe {\n\n let mut date_time = MaybeUninit::uninit();\n\n Date_now(date_time.as_mut_ptr());\n\n let date_time = date_time.assume_init();\n\n DateTime::from_utc(\n\n NaiveDateTime::from_timestamp(date_time.secs, date_time.nsecs),\n\n Utc,\n\n )\n\n }\n\n}\n", "file_path": "src/platform/wasm/unknown/mod.rs", "rank": 88, "score": 129542.19198128107 }, { "content": "/// Calculates the PB chance for a timer. The chance is calculated in terms of\n\n/// the current attempt. If there is no attempt in progress it yields the same\n\n/// result as the PB chance for the run. 
The value is being reported as a\n\n/// floating point number in the range from 0 (0%) to 1 (100%).\n\npub fn for_timer(timer: &Timer) -> f64 {\n\n let method = timer.current_timing_method();\n\n let all_segments = timer.run().segments();\n\n\n\n let live_delta = super::check_live_delta(timer, false, comparison::personal_best::NAME, method);\n\n\n\n let (segments, current_time) = if live_delta.is_some() {\n\n // If there is a live delta, act as if we did just split.\n\n (\n\n &all_segments[timer.current_split_index().unwrap() + 1..],\n\n timer.current_time()[method].unwrap_or_default(),\n\n )\n\n } else if let Some((index, time)) = all_segments\n\n .iter()\n\n .enumerate()\n\n .rev()\n\n .find_map(|(i, s)| Some((i, s.split_time()[method]?)))\n\n {\n\n // Otherwise fall back to the the last split that we did split.\n\n (&all_segments[index + 1..], time)\n", "file_path": "src/analysis/pb_chance/mod.rs", "rank": 89, "score": 127850.98515459774 }, { "content": "#[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)]\n\nstruct Attempts {\n\n /// Histories is an array of previous attempts by this runner of this category.\n\n histories: Option<Vec<Attempt>>,\n\n /// Total holds the total number of attempts for this category.\n\n total: Option<u32>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 90, "score": 127833.85309195565 }, { "content": "#[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)]\n\nstruct Segment {\n\n #[serde(rename = \"bestDuration\")]\n\n best_duration: Option<Duration>,\n\n #[serde(rename = \"endedAt\")]\n\n ended_at: Option<RunTime>,\n\n /// Histories is an array of previous completions of this segment by this runner.\n\n histories: Option<Vec<SegmentHistoryElement>>,\n\n /// Is Reset should be true if the runner reset the run during this segment. 
If so, this and\n\n /// all future segments' Ended Ats for this run are ignored.\n\n #[serde(rename = \"isReset\")]\n\n is_reset: Option<bool>,\n\n /// Is Skipped should be true if the runner skipped over the split that ends this segment,\n\n /// rather than splitting. If so, this segment's Ended At is ignored.\n\n #[serde(rename = \"isSkipped\")]\n\n is_skipped: Option<bool>,\n\n /// Name is the runner-provided name of this segment\n\n name: Option<String>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 91, "score": 127833.85309195565 }, { "content": "#[serde(rename = \"duration\")]\n\n#[derive(Clone, PartialEq, Debug, Default, Deserialize, Serialize)]\n\nstruct Duration {\n\n /// Gametime (Milliseconds) is a duration of milliseconds in game-world time.\n\n #[serde(rename = \"gametimeMS\")]\n\n gametime_ms: Option<f64>,\n\n /// Realtime (Milliseconds) is a duration of milliseconds in real-world time.\n\n #[serde(rename = \"realtimeMS\")]\n\n realtime_ms: Option<f64>,\n\n}\n\n/// Run Time represents a moment inside a run, and indicates the duration of the run so far at that\n\n/// moment. 
It holds a realtime run duration so far and a gametime run duration so far.\n", "file_path": "src/run/parser/splits_io.rs", "rank": 92, "score": 127832.73110245162 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Attempt {\n\n /// Attempt Number is the number of lifetime attempts the runner will have made after this one.\n\n /// The Attempt Number for an attempt is a label, not an index; the first attempt for a\n\n /// category has an Attempt Number of 1 (not 0).\n\n #[serde(rename = \"attemptNumber\")]\n\n attempt_number: i64,\n\n duration: Option<Duration>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 93, "score": 127829.03070272866 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Timer {\n\n /// Longname is a human-readable timer name, intended for display to users.\n\n longname: String,\n\n /// Shortname is a machine-readable timer name, intended for use in APIs, databases, URLs, and\n\n /// filenames.\n\n shortname: String,\n\n /// Version is the version of the timer used to record this run. 
Semantic Versioning is\n\n /// strongly recommended but not enforced.\n\n version: String,\n\n /// Website is the URL for the timer's website.\n\n website: Option<String>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 94, "score": 127829.03070272866 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Category {\n\n /// Links specifies the category's identity in other services.\n\n links: Option<CategoryLinks>,\n\n /// Longname is a human-readable category name, intended for display to users.\n\n longname: String,\n\n /// Shortname is a machine-readable category name, intended for use in APIs, databases, URLs,\n\n /// and filenames.\n\n shortname: Option<String>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 95, "score": 127829.03070272866 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Game {\n\n /// Links specifies the game's identity in other services.\n\n links: Option<GameLinks>,\n\n /// Longname is a human-readable game name, intended for display to users.\n\n longname: String,\n\n /// Shortname is a machine-readable game name, intended for use in APIs, databases, URLs, and\n\n /// filenames.\n\n shortname: Option<String>,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 96, "score": 127829.03070272866 }, { "content": "#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]\n\nstruct Runner {\n\n /// Links specifies the runner's identity in other services.\n\n links: Option<RunnerLinks>,\n\n /// Longname is a human-readable runner name, intended for display to users.\n\n longname: Option<String>,\n\n /// Shortname is a machine-readable runner name, intended for use in APIs, databases, URLs, and\n\n /// filenames.\n\n shortname: String,\n\n}\n", "file_path": "src/run/parser/splits_io.rs", "rank": 97, "score": 127829.03070272866 }, { "content": "#[derive(Deserialize)]\n\nstruct Splits {\n\n title: String,\n\n category: String,\n\n attempts: 
u32,\n\n split_names: Vec<String>,\n\n golds: Option<Vec<Gold>>,\n\n personal_best: Option<Comparison>,\n\n world_record: Option<Comparison>,\n\n}\n\n\n", "file_path": "src/run/parser/flitter/mod.rs", "rank": 98, "score": 127813.74823320127 }, { "content": "#[derive(Deserialize)]\n\nstruct SplitsFormat {\n\n // version: String,\n\n splits: Splits,\n\n}\n\n\n\n/// Format in which splits are getting saved to file or should be transmitted\n", "file_path": "src/run/parser/splitterino.rs", "rank": 99, "score": 127813.74823320127 } ]
Rust
sqldb/rust/src/sqldb.rs
thomastaylor312/interfaces
621d0d88772db85591d8625cca82264799e6ef64
#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)] use async_trait::async_trait; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, io::Write, string::ToString}; use wasmbus_rpc::{ deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts, Timestamp, Transport, }; pub const SMITHY_VERSION: &str = "1.0"; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct Column { pub ordinal: u32, #[serde(default)] pub name: String, #[serde(rename = "dbType")] #[serde(default)] pub db_type: String, } pub type Columns = Vec<Column>; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct ExecuteResult { #[serde(rename = "rowsAffected")] pub rows_affected: u64, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<SqlDbError>, } #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct FetchResult { #[serde(rename = "numRows")] pub num_rows: u64, pub columns: Columns, #[serde(with = "serde_bytes")] #[serde(default)] pub rows: Vec<u8>, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<SqlDbError>, } pub type Query = String; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct SqlDbError { #[serde(default)] pub code: String, #[serde(default)] pub message: String, } #[async_trait] pub trait SqlDb { fn contract_id() -> &'static str { "wasmcloud:sqldb" } async fn execute(&self, ctx: &Context, arg: &Query) -> RpcResult<ExecuteResult>; async fn fetch(&self, ctx: &Context, arg: &Query) -> RpcResult<FetchResult>; } #[doc(hidden)] #[async_trait] pub trait SqlDbReceiver: MessageDispatch + SqlDb { async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> { match message.method { "Execute" => { let value: Query = deserialize(message.arg.as_ref()) .map_err(|e| RpcError::Deser(format!("message '{}': {}", message.method, e)))?; let resp = 
SqlDb::execute(self, ctx, &value).await?; let buf = serialize(&resp)?; Ok(Message { method: "SqlDb.Execute", arg: Cow::Owned(buf), }) } "Fetch" => { let value: Query = deserialize(message.arg.as_ref()) .map_err(|e| RpcError::Deser(format!("message '{}': {}", message.method, e)))?; let resp = SqlDb::fetch(self, ctx, &value).await?; let buf = serialize(&resp)?; Ok(Message { method: "SqlDb.Fetch", arg: Cow::Owned(buf), }) } _ => Err(RpcError::MethodNotHandled(format!( "SqlDb::{}", message.method ))), } } } #[derive(Debug)] pub struct SqlDbSender<T: Transport> { transport: T, } impl<T: Transport> SqlDbSender<T> { pub fn via(transport: T) -> Self { Self { transport } } pub fn set_timeout(&self, interval: std::time::Duration) { self.transport.set_timeout(interval); } } #[cfg(target_arch = "wasm32")] impl SqlDbSender<wasmbus_rpc::actor::prelude::WasmHost> { pub fn new() -> Self { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider("wasmcloud:sqldb", "default") .unwrap(); Self { transport } } pub fn new_with_link(link_name: &str) -> wasmbus_rpc::RpcResult<Self> { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider("wasmcloud:sqldb", link_name)?; Ok(Self { transport }) } } #[async_trait] impl<T: Transport + std::marker::Sync + std::marker::Send> SqlDb for SqlDbSender<T> { #[allow(unused)] async fn execute(&self, ctx: &Context, arg: &Query) -> RpcResult<ExecuteResult> { let buf = serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "SqlDb.Execute", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value = deserialize(&resp) .map_err(|e| RpcError::Deser(format!("response to {}: {}", "Execute", e)))?; Ok(value) } #[allow(unused)] async fn fetch(&self, ctx: &Context, arg: &Query) -> RpcResult<FetchResult> { let buf = serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "SqlDb.Fetch", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value = deserialize(&resp) .map_err(|e| RpcError::Deser(format!("response 
to {}: {}", "Fetch", e)))?; Ok(value) } }
#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)] use async_trait::async_trait; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, io::Write, string::ToString}; use wasmbus_rpc::{ deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts, Timestamp, Transport, }; pub const SMITHY_VERSION: &str = "1.0"; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct Column { pub ordinal: u32, #[serde(default)] pub name: String, #[serde(rename = "dbType")] #[serde(default)] pub db_type: String, } pub type Columns = Vec<Column>; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct ExecuteResult { #[serde(rename = "rowsAffected")] pub rows_affected: u64, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<SqlDbError>, } #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct FetchResult { #[serde(rename = "numRows")] pub num_rows: u64, pub columns: Columns, #[serde(with = "serde_bytes")] #[serde(default)] pub rows: Vec<u8>, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<SqlDbError>, } pub type Query = String; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct SqlDbError { #[serde(default)] pub code: String, #[serde(default)] pub message: String, } #[async_trait] pub trait SqlDb { fn contract_id() -> &'static str { "wasmcloud:sqldb" } async fn execute(&self, ctx: &Context, arg: &Query) -> RpcResult<ExecuteResult>; async fn fetch(&self, ctx: &Context, arg: &Query) -> RpcResult<FetchResult>; } #[doc(hidden)] #[async_trait] pub trait SqlDbReceiver: MessageDispatch + SqlDb { async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> { match message.method { "Execute" => { let value: Query = deserialize(message.arg.as_ref()) .map_err(|e| RpcError::Deser(format!("message '{}': {}", message.method, e)))?; let resp = 
SqlDb::execute(self, ctx, &value).await?; let buf = serialize(&resp)?; Ok(Message { method: "SqlDb.Execute", arg: Cow::Owned(buf), }) } "Fetch" => { let value: Query = deserialize(message.arg.as_ref()) .
} #[derive(Debug)] pub struct SqlDbSender<T: Transport> { transport: T, } impl<T: Transport> SqlDbSender<T> { pub fn via(transport: T) -> Self { Self { transport } } pub fn set_timeout(&self, interval: std::time::Duration) { self.transport.set_timeout(interval); } } #[cfg(target_arch = "wasm32")] impl SqlDbSender<wasmbus_rpc::actor::prelude::WasmHost> { pub fn new() -> Self { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider("wasmcloud:sqldb", "default") .unwrap(); Self { transport } } pub fn new_with_link(link_name: &str) -> wasmbus_rpc::RpcResult<Self> { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider("wasmcloud:sqldb", link_name)?; Ok(Self { transport }) } } #[async_trait] impl<T: Transport + std::marker::Sync + std::marker::Send> SqlDb for SqlDbSender<T> { #[allow(unused)] async fn execute(&self, ctx: &Context, arg: &Query) -> RpcResult<ExecuteResult> { let buf = serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "SqlDb.Execute", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value = deserialize(&resp) .map_err(|e| RpcError::Deser(format!("response to {}: {}", "Execute", e)))?; Ok(value) } #[allow(unused)] async fn fetch(&self, ctx: &Context, arg: &Query) -> RpcResult<FetchResult> { let buf = serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "SqlDb.Fetch", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value = deserialize(&resp) .map_err(|e| RpcError::Deser(format!("response to {}: {}", "Fetch", e)))?; Ok(value) } }
map_err(|e| RpcError::Deser(format!("message '{}': {}", message.method, e)))?; let resp = SqlDb::fetch(self, ctx, &value).await?; let buf = serialize(&resp)?; Ok(Message { method: "SqlDb.Fetch", arg: Cow::Owned(buf), }) } _ => Err(RpcError::MethodNotHandled(format!( "SqlDb::{}", message.method ))), } }
function_block-function_prefix_line
[ { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait KeyValueReceiver: MessageDispatch + KeyValue {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"Increment\" => {\n\n let value: IncrementRequest = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = KeyValue::increment(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.Increment\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"Contains\" => {\n\n let value: String = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = KeyValue::contains(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.Contains\",\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 1, "score": 149576.97155714795 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait MessagingReceiver: MessageDispatch + Messaging {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"Publish\" => {\n\n let value: PubMessage = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let _resp = Messaging::publish(self, ctx, &value).await?;\n\n let buf = Vec::new();\n\n Ok(Message {\n\n method: \"Messaging.Publish\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"Request\" => {\n\n let value: RequestMessage = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = Messaging::request(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"Messaging.Request\",\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 3, "score": 139884.82352588727 }, { 
"content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait MessageSubscriberReceiver: MessageDispatch + MessageSubscriber {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"HandleMessage\" => {\n\n let value: SubMessage = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let _resp = MessageSubscriber::handle_message(self, ctx, &value).await?;\n\n let buf = Vec::new();\n\n Ok(Message {\n\n method: \"MessageSubscriber.HandleMessage\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"MessageSubscriber::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 4, "score": 135873.83457582892 }, { "content": "#[async_trait]\n\npub trait Messaging {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:messaging\"\n\n }\n\n /// Publish - send a message\n\n /// The function returns after the message has been sent.\n\n /// If the sender expects to receive an asynchronous reply,\n\n /// the replyTo field should be filled with the\n\n /// subject for the response.\n\n async fn publish(&self, ctx: &Context, arg: &PubMessage) -> RpcResult<()>;\n\n /// Request - send a message in a request/reply pattern,\n\n /// waiting for a response.\n\n async fn request(&self, ctx: &Context, arg: &RequestMessage) -> RpcResult<ReplyMessage>;\n\n}\n\n\n\n/// MessagingReceiver receives messages defined in the Messaging service trait\n\n/// The Messaging interface describes a service\n\n/// that can deliver messages\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 5, "score": 132917.98404001538 }, { "content": "#[async_trait]\n\npub trait MessageSubscriber {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n 
\"wasmcloud:messaging\"\n\n }\n\n /// subscription handler\n\n async fn handle_message(&self, ctx: &Context, arg: &SubMessage) -> RpcResult<()>;\n\n}\n\n\n\n/// MessageSubscriberReceiver receives messages defined in the MessageSubscriber service trait\n\n/// The MessageSubscriber interface describes\n\n/// an actor interface that receives messages\n\n/// sent by the Messaging provider\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 6, "score": 130101.71865209108 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait TestingReceiver: MessageDispatch + Testing {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"Start\" => {\n\n let value: TestOptions = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = Testing::start(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"Testing.Start\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"Testing::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "testing/rust/src/testing.rs", "rank": 7, "score": 121598.21968870191 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait LoggingReceiver: MessageDispatch + Logging {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"WriteLog\" => {\n\n let value: LogEntry = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let _resp = Logging::write_log(self, ctx, &value).await?;\n\n let buf = Vec::new();\n\n Ok(Message {\n\n method: \"Logging.WriteLog\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"Logging::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "logging/rust/src/logging.rs", 
"rank": 8, "score": 121598.21968870191 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait FactorialReceiver: MessageDispatch + Factorial {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"Calculate\" => {\n\n let value: u32 = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = Factorial::calculate(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"Factorial.Calculate\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"Factorial::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "factorial/rust/src/factorial.rs", "rank": 9, "score": 121598.21968870191 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "sqldb/rust/build.rs", "rank": 10, "score": 118278.25615479329 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "messaging/rust/build.rs", "rank": 11, "score": 117993.23648280237 }, { "content": "#[async_trait]\n\npub trait KeyValue {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:keyvalue\"\n\n }\n\n /// Increments a numeric value, returning the new value\n\n async fn increment(&self, ctx: &Context, arg: &IncrementRequest) -> RpcResult<i32>;\n\n /// returns whether the store contains the key\n\n async fn contains<TS: ToString + ?Sized + std::marker::Sync>(\n\n &self,\n\n ctx: &Context,\n\n arg: &TS,\n\n ) -> RpcResult<bool>;\n\n /// Deletes a key, returning true if the key was deleted\n\n async fn del<TS: ToString + ?Sized + std::marker::Sync>(\n\n &self,\n\n ctx: &Context,\n\n arg: &TS,\n\n ) -> RpcResult<bool>;\n\n /// Gets a value for a 
specified key. If the key exists,\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 12, "score": 117876.01811400994 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait HttpClientReceiver: MessageDispatch + HttpClient {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"Request\" => {\n\n let value: HttpRequest = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = HttpClient::request(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"HttpClient.Request\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"HttpClient::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "httpclient/rust/src/httpclient.rs", "rank": 13, "score": 117045.38244420743 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait NumberGenReceiver: MessageDispatch + NumberGen {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"GenerateGuid\" => {\n\n let resp = NumberGen::generate_guid(self, ctx).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"NumberGen.GenerateGuid\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"RandomInRange\" => {\n\n let value: RangeLimit = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = NumberGen::random_in_range(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"NumberGen.RandomInRange\",\n\n arg: Cow::Owned(buf),\n\n })\n", "file_path": "numbergen/rust/src/numbergen.rs", "rank": 14, "score": 117045.38244420743 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait HttpServerReceiver: MessageDispatch + HttpServer {\n\n async fn dispatch(&self, ctx: &Context, 
message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"HandleRequest\" => {\n\n let value: HttpRequest = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = HttpServer::handle_request(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"HttpServer.HandleRequest\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"HttpServer::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "httpserver/rust/src/httpserver.rs", "rank": 15, "score": 117045.38244420743 }, { "content": "#[doc(hidden)]\n\n#[async_trait]\n\npub trait LatticeControllerReceiver: MessageDispatch + LatticeController {\n\n async fn dispatch(&self, ctx: &Context, message: &Message<'_>) -> RpcResult<Message<'_>> {\n\n match message.method {\n\n \"AuctionProvider\" => {\n\n let value: ProviderAuctionRequest = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = LatticeController::auction_provider(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"LatticeController.AuctionProvider\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"AuctionActor\" => {\n\n let value: ActorAuctionRequest = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = LatticeController::auction_actor(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"LatticeController.AuctionActor\",\n", "file_path": "lattice-control/rust/src/control.rs", "rank": 16, "score": 114975.25160647409 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "httpserver/rust/build.rs", "rank": 17, "score": 93743.63481885027 }, { 
"content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "logging/rust/build.rs", "rank": 18, "score": 93743.63481885027 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "httpclient/rust/build.rs", "rank": 19, "score": 93743.63481885027 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "keyvalue/rust/build.rs", "rank": 20, "score": 93743.63481885027 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "numbergen/rust/build.rs", "rank": 21, "score": 93743.63481885027 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "factorial/rust/build.rs", "rank": 22, "score": 93743.63481885027 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "testing/rust/build.rs", "rank": 23, "score": 93743.63481885027 }, { "content": "#[async_trait]\n\npub trait Testing {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:testing\"\n\n }\n\n /// Begin tests\n\n async fn start(&self, ctx: &Context, arg: &TestOptions) -> RpcResult<TestResults>;\n\n}\n\n\n\n/// TestingReceiver receives messages defined in the Testing service trait\n\n/// Test api for testable actors and providers\n", "file_path": "testing/rust/src/testing.rs", "rank": 24, "score": 92329.86119118461 }, { "content": "#[async_trait]\n\npub trait Factorial {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:example:factorial\"\n\n }\n\n /// Calculates the factorial 
(n!) of the input parameter\n\n async fn calculate(&self, ctx: &Context, arg: &u32) -> RpcResult<u64>;\n\n}\n\n\n\n/// FactorialReceiver receives messages defined in the Factorial service trait\n\n/// The Factorial service has a single method, calculate, which\n\n/// calculates the factorial of its whole number parameter.\n", "file_path": "factorial/rust/src/factorial.rs", "rank": 25, "score": 92329.86119118461 }, { "content": "#[async_trait]\n\npub trait Logging {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:builtin:logging\"\n\n }\n\n ///\n\n /// WriteLog - log a text message\n\n ///\n\n async fn write_log(&self, ctx: &Context, arg: &LogEntry) -> RpcResult<()>;\n\n}\n\n\n\n/// LoggingReceiver receives messages defined in the Logging service trait\n", "file_path": "logging/rust/src/logging.rs", "rank": 26, "score": 92329.86119118461 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "lattice-control/rust/build.rs", "rank": 27, "score": 92199.96431846035 }, { "content": "#[async_trait]\n\npub trait NumberGen {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:builtin:numbergen\"\n\n }\n\n ///\n\n /// GenerateGuid - return a 128-bit guid in the form 123e4567-e89b-12d3-a456-426655440000\n\n /// These guids are known as \"version 4\", meaning all bits are random or pseudo-random.\n\n ///\n\n async fn generate_guid(&self, ctx: &Context) -> RpcResult<String>;\n\n /// Request a random integer within a range\n\n /// The result will will be in the range [min,max), i.e., >= min and < max.\n\n async fn random_in_range(&self, ctx: &Context, arg: &RangeLimit) -> RpcResult<u32>;\n\n /// Request a 32-bit random number\n\n async fn random_32(&self, ctx: &Context) -> RpcResult<u32>;\n\n}\n\n\n\n/// NumberGenReceiver receives messages defined in the 
NumberGen service trait\n", "file_path": "numbergen/rust/src/numbergen.rs", "rank": 28, "score": 90252.09794067581 }, { "content": "#[async_trait]\n\npub trait HttpServer {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:httpserver\"\n\n }\n\n async fn handle_request(&self, ctx: &Context, arg: &HttpRequest) -> RpcResult<HttpResponse>;\n\n}\n\n\n\n/// HttpServerReceiver receives messages defined in the HttpServer service trait\n\n/// HttpServer is the contract to be implemented by actor\n", "file_path": "httpserver/rust/src/httpserver.rs", "rank": 29, "score": 90252.09794067581 }, { "content": "#[async_trait]\n\npub trait HttpClient {\n\n /// returns the capability contract id for this interface\n\n fn contract_id() -> &'static str {\n\n \"wasmcloud:httpclient\"\n\n }\n\n /// Issue outgoing http request\n\n async fn request(&self, ctx: &Context, arg: &HttpRequest) -> RpcResult<HttpResponse>;\n\n}\n\n\n\n/// HttpClientReceiver receives messages defined in the HttpClient service trait\n\n/// HttpClient - issue outgoing http requests via an external provider\n\n/// To use this capability, the actor must be linked\n\n/// with \"wasmcloud:httpclient\"\n", "file_path": "httpclient/rust/src/httpclient.rs", "rank": 30, "score": 90252.09794067581 }, { "content": "#[async_trait]\n\npub trait LatticeController {\n\n /// Seek out a list of suitable hosts for a capability provider given\n\n /// a set of host label constraints. 
Hosts on which this provider is already\n\n /// running will not be among the successful \"bidders\" in this auction.\n\n async fn auction_provider(\n\n &self,\n\n ctx: &Context,\n\n arg: &ProviderAuctionRequest,\n\n ) -> RpcResult<ProviderAuctionAcks>;\n\n /// Seek out a list of suitable hosts for an actor given a set of host\n\n /// label constraints.\n\n async fn auction_actor(\n\n &self,\n\n ctx: &Context,\n\n arg: &ActorAuctionRequest,\n\n ) -> RpcResult<ActorAuctionAcks>;\n\n /// Queries the list of hosts currently visible to the lattice. This is\n\n /// a \"gather\" operation and so can be influenced by short timeouts,\n\n /// network partition events, etc.\n\n async fn get_hosts(&self, ctx: &Context) -> RpcResult<Hosts>;\n", "file_path": "lattice-control/rust/src/control.rs", "rank": 31, "score": 88322.1893678221 }, { "content": "#[derive(Debug, Default, Actor, HealthResponder)]\n\n#[services(Actor, Testing)]\n\nstruct NumbergenTestActor {}\n\n\n\n#[async_trait]\n\nimpl Testing for NumbergenTestActor {\n\n async fn start(&self, _ctx: &Context, opts: &TestOptions) -> RpcResult<Vec<TestResult>> {\n\n console_log(&format!(\"numbergen test actor starting\"));\n\n let results = run_selected!(\n\n opts,\n\n numbergen_uuid,\n\n numbergen_rand32,\n\n numbergen_rand_range,\n\n logging_levels,\n\n );\n\n Ok(results)\n\n }\n\n}\n\n\n\n/// test uuid generation\n\nasync fn numbergen_uuid(_opt: &TestOptions) -> RpcResult<()> {\n\n // generate twice and confirm they aren't the same\n", "file_path": "tests/actor/builtins/src/lib.rs", "rank": 32, "score": 50551.747182865394 }, { "content": "# SQL Database\n\n\n\nThis interface defines a basic SQL Database \n\nprovider with the capability contract wasmcloud:sqldb.\n\n\n\nThe initial version of this interface (0.1) supports\n\nexecuting sql queries (inserts, update, create table, etc.)\n\nand fetching data (select).\n\n\n\nThe api is intended to be independent of any specific relational database implementation\n\n(postgres, 
mysql, mariadb, sqlite, etc.).\n\n\n\nFor efficiency, query results are encoded in Compact Binary Object\n\nRepresentation [CBOR](https://cbor.io), a language-neutral format.\n\nCBOR is designed to be an extensible, language-neutral,\n\nabout 50-70% denser than JSON, and suitable for constrained\n\nenvironments (low cpu and memory requirements). Parsers are simple to\n\nwrite, and libraries are available in [several languages](https://cbor.io/impls.html).\n\n\n\nThis interface is **pre-release and subject to change**.\n\nThe following features are currently unsupported:\n\n- nullable fields\n\n- transactions\n\n- prepared statements\n\n- streaming results\n", "file_path": "sqldb/README.md", "rank": 33, "score": 44672.012346813455 }, { "content": "# Messaging Interface\n\nThis is the interface for the `wasmcloud:messaging` contract. This contract is a very simple abstraction over the concept of a message broker. This contract does not have controls or knobs or settings for things like delivery guarantees (e.g. \"at most once\" vs \"at least once\"), persistence of messages, etc. Such details are the responsibility of individual providers.\n\n\n\n## Implementations\n\nThe following is a list of known implementations of the `wasmcloud:messaging` contract. 
Feel free to submit a PR if you know of additional providers.\n\n\n\n| Name | Vendor | Description |\n\n| :---: | :---: | :--- |\n", "file_path": "messaging/README.md", "rank": 34, "score": 44410.873044996435 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// A message to be published\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct PubMessage {\n\n /// The subject, or topic, of the message\n\n #[serde(default)]\n\n pub subject: String,\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 44, "score": 38432.26172373418 }, { "content": " }\n\n}\n\n#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> MessageSubscriber\n\n for MessageSubscriberSender<T>\n\n{\n\n #[allow(unused)]\n\n /// subscription handler\n\n async fn handle_message(&self, ctx: &Context, arg: &SubMessage) -> RpcResult<()> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"MessageSubscriber.HandleMessage\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 45, "score": 38420.740741962785 }, { "content": " let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"Messaging.Publish\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n Ok(())\n\n }\n\n #[allow(unused)]\n\n /// Request - send a message in a request/reply pattern,\n\n /// waiting for a 
response.\n\n async fn request(&self, ctx: &Context, arg: &RequestMessage) -> RpcResult<ReplyMessage> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 46, "score": 38420.12952340904 }, { "content": " }\n\n\n\n /// Constructs a client for sending to a Messaging provider\n\n /// implementing the 'wasmcloud:messaging' capability contract, with the specified link name\n\n pub fn new_with_link(link_name: &str) -> wasmbus_rpc::RpcResult<Self> {\n\n let transport =\n\n wasmbus_rpc::actor::prelude::WasmHost::to_provider(\"wasmcloud:messaging\", link_name)?;\n\n Ok(Self { transport })\n\n }\n\n}\n\n#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> Messaging for MessagingSender<T> {\n\n #[allow(unused)]\n\n /// Publish - send a message\n\n /// The function returns after the message has been sent.\n\n /// If the sender expects to receive an asynchronous reply,\n\n /// the replyTo field should be filled with the\n\n /// subject for the response.\n\n async fn publish(&self, ctx: &Context, arg: &PubMessage) -> RpcResult<()> {\n\n let buf = serialize(arg)?;\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 47, "score": 38418.16674826471 }, { "content": "\n\n/// Message received as part of a subscription\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct SubMessage {\n\n /// The subject, or topic, of the message\n\n #[serde(default)]\n\n pub subject: String,\n\n /// An optional topic on which the reply should be sent.\n\n #[serde(rename = \"replyTo\")]\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub reply_to: Option<String>,\n\n /// The message payload\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(default)]\n\n pub body: Vec<u8>,\n\n}\n\n\n\n/// The Messaging interface describes a service\n\n/// that can deliver messages\n\n/// wasmbus.contractId: wasmcloud:messaging\n\n/// 
wasmbus.providerReceive\n\n#[async_trait]\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 48, "score": 38415.53278224921 }, { "content": " .send(\n\n ctx,\n\n Message {\n\n method: \"Messaging.Request\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Request\", e)))?;\n\n Ok(value)\n\n }\n\n}\n\n\n\n/// The MessageSubscriber interface describes\n\n/// an actor interface that receives messages\n\n/// sent by the Messaging provider\n\n/// wasmbus.contractId: wasmcloud:messaging\n\n/// wasmbus.actorReceive\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 49, "score": 38413.97604474015 }, { "content": " /// The message payload\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(default)]\n\n pub body: Vec<u8>,\n\n}\n\n\n\n/// Message sent as part of a request, with timeout\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct RequestMessage {\n\n /// The subject, or topic, of the message\n\n #[serde(default)]\n\n pub subject: String,\n\n /// The message payload\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(default)]\n\n pub body: Vec<u8>,\n\n /// A timeout, in milliseconds\n\n #[serde(rename = \"timeoutMs\")]\n\n pub timeout_ms: u32,\n\n}\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 50, "score": 38413.50963856644 }, { "content": " arg: Cow::Owned(buf),\n\n })\n\n }\n\n _ => Err(RpcError::MethodNotHandled(format!(\n\n \"Messaging::{}\",\n\n message.method\n\n ))),\n\n }\n\n }\n\n}\n\n\n\n/// MessagingSender sends messages to a Messaging service\n\n/// The Messaging interface describes a service\n\n/// that can deliver messages\n\n/// client for sending Messaging messages\n\n#[derive(Debug)]\n\npub struct MessagingSender<T: Transport> {\n\n transport: T,\n\n}\n\n\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 51, "score": 38413.41486888608 }, { "content": " /// An 
optional topic on which the reply should be sent.\n\n #[serde(rename = \"replyTo\")]\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub reply_to: Option<String>,\n\n /// The message payload\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(default)]\n\n pub body: Vec<u8>,\n\n}\n\n\n\n/// Reply received from a Request operation\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct ReplyMessage {\n\n /// The subject, or topic, of the message\n\n #[serde(default)]\n\n pub subject: String,\n\n /// An optional topic on which the reply should be sent.\n\n #[serde(rename = \"replyTo\")]\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub reply_to: Option<String>,\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 52, "score": 38412.01199352127 }, { "content": "}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nimpl<'send> MessageSubscriberSender<wasmbus_rpc::provider::ProviderTransport<'send>> {\n\n /// Constructs a Sender using an actor's LinkDefinition,\n\n /// Uses the provider's HostBridge for rpc\n\n pub fn for_actor(ld: &'send wasmbus_rpc::core::LinkDefinition) -> Self {\n\n Self {\n\n transport: wasmbus_rpc::provider::ProviderTransport::new(ld, None),\n\n }\n\n }\n\n}\n\n#[cfg(target_arch = \"wasm32\")]\n\nimpl MessageSubscriberSender<wasmbus_rpc::actor::prelude::WasmHost> {\n\n /// Constructs a client for actor-to-actor messaging\n\n /// using the recipient actor's public key\n\n pub fn to_actor(actor_id: &str) -> Self {\n\n let transport =\n\n wasmbus_rpc::actor::prelude::WasmHost::to_actor(actor_id.to_string()).unwrap();\n\n Self { transport }\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 53, "score": 38405.60494528799 }, { "content": "\n\n/// MessageSubscriberSender sends messages to a MessageSubscriber service\n\n/// The MessageSubscriber interface describes\n\n/// an actor interface that receives messages\n\n/// sent by the Messaging provider\n\n/// client for 
sending MessageSubscriber messages\n\n#[derive(Debug)]\n\npub struct MessageSubscriberSender<T: Transport> {\n\n transport: T,\n\n}\n\n\n\nimpl<T: Transport> MessageSubscriberSender<T> {\n\n /// Constructs a MessageSubscriberSender with the specified transport\n\n pub fn via(transport: T) -> Self {\n\n Self { transport }\n\n }\n\n\n\n pub fn set_timeout(&self, interval: std::time::Duration) {\n\n self.transport.set_timeout(interval);\n\n }\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 54, "score": 38403.87333227894 }, { "content": "impl<T: Transport> MessagingSender<T> {\n\n /// Constructs a MessagingSender with the specified transport\n\n pub fn via(transport: T) -> Self {\n\n Self { transport }\n\n }\n\n\n\n pub fn set_timeout(&self, interval: std::time::Duration) {\n\n self.transport.set_timeout(interval);\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nimpl MessagingSender<wasmbus_rpc::actor::prelude::WasmHost> {\n\n /// Constructs a client for sending to a Messaging provider\n\n /// implementing the 'wasmcloud:messaging' capability contract, with the \"default\" link\n\n pub fn new() -> Self {\n\n let transport =\n\n wasmbus_rpc::actor::prelude::WasmHost::to_provider(\"wasmcloud:messaging\", \"default\")\n\n .unwrap();\n\n Self { transport }\n", "file_path": "messaging/rust/src/messaging.rs", "rank": 55, "score": 38401.20678558279 }, { "content": "// build.rs - build smithy models into rust sources at compile tile\n\n\n\n// path to codegen.toml relative to location of Cargo.toml\n\nconst CONFIG: &str = \"../codegen.toml\";\n\n\n", "file_path": "sqldb/rust/build.rs", "rank": 56, "score": 31970.66524758494 }, { "content": "const CONFIG: &str = \"../codegen.toml\";\n\n\n", "file_path": "messaging/rust/build.rs", "rank": 57, "score": 31599.970220159652 }, { "content": "//! 
- prepared statements\n\n//!\n\n\n\nmod sqldb;\n\npub use sqldb::*;\n\n// re-export minicbor\n\npub use minicbor;\n\n\n\nimpl SqlDbError {\n\n pub fn new<T: ToString>(code: T, message: String) -> SqlDbError {\n\n SqlDbError {\n\n code: code.to_string(),\n\n message,\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for SqlDbError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"SqlDbError {}: {}\", &self.code, &self.message)\n", "file_path": "sqldb/rust/src/lib.rs", "rank": 58, "score": 30665.421392323828 }, { "content": " }\n\n}\n\n\n\nimpl From<minicbor::decode::Error> for SqlDbError {\n\n fn from(e: minicbor::decode::Error) -> SqlDbError {\n\n SqlDbError {\n\n code: \"decoding\".to_string(),\n\n message: e.to_string(),\n\n }\n\n }\n\n}\n\n\n\nuse wasmbus_rpc::RpcError;\n\nimpl From<SqlDbError> for RpcError {\n\n fn from(e: SqlDbError) -> RpcError {\n\n RpcError::Other(format!(\"SqlDb error {}: {}\", e.code, e.message))\n\n }\n\n}\n\n\n\nimpl From<RpcError> for SqlDbError {\n\n fn from(e: RpcError) -> SqlDbError {\n\n SqlDbError::new(\"rpc\", e.to_string())\n\n }\n\n}\n", "file_path": "sqldb/rust/src/lib.rs", "rank": 59, "score": 30656.975174758623 }, { "content": "//! This library provides the Rust implementation\n\n//! of the wasmcloud SQL database capability contract wasmcloud:sqldb.\n\n//!\n\n//! The initial version of this interface supports\n\n//! executing sql queries (inserts, update, create table, etc.)\n\n//! and fetching data (select).\n\n//!\n\n//! The api is intended to be independent of any specific relational database implementation\n\n//! (postgres, mysql, mariadb, sqlite, etc.).\n\n//!\n\n//! For efficiency, query results are encoded in Compact Binary Object\n\n//! Representation [CBOR](https://cbor.io), a language-neutral format.\n\n//! CBOR is designed to be an extensible, language-neutral,\n\n//! about 50-70% denser than JSON, and suitable for constrained\n\n//! 
environments (low cpu and memory requirements). Parsers are simple to\n\n//! write, and libraries are available in [several languages](https://cbor.io/impls.html).\n\n//!\n\n//! This interface currently does not support:\n\n//! - transactions\n\n//! - streaming results\n", "file_path": "sqldb/rust/src/lib.rs", "rank": 60, "score": 30649.661201139083 }, { "content": "//! org.wasmcloud.interface.messaging\n\n\n\nmod messaging;\n\npub use messaging::*;\n", "file_path": "messaging/rust/src/lib.rs", "rank": 61, "score": 30296.44264842644 }, { "content": "# wasmCloud API Interfaces\n\nThis repository contains the wasmCloud contract interface definitions (defined in the _Smithy_ IDL) for those interfaces that are defined and supported by the wasmCloud team. These interfaces are definitely not the _only_ interfaces available, as teams and companies can create their own private or bespoke interfaces as desired.\n\n\n\n## Smithy IDLs and Shared Libraries\n\n\n\nEach interface is defined in a file with the `.smithy` extension. If\n\nthe folder contains a `codegen.toml` file,\n\na library and/or html documentation can be\n\nautomatically generated from the `.smithy` file. 
\n\n\n\nMore information on code\n\ngeneration and the `codegen.toml` files is in the [weld\n\ncrate](https://github.com/wasmcloud/weld)\n\n\n\nThe `docs` folder in this repository is published to github pages at\n\n[Wasmcloud Interfaces](https://wasmcloud.github.io/interfaces/), and\n\ncontains copies of the interfaces available for direct download, and\n\nhtml generated documentation.\n\n\n\nFor more information on Smithy, see\n\n - [Smithy](https://awslabs.github.io/smithy/index.html) A language for\n\n defining services and SDKs\n\n - [IDL specification](https://awslabs.github.io/smithy/1.0/spec/core/idl.html)\n\n\n\nFor more on wasmcloud, see\n\n - [wasmCloud](https://wasmcloud.dev)\n\n\n", "file_path": "README.md", "rank": 62, "score": 23050.71856668323 }, { "content": "# HTTP Server Interface\n\nThis is the interface for an HTTP Server capability with the contract ID `wasmcloud:httpserver`\n\n\n\nThis folder contains \n\n- Model definition for `wasmcloud:httpserver`\n\n- Generated documentation (in HTML)\n\n- Generated Rust library (in Rust)\n\n\n\nAny Rust actor or capability provider using `wasmcloud:httpserver` should rely upon this library. A capability provider implements the trait `HttpServerReceiver`.\n\n\n\n## Implementations\n\nThe following is a list of known implementations of the HTTP server interface. Feel free to submit a PR if you know of others.\n\n\n\n| Name | Vendor | Description |\n\n| :---: | :---: | :--- |\n\n| [Default Server](https://github.com/wasmCloud/capability-providers/tree/main/httpserver-rs) | wasmCloud | wasmCloud Default HTTP Server Provider\n\n\n\n\n", "file_path": "httpserver/README.md", "rank": 63, "score": 22361.203814733643 }, { "content": "# HTTP Client\n\nThis is the interface definition for the interface with the contract ID `wasmcloud:httpclient`.\n\n\n\nActors utilizing this interface can make HTTP requests and receive HTTP responses for processing. 
Since this is just an interface, and not an actual provider, you will need to check the documentation for individual provider implementations for a list of link definition values supported by that provider.\n\n\n\n## Implementations\n\nThe following is a list of implementations of the HTTP client contract. Feel free to submit a PR adding your implementation if you have a community/open source version.\n\n\n\n| Name | Vendor | Description |\n\n| :---: | :---: | :--- |\n", "file_path": "httpclient/README.md", "rank": 64, "score": 22359.364372513523 }, { "content": "# Key Value Interface\n\nThis is the key-value interface with the contract ID of `wasmcloud:keyvalue`. This interface defines a set of common operations for interacting with key-value stores. \n\n\n\nNote that things like consistency guarantees, backup, failover support, replications, and more are all concerns specific to individual providers and not the interface itself.\n\n\n\n## Implementations\n\nThe following is a list of known implementations of the `wasmcloud:keyvalue` interface. Feel free to submit a PR if you know of more.\n\n\n\n| Name | Vendor | Description |\n\n| :---: | :---: | :--- |\n", "file_path": "keyvalue/README.md", "rank": 65, "score": 22358.498404188118 }, { "content": "# Factorial Interface\n\nThis is the definition for the interface used for examples and illustrations with the contract ID of `wasmcloud:example:factorial`.\n\n\n\nThis is an interface for a simple service that calculates the factorial of a whole number. 
\n\n\n\n**NOTE** that this is just an example, and we would not recommend a real-world production scenario where you use an interface and accompanying capability provider for factorial calculations.\n", "file_path": "factorial/README.md", "rank": 66, "score": 22356.48302169161 }, { "content": "# Number Generator\n\nThis is the interface definition for the wasmCloud built-in interface that is guaranteed to be supported by all runtime hosts, `wasmcloud:builtin:numbergen`. The number generator interface provides for the creation of things like random numbers, random numbers within a given range, and globally unique identifiers (GUIDs).\n\n\n\n## Implementations\n", "file_path": "numbergen/README.md", "rank": 67, "score": 22353.031405055513 }, { "content": "# wasmCloud Testing Interface\n\nThis is the interface definition for the `wasmcloud:testing` contract.\n\n\n", "file_path": "testing/README.md", "rank": 68, "score": 22353.031405055513 }, { "content": "Placeholder - image reference info will go here\n", "file_path": "docs/README.md", "rank": 69, "score": 22353.031405055513 }, { "content": "# wasmCloud Builtin Logging Interface\n\nThis interface defines the wasmCloud built-in logging interface (`wasmcloud:builtin:logging`) that comes with each of our supported host runtimes. 
If you are looking for a different kind of logging, then that will likely be covered in a different interface.\n\n\n\n## Implementations\n\n\n", "file_path": "logging/README.md", "rank": 70, "score": 22353.031405055513 }, { "content": "## wasmCloud Core Interface\n\nAll other interfaces rely upon shared definitions and structures defined within the core.\n\n\n\nThis folder contains core interfaces\n\n- [wasmcloud-model.smithy](./wasmcloud-model.smithy) (namespace org.wasmcloud.model)\n\n- [wasmcloud-core.smithy](./wasmcloud-core.smithy) (namespace org.wasmcloud.core)\n", "file_path": "core/README.md", "rank": 71, "score": 22353.031405055513 }, { "content": "![Crates.io](https://img.shields.io/crates/v/wasmcloud-interface-lattice-control)\n\n[![Documentation](https://img.shields.io/badge/Docs-Documentation-blue)](https://wasmcloud.dev)\n\n[![Rustdocs](https://docs.rs/lattice-control-interface/badge.svg)](https://docs.rs/wasmcloud-interface-lattice-control)\n\n\n\n# Lattice Control Interface\n\nThe lattice control interface is a smithy-defined interface contract that is expected to be consumed in one of two different ways:\n\n\n\n* Directly - A [NATS client](https://github.com/wasmcloud/control-interface-client) library may use the data structures from this interface to communicate over the lattice control interface topic\n\n* Indirectly - Either side of the `wasmcloud:latticecontrol` contract\n\n * Capability Providers - Capability providers can simply provide a wrapper around the NATS client, exposing lattice control functionality to actors\n\n * Actors - Actors can make use of this crate as they would any other wasmCloud interface crate\n\n\n", "file_path": "lattice-control/README.md", "rank": 72, "score": 21699.963865587353 }, { "content": "# Actor implementations of interface tests\n\n\n\n# The tests in this subdirectory are all implemented\n\n# as webassembly actors\n", "file_path": "tests/actor/README.md", "rank": 73, "score": 21697.62259028742 }, { "content": 
"This interface defines the wasmcloud:testing api contract.\n\n\n\nThis api is currently in development\n", "file_path": "testing/rust/README.md", "rank": 74, "score": 21697.62259028742 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// Response to get request\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct GetResponse {\n\n /// the value, if it existed\n\n #[serde(default)]\n\n pub value: String,\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 75, "score": 43.17611233036932 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// Input range for RandomInRange, inclusive. 
Result will be >= min and <= max\n\n/// Example:\n\n/// random_in_range(RangeLimit{0,4}) returns one the values, 0, 1, 2, 3, or 4.\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct RangeLimit {\n\n pub min: u32,\n\n pub max: u32,\n\n}\n\n\n\n/// wasmbus.contractId: wasmcloud:builtin:numbergen\n\n/// wasmbus.providerReceive\n\n#[async_trait]\n", "file_path": "numbergen/rust/src/numbergen.rs", "rank": 76, "score": 41.19971042816496 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct LogEntry {\n\n /// severity level: debug,info,warn,error\n\n #[serde(default)]\n\n pub level: String,\n\n /// message to log\n\n #[serde(default)]\n\n pub text: String,\n\n}\n\n\n\n/// wasmbus.contractId: wasmcloud:builtin:logging\n\n/// wasmbus.providerReceive\n\n#[async_trait]\n", "file_path": "logging/rust/src/logging.rs", "rank": 77, "score": 40.7819467776909 }, { "content": " link_name,\n\n )?;\n\n Ok(Self { transport })\n\n }\n\n}\n\n#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> Factorial for FactorialSender<T> {\n\n #[allow(unused)]\n\n /// Calculates the factorial (n!) 
of the input parameter\n\n async fn calculate(&self, ctx: &Context, arg: &u32) -> RpcResult<u64> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"Factorial.Calculate\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Calculate\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "factorial/rust/src/factorial.rs", "rank": 78, "score": 40.13377662864238 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// One of a potential list of responses to an actor auction\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct ActorAuctionAck {\n\n /// The original actor reference used for the auction\n\n #[serde(default)]\n\n pub actor_ref: String,\n", "file_path": "lattice-control/rust/src/control.rs", "rank": 79, "score": 39.19121778797651 }, { "content": " arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"SetQuery\" => {\n\n let value: String = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = KeyValue::set_query(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.SetQuery\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"SetUnion\" => {\n\n let value: StringList = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", 
message.method, e)))?;\n\n let resp = KeyValue::set_union(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.SetUnion\",\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 80, "score": 37.602906517510164 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// map data structure for holding http headers\n\n///\n\npub type HeaderMap = std::collections::HashMap<String, HeaderValues>;\n\n\n\npub type HeaderValues = Vec<String>;\n\n\n", "file_path": "httpserver/rust/src/httpserver.rs", "rank": 81, "score": 36.36494344284316 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// map data structure for holding http headers\n\n///\n\npub type HeaderMap = std::collections::HashMap<String, HeaderValues>;\n\n\n\npub type HeaderValues = Vec<String>;\n\n\n", "file_path": "httpclient/rust/src/httpclient.rs", "rank": 82, "score": 36.36494344284316 }, { "content": " /// returns whether the store contains the key\n\n async fn contains<TS: ToString + ?Sized + std::marker::Sync>(\n\n 
&self,\n\n ctx: &Context,\n\n arg: &TS,\n\n ) -> RpcResult<bool> {\n\n let buf = serialize(&arg.to_string())?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.Contains\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Contains\", e)))?;\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 83, "score": 36.17265279553762 }, { "content": " /// clears all values from the set and removes it\n\n /// input: set name\n\n /// output: true if the set existed and was deleted\n\n async fn set_clear<TS: ToString + ?Sized + std::marker::Sync>(\n\n &self,\n\n ctx: &Context,\n\n arg: &TS,\n\n ) -> RpcResult<bool> {\n\n let buf = serialize(&arg.to_string())?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.SetClear\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"SetClear\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 84, "score": 36.015052273099286 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// A map of test options.\n\n/// Keys may be test case names, or other keys meaningful for the test.\n\n/// Values are utf8 strings containing serialized json, with contents specific to the test\n\npub type OptMap = 
std::collections::HashMap<String, String>;\n\n\n\n/// list of regex patterns\n", "file_path": "testing/rust/src/testing.rs", "rank": 85, "score": 35.58643825081176 }, { "content": " arg: &TS,\n\n ) -> RpcResult<StringList> {\n\n let buf = serialize(&arg.to_string())?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.SetQuery\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"SetQuery\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// perform union of sets and returns values from the union\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 86, "score": 35.386327399284134 }, { "content": " arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"Del\" => {\n\n let value: String = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = KeyValue::del(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.Del\",\n\n arg: Cow::Owned(buf),\n\n })\n\n }\n\n \"Get\" => {\n\n let value: String = deserialize(message.arg.as_ref())\n\n .map_err(|e| RpcError::Deser(format!(\"message '{}': {}\", message.method, e)))?;\n\n let resp = KeyValue::get(self, ctx, &value).await?;\n\n let buf = serialize(&resp)?;\n\n Ok(Message {\n\n method: \"KeyValue.Get\",\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 87, "score": 35.20620670114815 }, { "content": " method: \"KeyValue.Get\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Get\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// Append a value onto the end of a list. 
Returns the new list size\n\n async fn list_add(&self, ctx: &Context, arg: &ListAddRequest) -> RpcResult<u32> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.ListAdd\",\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 88, "score": 35.16232915963029 }, { "content": " /// input: list of sets for performing union (at least two)\n\n /// output: union of values\n\n async fn set_union(&self, ctx: &Context, arg: &StringList) -> RpcResult<StringList> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.SetUnion\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"SetUnion\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 89, "score": 35.003888909080075 }, { "content": " ctx,\n\n Message {\n\n method: \"KeyValue.SetDel\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"SetDel\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// perform intersection of sets and returns values from the intersection.\n\n /// input: list of sets for performing intersection (at least two)\n\n /// output: values\n\n async fn set_intersection(&self, ctx: &Context, arg: &StringList) -> RpcResult<StringList> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 90, "score": 34.550933863120065 }, { "content": "#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> Testing for TestingSender<T> {\n\n #[allow(unused)]\n\n /// Begin tests\n\n async fn start(&self, ctx: &Context, arg: &TestOptions) -> RpcResult<TestResults> {\n\n let buf = serialize(arg)?;\n\n 
let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"Testing.Start\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Start\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "testing/rust/src/testing.rs", "rank": 91, "score": 34.49213327564784 }, { "content": " .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.SetAdd\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"SetAdd\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// Deletes an item from the set. Returns number of items removed from the set (1 or 0)\n\n async fn set_del(&self, ctx: &Context, arg: &SetDelRequest) -> RpcResult<u32> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 92, "score": 34.27455613007897 }, { "content": "// This file is generated automatically using wasmcloud/weld-codegen and smithy model definitions\n\n//\n\n\n\n#![allow(unused_imports, clippy::ptr_arg, clippy::needless_lifetimes)]\n\nuse async_trait::async_trait;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{borrow::Cow, io::Write, string::ToString};\n\nuse wasmbus_rpc::{\n\n deserialize, serialize, Context, Message, MessageDispatch, RpcError, RpcResult, SendOpts,\n\n Timestamp, Transport,\n\n};\n\n\n\npub const SMITHY_VERSION: &str = \"1.0\";\n\n\n\n/// The Factorial service has a single method, calculate, which\n\n/// calculates the factorial of its whole number parameter.\n\n/// wasmbus.contractId: wasmcloud:example:factorial\n\n/// wasmbus.providerReceive\n\n/// wasmbus.actorReceive\n\n#[async_trait]\n", "file_path": "factorial/rust/src/factorial.rs", "rank": 93, "score": 33.899921843741765 }, { "content": " Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// Request a 
32-bit random number\n\n async fn random_32(&self, ctx: &Context) -> RpcResult<u32> {\n\n let buf = *b\"\";\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"NumberGen.Random32\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Random32\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "numbergen/rust/src/numbergen.rs", "rank": 94, "score": 33.80144185741178 }, { "content": "#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> HttpServer for HttpServerSender<T> {\n\n #[allow(unused)]\n\n async fn handle_request(&self, ctx: &Context, arg: &HttpRequest) -> RpcResult<HttpResponse> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"HttpServer.HandleRequest\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"HandleRequest\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "httpserver/rust/src/httpserver.rs", "rank": 95, "score": 33.645759240826095 }, { "content": " #[allow(unused)]\n\n /// Increments a numeric value, returning the new value\n\n async fn increment(&self, ctx: &Context, arg: &IncrementRequest) -> RpcResult<i32> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.Increment\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Increment\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 96, "score": 33.59451129078947 }, { "content": " let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: 
\"KeyValue.Set\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n Ok(())\n\n }\n\n #[allow(unused)]\n\n /// Add an item into a set. Returns number of items added (1 or 0)\n\n async fn set_add(&self, ctx: &Context, arg: &SetAddRequest) -> RpcResult<u32> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 97, "score": 33.59076696475198 }, { "content": "}\n\n#[async_trait]\n\nimpl<T: Transport + std::marker::Sync + std::marker::Send> HttpClient for HttpClientSender<T> {\n\n #[allow(unused)]\n\n /// Issue outgoing http request\n\n async fn request(&self, ctx: &Context, arg: &HttpRequest) -> RpcResult<HttpResponse> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"HttpClient.Request\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"Request\", e)))?;\n\n Ok(value)\n\n }\n\n}\n", "file_path": "httpclient/rust/src/httpclient.rs", "rank": 98, "score": 33.44085862183445 }, { "content": " .transport\n\n .send(\n\n ctx,\n\n Message {\n\n method: \"KeyValue.ListClear\",\n\n arg: Cow::Borrowed(&buf),\n\n },\n\n None,\n\n )\n\n .await?;\n\n let value = deserialize(&resp)\n\n .map_err(|e| RpcError::Deser(format!(\"response to {}: {}\", \"ListClear\", e)))?;\n\n Ok(value)\n\n }\n\n #[allow(unused)]\n\n /// Deletes a value from a list. Returns true if the item was removed.\n\n async fn list_del(&self, ctx: &Context, arg: &ListDelRequest) -> RpcResult<bool> {\n\n let buf = serialize(arg)?;\n\n let resp = self\n\n .transport\n", "file_path": "keyvalue/rust/src/keyvalue.rs", "rank": 99, "score": 33.37864021298358 } ]
Rust
src/usbdcd/control/mod.rs
Meptl/mk20d7
c7cd01cf55214b0b53fae7c7672607e739b64663
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CONTROL { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `IF`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IFR { #[doc = "No interrupt is pending."] _0, #[doc = "An interrupt is pending."] _1, } impl IFR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { IFR::_0 => false, IFR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> IFR { match value { false => IFR::_0, true => IFR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == IFR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == IFR::_1 } } #[doc = "Possible values of the field `IE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IER { #[doc = "Disable interrupts to the system."] _0, #[doc = "Enable interrupts to the system."] _1, } impl IER { #[doc = r" Returns `true` if 
the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { IER::_0 => false, IER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> IER { match value { false => IER::_0, true => IER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == IER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == IER::_1 } } #[doc = "Values that can be written to the field `IACK`"] pub enum IACKW { #[doc = "Do not clear the interrupt."] _0, #[doc = "Clear the IF bit (interrupt flag)."] _1, } impl IACKW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { IACKW::_0 => false, IACKW::_1 => true, } } } #[doc = r" Proxy"] pub struct _IACKW<'a> { w: &'a mut W, } impl<'a> _IACKW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: IACKW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not clear the interrupt."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(IACKW::_0) } #[doc = "Clear the IF bit (interrupt flag)."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(IACKW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `IE`"] pub enum IEW { #[doc = "Disable interrupts to the system."] _0, #[doc = 
"Enable interrupts to the system."] _1, } impl IEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { IEW::_0 => false, IEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _IEW<'a> { w: &'a mut W, } impl<'a> _IEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: IEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Disable interrupts to the system."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(IEW::_0) } #[doc = "Enable interrupts to the system."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(IEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `START`"] pub enum STARTW { #[doc = "Do not start the sequence. Writes of this value have no effect."] _0, #[doc = "Initiate the charger detection sequence. If the sequence is already running, writes of this value have no effect."] _1, } impl STARTW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { STARTW::_0 => false, STARTW::_1 => true, } } } #[doc = r" Proxy"] pub struct _STARTW<'a> { w: &'a mut W, } impl<'a> _STARTW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: STARTW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not start the sequence. Writes of this value have no effect."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(STARTW::_0) } #[doc = "Initiate the charger detection sequence. 
If the sequence is already running, writes of this value have no effect."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(STARTW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `SR`"] pub enum SRW { #[doc = "Do not perform a software reset."] _0, #[doc = "Perform a software reset."] _1, } impl SRW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { SRW::_0 => false, SRW::_1 => true, } } } #[doc = r" Proxy"] pub struct _SRW<'a> { w: &'a mut W, } impl<'a> _SRW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: SRW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not perform a software reset."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(SRW::_0) } #[doc = "Perform a software reset."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(SRW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 25; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 8 - Interrupt Flag"] #[inline] pub fn if_(&self) -> IFR { IFR::_from({ const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 
}) } #[doc = "Bit 16 - Interrupt Enable"] #[inline] pub fn ie(&self) -> IER { IER::_from({ const MASK: bool = true; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 65536 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Interrupt Acknowledge"] #[inline] pub fn iack(&mut self) -> _IACKW { _IACKW { w: self } } #[doc = "Bit 16 - Interrupt Enable"] #[inline] pub fn ie(&mut self) -> _IEW { _IEW { w: self } } #[doc = "Bit 24 - Start Change Detection Sequence"] #[inline] pub fn start(&mut self) -> _STARTW { _STARTW { w: self } } #[doc = "Bit 25 - Software Reset"] #[inline] pub fn sr(&mut self) -> _SRW { _SRW { w: self } } }
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CONTROL { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `IF`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IFR { #[doc = "No interrupt is pending."] _0, #[doc = "An interrupt is pending."] _1, } impl IFR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline]
#[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> IFR { match value { false => IFR::_0, true => IFR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == IFR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == IFR::_1 } } #[doc = "Possible values of the field `IE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IER { #[doc = "Disable interrupts to the system."] _0, #[doc = "Enable interrupts to the system."] _1, } impl IER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { IER::_0 => false, IER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> IER { match value { false => IER::_0, true => IER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == IER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == IER::_1 } } #[doc = "Values that can be written to the field `IACK`"] pub enum IACKW { #[doc = "Do not clear the interrupt."] _0, #[doc = "Clear the IF bit (interrupt flag)."] _1, } impl IACKW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { IACKW::_0 => false, IACKW::_1 => true, } } } #[doc = r" Proxy"] pub struct _IACKW<'a> { w: &'a mut W, } impl<'a> _IACKW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: IACKW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not clear the interrupt."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(IACKW::_0) } #[doc = "Clear the IF bit (interrupt flag)."] #[inline] pub fn _1(self) 
-> &'a mut W { self.variant(IACKW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `IE`"] pub enum IEW { #[doc = "Disable interrupts to the system."] _0, #[doc = "Enable interrupts to the system."] _1, } impl IEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { IEW::_0 => false, IEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _IEW<'a> { w: &'a mut W, } impl<'a> _IEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: IEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Disable interrupts to the system."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(IEW::_0) } #[doc = "Enable interrupts to the system."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(IEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `START`"] pub enum STARTW { #[doc = "Do not start the sequence. Writes of this value have no effect."] _0, #[doc = "Initiate the charger detection sequence. 
If the sequence is already running, writes of this value have no effect."] _1, } impl STARTW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { STARTW::_0 => false, STARTW::_1 => true, } } } #[doc = r" Proxy"] pub struct _STARTW<'a> { w: &'a mut W, } impl<'a> _STARTW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: STARTW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not start the sequence. Writes of this value have no effect."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(STARTW::_0) } #[doc = "Initiate the charger detection sequence. If the sequence is already running, writes of this value have no effect."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(STARTW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `SR`"] pub enum SRW { #[doc = "Do not perform a software reset."] _0, #[doc = "Perform a software reset."] _1, } impl SRW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { SRW::_0 => false, SRW::_1 => true, } } } #[doc = r" Proxy"] pub struct _SRW<'a> { w: &'a mut W, } impl<'a> _SRW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: SRW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Do not perform a software reset."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(SRW::_0) } #[doc = "Perform a software reset."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(SRW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> 
&'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 25; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 8 - Interrupt Flag"] #[inline] pub fn if_(&self) -> IFR { IFR::_from({ const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 16 - Interrupt Enable"] #[inline] pub fn ie(&self) -> IER { IER::_from({ const MASK: bool = true; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 65536 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Interrupt Acknowledge"] #[inline] pub fn iack(&mut self) -> _IACKW { _IACKW { w: self } } #[doc = "Bit 16 - Interrupt Enable"] #[inline] pub fn ie(&mut self) -> _IEW { _IEW { w: self } } #[doc = "Bit 24 - Start Change Detection Sequence"] #[inline] pub fn start(&mut self) -> _STARTW { _STARTW { w: self } } #[doc = "Bit 25 - Software Reset"] #[inline] pub fn sr(&mut self) -> _SRW { _SRW { w: self } } }
pub fn bit(&self) -> bool { match *self { IFR::_0 => false, IFR::_1 => true, } }
function_block-full_function
[ { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 0, "score": 65680.29500482805 }, { "content": "\n", "file_path": "src/interrupt/mod.rs", "rank": 1, "score": 62917.47296695953 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DATAW1SL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fmc/dataw1sl/mod.rs", "rank": 3, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DFCR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/portc/dfcr/mod.rs", "rank": 4, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n 
bits: u32,\n\n}\n\nimpl super::FMS {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/fms/mod.rs", "rank": 5, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::QDCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/qdctrl/mod.rs", "rank": 6, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SYNC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/sync/mod.rs", "rank": 7, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CSC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: 
FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/csc/mod.rs", "rank": 8, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SC1 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc0/sc1/mod.rs", "rank": 9, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::EEI {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/dma/eei/mod.rs", "rank": 10, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CNT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n 
self.register.set(w.bits);\n", "file_path": "src/ftm0/cnt/mod.rs", "rank": 11, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CLP0 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc0/clp0/mod.rs", "rank": 12, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRP {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips1/pacrp/mod.rs", "rank": 13, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::TSR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/rtc/tsr/mod.rs", "rank": 14, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the 
register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FILTER {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/filter/mod.rs", "rank": 15, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRB {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrb/mod.rs", "rank": 16, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::OUTINIT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/outinit/mod.rs", "rank": 17, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl 
super::OUTINIT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/outinit/mod.rs", "rank": 18, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DATAW2SL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fmc/dataw2sl/mod.rs", "rank": 19, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CV {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/cv/mod.rs", "rank": 20, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut 
W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/sc/mod.rs", "rank": 21, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::EXTTRIG {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/exttrig/mod.rs", "rank": 22, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRG {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrg/mod.rs", "rank": 23, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CSPMCR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n 
self.register.set(w.bits);\n", "file_path": "src/fb/cspmcr/mod.rs", "rank": 24, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::OUTINIT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/outinit/mod.rs", "rank": 25, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::WORD09 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/word09/mod.rs", "rank": 26, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CONF {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/conf/mod.rs", "rank": 27, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the 
register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DFER {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/portc/dfer/mod.rs", "rank": 28, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CLKDIV1 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/sim/clkdiv1/mod.rs", "rank": 29, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/spi0/sr/mod.rs", "rank": 30, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::QDCTRL {\n\n 
#[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/qdctrl/mod.rs", "rank": 31, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::STATUS {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/status/mod.rs", "rank": 32, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::ERQ {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/dma/erq/mod.rs", "rank": 33, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PFAPR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n 
{\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fmc/pfapr/mod.rs", "rank": 34, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRN {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips1/pacrn/mod.rs", "rank": 35, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SYNCONF {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/synconf/mod.rs", "rank": 36, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::TAGVDW0S {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", 
"file_path": "src/fmc/tagvdw0s/mod.rs", "rank": 37, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/rtc/sr/mod.rs", "rank": 38, "score": 97.98366981033908 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::MOD {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/mod_/mod.rs", "rank": 39, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CS14 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/cs14/mod.rs", "rank": 40, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: 
u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CNT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/cnt/mod.rs", "rank": 41, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRP {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrp/mod.rs", "rank": 42, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips1/pacrl/mod.rs", "rank": 43, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRA {\n\n #[doc = r\" Modifies the contents of the 
register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips1/pacra/mod.rs", "rank": 44, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DFWR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/porte/dfwr/mod.rs", "rank": 45, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRK {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrk/mod.rs", "rank": 46, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SOPT4 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = 
self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/sim/sopt4/mod.rs", "rank": 47, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FILTER {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/filter/mod.rs", "rank": 48, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DEADTIME {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/deadtime/mod.rs", "rank": 49, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PFB0CR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": 
"src/fmc/pfb0cr/mod.rs", "rank": 50, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SWOCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/swoctrl/mod.rs", "rank": 51, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::INVCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/invctrl/mod.rs", "rank": 52, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SYNC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/sync/mod.rs", "rank": 53, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: 
u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CTRL2 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/ctrl2/mod.rs", "rank": 54, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CLPD {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc0/clpd/mod.rs", "rank": 55, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRJ {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrj/mod.rs", "rank": 56, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::STATUS {\n\n #[doc = r\" Modifies the contents of 
the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/status/mod.rs", "rank": 57, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::INVCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/invctrl/mod.rs", "rank": 58, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::INVCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/invctrl/mod.rs", "rank": 59, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FLTCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = 
self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/fltctrl/mod.rs", "rank": 60, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DFCR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/portd/dfcr/mod.rs", "rank": 61, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DFWR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/portd/dfwr/mod.rs", "rank": 62, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::EXTTRIG {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": 
"src/ftm0/exttrig/mod.rs", "rank": 63, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::WORD11 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/word11/mod.rs", "rank": 64, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PWMLOAD {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/pwmload/mod.rs", "rank": 65, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::MODE {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/mode/mod.rs", "rank": 66, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: 
u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PRS {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/axbs/prs/mod.rs", "rank": 67, "score": 97.98366981033908 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FLTCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/fltctrl/mod.rs", "rank": 68, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::MOD {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/mod_/mod.rs", "rank": 69, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SWOCTRL {\n\n #[doc = r\" Modifies the contents of the 
register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/swoctrl/mod.rs", "rank": 70, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::ERR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/dma/err/mod.rs", "rank": 71, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::DEADTIME {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/deadtime/mod.rs", "rank": 72, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::ESR1 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = 
self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/esr1/mod.rs", "rank": 73, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PWMLOAD {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/pwmload/mod.rs", "rank": 74, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SYNC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/sync/mod.rs", "rank": 75, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::WORD10 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": 
"src/can0/word10/mod.rs", "rank": 76, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SCR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/system_control/scr/mod.rs", "rank": 77, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CLP3 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc1/clp3/mod.rs", "rank": 78, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PFB1CR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fmc/pfb1cr/mod.rs", "rank": 79, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: 
u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CSC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/csc/mod.rs", "rank": 80, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SC {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/sc/mod.rs", "rank": 81, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::EXTTRIG {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/exttrig/mod.rs", "rank": 82, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::TPR {\n\n #[doc = r\" Modifies the contents of the 
register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/rtc/tpr/mod.rs", "rank": 83, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SYNCONF {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm0/synconf/mod.rs", "rank": 84, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PACRF {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/aips0/pacrf/mod.rs", "rank": 85, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CONF {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = 
self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/conf/mod.rs", "rank": 86, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CNTIN {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/cntin/mod.rs", "rank": 87, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::RXFGMASK {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/rxfgmask/mod.rs", "rank": 88, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PCR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": 
"src/portc/pcr/mod.rs", "rank": 89, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SWOCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/swoctrl/mod.rs", "rank": 90, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::POL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm1/pol/mod.rs", "rank": 91, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::WORD05 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/can0/word05/mod.rs", "rank": 92, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: 
u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::ISFR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/porte/isfr/mod.rs", "rank": 93, "score": 97.98366981033908 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::RMR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/i2s0/rmr/mod.rs", "rank": 94, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PSR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/lptmr0/psr/mod.rs", "rank": 95, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CSAR {\n\n #[doc = r\" Modifies the contents of the 
register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fb/csar/mod.rs", "rank": 96, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FLTCTRL {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ftm2/fltctrl/mod.rs", "rank": 97, "score": 97.98366981033905 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::PGA {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc0/pga/mod.rs", "rank": 98, "score": 97.98366981033907 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SC2 {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n 
let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/adc1/sc2/mod.rs", "rank": 99, "score": 97.98366981033907 } ]
Rust
src/utils/redis/pubsub.rs
greglearns/portier-broker
935fcce0405fd0e3ac70ee8922c0f62bf0823a75
use futures_util::future::poll_fn; use redis::{ConnectionAddr, ConnectionInfo, ErrorKind, RedisError, RedisResult, Value}; use std::collections::hash_map::{Entry, HashMap}; use std::future::Future; use std::io::Result as IoResult; use std::net::ToSocketAddrs; use std::pin::Pin; use std::task::Poll; use tokio::io::{self, AsyncWriteExt}; use tokio::net::TcpStream; use tokio::stream::Stream; use tokio::sync::{broadcast, mpsc, oneshot}; #[cfg(unix)] use tokio::net::UnixStream; struct ReadHalf(io::BufReader<Box<dyn io::AsyncRead + Unpin + Send>>); impl ReadHalf { async fn read(&mut self) -> RedisResult<Value> { redis::parse_redis_value_async(&mut self.0).await } } struct WriteHalf(Box<dyn io::AsyncWrite + Unpin + Send>); impl WriteHalf { async fn write(&mut self, cmd: &[&[u8]]) -> IoResult<()> { let mut data = format!("*{}\r\n", cmd.len()).into_bytes(); for part in cmd { data.append(&mut format!("${}\r\n", part.len()).into_bytes()); data.extend_from_slice(part); data.extend_from_slice(b"\r\n"); } self.0.write_all(&data).await } } pub type RecvChan = broadcast::Receiver<Vec<u8>>; type ReplyChan = oneshot::Sender<RecvChan>; struct Cmd { chan: Vec<u8>, reply: ReplyChan, } struct Sub { tx: broadcast::Sender<Vec<u8>>, pending: Option<Vec<ReplyChan>>, } enum LoopEvent { Cmd(Cmd), CmdClosed, Interval, Read((RedisResult<Value>, ReadHalf)), } async fn conn_loop(mut rx: ReadHalf, mut tx: WriteHalf, mut cmd: mpsc::Receiver<Cmd>) { let interval = tokio::time::interval(tokio::time::Duration::from_secs(20)); tokio::pin!(interval); interval.as_mut().tick().await; let mut read_fut: Pin<Box<dyn Future<Output = _> + Send>> = Box::pin(async move { let res = rx.read().await; (res, rx) }); let mut subs: HashMap<Vec<u8>, Sub> = HashMap::new(); loop { match poll_fn(|cx| { if let Poll::Ready(res) = cmd.poll_recv(cx) { match res { Some(cmd) => Poll::Ready(LoopEvent::Cmd(cmd)), None => Poll::Ready(LoopEvent::CmdClosed), } } else if let Poll::Ready(_) = interval.as_mut().poll_next(cx) { 
Poll::Ready(LoopEvent::Interval) } else if let Poll::Ready(res) = read_fut.as_mut().poll(cx) { Poll::Ready(LoopEvent::Read(res)) } else { Poll::Pending } }) .await { LoopEvent::Cmd(Cmd { chan, reply }) => match subs.entry(chan.clone()) { Entry::Occupied(mut entry) => { let sub = entry.get_mut(); if let Some(ref mut pending) = sub.pending { pending.push(reply); } else { let _ = reply.send(sub.tx.subscribe()); } } Entry::Vacant(entry) => { entry.insert(Sub { tx: broadcast::channel(8).0, pending: Some(vec![reply]), }); tx.write(&[b"SUBSCRIBE", &chan]) .await .expect("Failed to send subscribe command to Redis"); } }, LoopEvent::CmdClosed => { unimplemented!(); } LoopEvent::Interval => { let to_unsub: Vec<Vec<u8>> = subs .iter() .filter_map(|(chan, sub)| { if sub.pending.is_none() && sub.tx.receiver_count() == 0 { Some(chan.clone()) } else { None } }) .collect(); if to_unsub.is_empty() { tx.write(&[b"PING"]) .await .expect("Failed to send ping command to Redis"); } else { for chan in &to_unsub { subs.remove(chan); } let mut unsub_cmd: Vec<&[u8]> = vec![b"UNSUBSCRIBE"]; unsub_cmd.extend(to_unsub.iter().map(|chan| &chan[..])); tx.write(&unsub_cmd) .await .expect("Failed to send unsubscribe command to Redis"); } } LoopEvent::Read((res, mut rx)) => { read_fut = Box::pin(async move { let res = rx.read().await; (res, rx) }); let value = res.expect("Failed to read from Redis"); let vec = match value { Value::Status(status) if status == "PONG" => continue, Value::Bulk(ref vec) if vec.len() >= 2 => vec, _ => panic!("Unexpected value from Redis: {:?}", value), }; match (&vec[0], &vec[1], vec.get(2)) { ( &Value::Data(ref ev), &Value::Data(ref chan), Some(&Value::Data(ref data)), ) if ev == b"message" => { if let Some(ref sub) = subs.get(&chan[..]) { let _ = sub.tx.send(data.to_vec()); } } (&Value::Data(ref ev), &Value::Data(ref chan), _) if ev == b"subscribe" => { if let Some(ref mut sub) = subs.get_mut(&chan[..]) { if let Some(pending) = sub.pending.take() { for reply in pending 
{ let _ = reply.send(sub.tx.subscribe()); } } } } (&Value::Data(ref ev), _, _) if ev == b"unsubscribe" || ev == b"pong" => {} _ => panic!("Unexpected value from Redis: {:?}", value), } } } } } #[derive(Clone)] pub struct Subscriber { cmd: mpsc::Sender<Cmd>, } impl Subscriber { pub async fn subscribe(&mut self, chan: Vec<u8>) -> broadcast::Receiver<Vec<u8>> { let (reply_tx, reply_rx) = oneshot::channel(); let cmd = Cmd { chan, reply: reply_tx, }; if self.cmd.send(cmd).await.is_ok() { if let Ok(rx) = reply_rx.await { return rx; } } panic!("Tried to subscribe on closed pubsub connection"); } } pub async fn connect(info: &ConnectionInfo) -> RedisResult<Subscriber> { let (rx, tx): ( Box<dyn io::AsyncRead + Unpin + Send>, Box<dyn io::AsyncWrite + Unpin + Send>, ) = match *info.addr { ConnectionAddr::Tcp(ref host, port) => { let socket_addr = { let mut socket_addrs = (&host[..], port).to_socket_addrs()?; match socket_addrs.next() { Some(socket_addr) => socket_addr, None => { return Err(RedisError::from(( ErrorKind::InvalidClientConfig, "No address found for host", ))); } } }; let (rx, tx) = io::split(TcpStream::connect(&socket_addr).await?); (Box::new(rx), Box::new(tx)) } #[cfg(unix)] ConnectionAddr::Unix(ref path) => { let (rx, tx) = io::split(UnixStream::connect(path).await?); (Box::new(rx), Box::new(tx)) } #[cfg(not(unix))] ConnectionAddr::Unix(_) => { return Err(RedisError::from(( ErrorKind::InvalidClientConfig, "Cannot connect to unix sockets \ on this platform", ))) } }; let mut rx = ReadHalf(io::BufReader::new(rx)); let mut tx = WriteHalf(tx); if let Some(ref passwd) = info.passwd { tx.write(&[b"AUTH", passwd.as_bytes()]).await?; match rx.read().await { Ok(Value::Okay) => (), _ => { return Err(( ErrorKind::AuthenticationFailed, "Password authentication failed", ) .into()); } } } let (cmd_tx, cmd_rx) = mpsc::channel(8); tokio::spawn(conn_loop(rx, tx, cmd_rx)); Ok(Subscriber { cmd: cmd_tx }) }
use futures_util::future::poll_fn; use redis::{ConnectionAddr, ConnectionInfo, ErrorKind, RedisError, RedisResult, Value}; use std::collections::hash_map::{Entry, HashMap}; use std::future::Future; use std::io::Result as IoResult; use std::net::ToSocketAddrs; use std::pin::Pin; use std::task::Poll; use tokio::io::{self, AsyncWriteExt}; use tokio::net::TcpStream; use tokio::stream::Stream; use tokio::sync::{broadcast, mpsc, oneshot}; #[cfg(unix)] use tokio::net::UnixStream; struct ReadHalf(io::BufReader<Box<dyn io::AsyncRead + Unpin + Send>>); impl ReadHalf { async fn read(&mut self) -> RedisResult<Value> { redis::parse_redis_value_async(&mut self.0).await } } struct WriteHalf(Box<dyn io::AsyncWrite + Unpin + Send>); impl WriteHalf { async fn write(&mut self, cmd: &[&[u8]]) -> IoResult<()> { let mut data = format!("*{}\r\n", cmd.len()).into_bytes(); for part in cmd { data.append(&mut format!("${}\r\n", part.len()).into_bytes()); data.extend_from_slice(part); data.extend_from_slice(b"\r\n"); } self.0.write_all(&data).await } } pub type RecvChan = broadcast::Receiver<Vec<u8>>; type ReplyChan = oneshot::Sender<RecvChan>; struct Cmd { chan: Vec<u8>, reply: ReplyChan, } struct Sub { tx: broadcast::Sender<Vec<u8>>, pending: Option<Vec<ReplyChan>>, } enum LoopEvent { Cmd(Cmd), CmdClosed, Interval, Read((RedisResult<Value>, ReadHalf)), } async fn conn_loop(mut rx: ReadHalf, mut tx: WriteHalf, mut cmd: mpsc::Receiver<Cmd>) { let interval = tokio::time::interval(tokio::time::Duration::from_secs(20)); tokio::pin!(interval); interval.as_mut().tick().await; let mut read_fut: Pin<Box<dyn Future<Output = _> + Send>> = Box::pin(async move { let res = rx.read().await; (res, rx) }); let mut subs: HashMap<Vec<u8>, Sub> = HashMap::new(); loop { match poll_fn(|cx| { if let Poll::Ready(res) = cmd.poll_recv(cx) { match res { Some(cmd) => Poll::Ready(LoopEvent::Cmd(cmd)), None => Poll::Ready(LoopEvent::CmdClosed), } } else if let Poll::Ready(_) = interval.as_mut().poll_next(cx) { 
Poll::Ready(LoopEvent::Interval) } else if let Poll::Ready(res) = read_fut.as_mut().poll(cx) { Poll::Ready(LoopEvent::Read(res)) } else { Poll::Pending } }) .await { LoopEvent::Cmd(Cmd { chan, reply }) => match subs.entry(chan.clone()) { Entry::Occupied(mut entry) => { let sub = entry.get_mut(); if let Some(ref mut pending) = sub.pending { pending.push(reply); } else { let _ = reply.send(sub.tx.subscribe()); } } Entry::Vacant(entry) => { entry.insert(Sub { tx: broadcast::channel(8).0, pending: Some(vec![reply]), }); tx.write(&[b"SUBSCRIBE", &chan]) .await .expect("Failed to send subscribe command to Redis"); } }, LoopEvent::CmdClosed => { unimplemented!(); } LoopEvent::Interval => { let to_unsub: Vec<Vec<u8>> = subs .iter() .filter_map(|(chan, sub)| { if sub.pending.is_none() && sub.tx.receiver_count() == 0 { Some(chan.clone()) } else { None } }) .collect(); if to_unsub.is_empty() { tx.write(&[b"PING"]) .await .expect("Failed to send ping command to Redis"); } else { for chan in &to_unsub { subs.remove(chan); } let mut unsub_cmd: Vec<&[u8]> = vec![b"UNSUBSCRIBE"]; unsub_cmd.extend(to_unsub.iter().map(|chan| &chan[..])); tx.write(&unsub_cmd) .await .expect("Failed to send unsubscribe command to Redis"); } } LoopEvent::Read((res, mut rx)) => { read_fut = Box::pin(async move { let res = rx.read().await; (res, rx) }); let value = res.expect("Failed to read from Redis"); let vec = match value { Value::Status(status) if status == "PONG" => continue, Value::Bulk(ref vec) if vec.len() >= 2 => vec, _ => panic!("Unexpected value from Redis: {:?}", value), }; match (&vec[0], &vec[1], vec.get(2)) { ( &Value::Data(ref ev), &Value::Data(ref chan), Some(&Value::Data(ref data)), ) if ev == b"message" => { if let Some(ref sub) = subs.get(&chan[..]) { let _ = sub.tx.send(data.to_vec()); } } (&Value::Data(ref ev), &Value::Data(ref chan), _) if ev == b"subscribe" => { if let Some(ref mut sub) = subs.get_mut(&chan[..]) { if let Some(pending) = sub.pending.take() { for reply in pending 
{ let _ = reply.send(sub.tx.subscribe()); } } } } (&Value::Data(ref ev), _, _) if ev == b"unsubscribe" || ev == b"pong" => {} _ => panic!("Unexpected value from Redis: {:?}", value), } } } } } #[derive(Clone)] pub struct Subscriber { cmd: mpsc::Sender<Cmd>, } impl Subscriber { pub async fn subscribe(&mut self, chan: Vec<u8>) -> broadcast::Receiver<Vec<u8>> { let (reply_tx, reply_rx) = oneshot::channel(); let cmd = Cmd { chan,
} panic!("Tried to subscribe on closed pubsub connection"); } } pub async fn connect(info: &ConnectionInfo) -> RedisResult<Subscriber> { let (rx, tx): ( Box<dyn io::AsyncRead + Unpin + Send>, Box<dyn io::AsyncWrite + Unpin + Send>, ) = match *info.addr { ConnectionAddr::Tcp(ref host, port) => { let socket_addr = { let mut socket_addrs = (&host[..], port).to_socket_addrs()?; match socket_addrs.next() { Some(socket_addr) => socket_addr, None => { return Err(RedisError::from(( ErrorKind::InvalidClientConfig, "No address found for host", ))); } } }; let (rx, tx) = io::split(TcpStream::connect(&socket_addr).await?); (Box::new(rx), Box::new(tx)) } #[cfg(unix)] ConnectionAddr::Unix(ref path) => { let (rx, tx) = io::split(UnixStream::connect(path).await?); (Box::new(rx), Box::new(tx)) } #[cfg(not(unix))] ConnectionAddr::Unix(_) => { return Err(RedisError::from(( ErrorKind::InvalidClientConfig, "Cannot connect to unix sockets \ on this platform", ))) } }; let mut rx = ReadHalf(io::BufReader::new(rx)); let mut tx = WriteHalf(tx); if let Some(ref passwd) = info.passwd { tx.write(&[b"AUTH", passwd.as_bytes()]).await?; match rx.read().await { Ok(Value::Okay) => (), _ => { return Err(( ErrorKind::AuthenticationFailed, "Password authentication failed", ) .into()); } } } let (cmd_tx, cmd_rx) = mpsc::channel(8); tokio::spawn(conn_loop(rx, tx, cmd_rx)); Ok(Subscriber { cmd: cmd_tx }) }
reply: reply_tx, }; if self.cmd.send(cmd).await.is_ok() { if let Ok(rx) = reply_rx.await { return rx; }
function_block-random_span
[ { "content": "#[inline]\n\npub fn decode<T: ?Sized + AsRef<[u8]>>(data: &T) -> Result<Vec<u8>, ()> {\n\n base64::decode_config(data, base64::URL_SAFE_NO_PAD).map_err(|_| ())\n\n}\n", "file_path": "src/utils/base64url.rs", "rank": 2, "score": 202123.42041081307 }, { "content": "/// Parse all supported key pairs from a PEM stream.\n\npub fn parse_key_pairs(mut reader: impl BufRead) -> Result<Vec<ParsedKeyPair>, ParseError> {\n\n let mut key_pairs = Vec::new();\n\n let mut b64buf = String::new();\n\n let mut state = State::Scan;\n\n\n\n let mut raw_line = Vec::<u8>::new();\n\n loop {\n\n raw_line.clear();\n\n let len = reader.read_until(b'\\n', &mut raw_line)?;\n\n\n\n if len == 0 {\n\n return Ok(key_pairs);\n\n }\n\n let line = String::from_utf8_lossy(&raw_line);\n\n\n\n match state {\n\n State::Scan => {\n\n if line.starts_with(PKCS8_START_MARK) {\n\n state = State::InPkcs8;\n\n }\n", "file_path": "src/utils/pem.rs", "rank": 3, "score": 195624.8543218478 }, { "content": "#[inline]\n\npub fn encode<T: ?Sized + AsRef<[u8]>>(data: &T) -> String {\n\n base64::encode_config(data, base64::URL_SAFE_NO_PAD)\n\n}\n\n\n", "file_path": "src/utils/base64url.rs", "rank": 5, "score": 159425.34295048646 }, { "content": "/// Convert a PKCS #8 document to a PEM string.\n\npub fn from_der(der: &[u8]) -> String {\n\n let mut res = String::new();\n\n let b64 = base64::encode(der);\n\n let mut cursor = Cursor::new(b64.as_bytes());\n\n res.push_str(PKCS8_START_MARK);\n\n res.push('\\n');\n\n let mut buf = [0u8; 64];\n\n loop {\n\n let size = cursor.read(&mut buf[..]).unwrap();\n\n if size == 0 {\n\n break;\n\n }\n\n res.push_str(std::str::from_utf8(&buf[..size]).unwrap());\n\n res.push('\\n');\n\n }\n\n res.push_str(PKCS8_END_MARK);\n\n res.push('\\n');\n\n res\n\n}\n", "file_path": "src/utils/pem.rs", "rank": 8, "score": 142828.18233509606 }, { "content": "/// Parse a form-encoded string into a `HashMap`.\n\npub fn parse_form_encoded(input: &[u8]) -> HashMap<String, String> {\n\n let 
mut map = HashMap::new();\n\n for (key, value) in form_urlencoded::parse(input) {\n\n map.insert(key.into_owned(), value.into_owned());\n\n }\n\n map\n\n}\n\n\n\n/// Read the request or response body up to a fixed size.\n\npub async fn read_body(mut body: Body) -> Result<Bytes, BoxError> {\n\n let mut acc = BytesMut::new();\n\n while let Some(result) = body.next().await {\n\n let chunk = result.map_err(Box::new)?;\n\n if acc.len() + chunk.len() > 8096 {\n\n return Err(Box::new(SizeLimitExceeded));\n\n }\n\n acc.extend(chunk);\n\n }\n\n Ok(acc.freeze())\n\n}\n\n\n", "file_path": "src/web.rs", "rank": 9, "score": 139545.94688558098 }, { "content": "type RsaPublicKey = signature::RsaPublicKeyComponents<Vec<u8>>;\n\n\n\n/// Token signing algorithms we support.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum SigningAlgorithm {\n\n EdDsa,\n\n Rs256,\n\n}\n\n\n\nimpl SigningAlgorithm {\n\n /// Get the JWA string representation.\n\n pub fn as_str(self) -> &'static str {\n\n use SigningAlgorithm::*;\n\n match self {\n\n EdDsa => \"EdDSA\",\n\n Rs256 => \"RS256\",\n\n }\n\n }\n\n\n\n /// Format a list of algorithms for display.\n", "file_path": "src/crypto.rs", "rank": 10, "score": 133988.24071857854 }, { "content": "fn default_id_token_signing_alg_values_supported() -> Vec<String> {\n\n vec![\"RS256\".to_owned()]\n\n}\n\n\n\n/// OpenID Connect key set document.\n", "file_path": "src/bridges/oidc.rs", "rank": 12, "score": 122966.5781893737 }, { "content": "/// Verify a JWS signature, returning the payload as Value if successful.\n\npub fn verify_jws(\n\n jws: &str,\n\n key_set: &[ProviderKey],\n\n signing_alg: SigningAlgorithm,\n\n) -> Result<json::Value, ()> {\n\n // Extract the header from the JWT structure. 
Determine what key was used\n\n // to sign the token, so we can then verify the signature.\n\n let parts: Vec<&str> = jws.split('.').collect();\n\n if parts.len() != 3 {\n\n return Err(());\n\n }\n\n let decoded = parts\n\n .iter()\n\n .map(|s| base64url::decode(s))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n let jwt_header: json::Value = json::from_slice(&decoded[0]).map_err(|_| ())?;\n\n let kid = jwt_header.get(\"kid\").and_then(|v| v.as_str()).ok_or(())?;\n\n let pub_key = jwk_key_set_find(key_set, kid)?;\n\n\n\n // Make sure the key matches the algorithm originally selected.\n", "file_path": "src/crypto.rs", "rank": 13, "score": 112929.04001253995 }, { "content": "pub fn init() {\n\n let level = std::env::var(\"RUST_LOG\")\n\n .ok()\n\n .and_then(|level| level.parse().ok())\n\n .unwrap_or(log::Level::Warn);\n\n let logger = Box::new(Logger { level });\n\n log::set_boxed_logger(logger).expect(\"Failed to initialize logger\");\n\n log::set_max_level(level.to_level_filter());\n\n}\n", "file_path": "src/utils/logger.rs", "rank": 14, "score": 112924.91884546311 }, { "content": "/// Helper function for returning a response with JSON data.\n\n///\n\n/// Serializes the argument value to JSON and returns a HTTP 200 response\n\n/// code with the serialized JSON as the body.\n\npub fn json_response(obj: &json::Value, max_age: Duration) -> Response {\n\n let body = json::to_string(&obj).expect(\"unable to coerce JSON Value into string\");\n\n let mut res = Response::new(Body::from(body));\n\n res.typed_header(ContentType::json());\n\n res.typed_header(CacheControl::new().with_public().with_max_age(max_age));\n\n res\n\n}\n\n\n", "file_path": "src/web.rs", "rank": 15, "score": 107969.2128317495 }, { "content": "/// Get a duration since Unix epoch.\n\npub fn unix_duration() -> Duration {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"current system time is before Unix epoch\")\n\n}\n\n\n", "file_path": "src/utils/time.rs", "rank": 16, "score": 
103609.46126060477 }, { "content": "/// Get a Unix timestamp for the current time.\n\npub fn unix_timestamp() -> u64 {\n\n unix_duration().as_secs()\n\n}\n", "file_path": "src/utils/time.rs", "rank": 17, "score": 103609.46126060477 }, { "content": "fn default_response_modes_supported() -> Vec<String> {\n\n vec![\"fragment\".to_owned()]\n\n}\n\n\n", "file_path": "src/bridges/oidc.rs", "rank": 18, "score": 100818.37112880523 }, { "content": "/// A trait for messages that can be sent to an agent.\n\npub trait Message: Send + 'static {\n\n /// The type of reply sent back by the agent.\n\n type Reply: Send + 'static;\n\n}\n\n\n\n/// A message used for the `Agent::started` context.\n\npub struct AgentStarted;\n\nimpl Message for AgentStarted {\n\n type Reply = ();\n\n}\n\n\n\n/// Context passed to handlers, used to send a reply.\n\n///\n\n/// The agent must call one of the reply methods, which consumes the context. The context may not\n\n/// otherwise be dropped.\n\npub struct Context<A, M: Message> {\n\n tx: oneshot::Sender<M::Reply>,\n\n addr: Addr<A>,\n\n}\n\n\n", "file_path": "src/utils/agent.rs", "rank": 19, "score": 100691.17018068933 }, { "content": "/// A trait for types that represent agents.\n\n///\n\n/// Types implementing this encapsulate agent state and behavior.\n\npub trait Agent: Send + Sized + 'static {\n\n /// Spawn the message loop.\n\n ///\n\n /// The default implementation spawns a Tokio task that processes messages from the receiver in\n\n /// an infinite loop. 
Each message is wrapped in a `block_in_place` to allow handlers to do\n\n /// synchronous work while holding a mutable reference to the agent.\n\n fn spawn_loop(mut self, mut rx: mpsc::Receiver<DispatchFn<Self>>) {\n\n tokio::spawn(async move {\n\n while let Some(dispatch) = rx.recv().await {\n\n tokio::task::block_in_place(|| {\n\n dispatch(&mut self);\n\n });\n\n }\n\n });\n\n }\n\n\n\n /// Called once the agent is started.\n\n ///\n\n /// Agents can implement this to start async tasks, such as doing maintenance periodically. The\n\n /// implementation works like a regular message handler. It can block and has mutable access to\n", "file_path": "src/utils/agent.rs", "rank": 20, "score": 95402.56114947039 }, { "content": "/// Create a response with an HTML body.\n\npub fn html_response(html: String) -> Response {\n\n let mut res = Response::new(Body::from(html));\n\n res.typed_header(ContentType::html());\n\n res\n\n}\n\n\n", "file_path": "src/web.rs", "rank": 21, "score": 94583.53583564295 }, { "content": "/// Create a response with an empty body and a specific status code.\n\npub fn empty_response(status: StatusCode) -> Response {\n\n let mut res = Response::new(Body::empty());\n\n *res.status_mut() = status;\n\n res\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::set_headers;\n\n use http::Response;\n\n\n\n #[test]\n\n fn sets_expected_headers() {\n\n let mut res = Response::new(());\n\n set_headers(&mut res);\n\n\n\n let headers = res.headers();\n\n assert!(headers.contains_key(\"Strict-Transport-Security\"));\n\n assert!(headers.contains_key(\"Content-Security-Policy\"));\n\n assert!(headers.contains_key(\"X-Content-Security-Policy\"));\n\n assert!(headers.contains_key(\"X-Content-Type-Options\"));\n\n assert!(headers.contains_key(\"X-XSS-Protection\"));\n\n assert!(headers.contains_key(\"X-Frame-Options\"));\n\n assert!(headers.contains_key(\"Cache-Control\"));\n\n }\n\n}\n", "file_path": "src/web.rs", "rank": 22, "score": 92628.44458486058 }, { 
"content": "/// Trait implemented by `Addr` that allows trait objects to be created per message.\n\npub trait Sender<M: Message>: Send + Sync {\n\n /// Sends a message of this type to the agent.\n\n fn send(&self, message: M) -> ReplyFuture<M>;\n\n}\n\n\n\nimpl<M, A> Sender<M> for Addr<A>\n\nwhere\n\n M: Message,\n\n A: Handler<M> + Send + 'static,\n\n{\n\n fn send(&self, message: M) -> ReplyFuture<M> {\n\n Addr::<A>::send(self, message)\n\n }\n\n}\n", "file_path": "src/utils/agent.rs", "rank": 23, "score": 90746.2125061274 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlRedisTable {\n\n url: Option<String>,\n\n session_ttl: Option<u64>,\n\n cache_ttl: Option<u64>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 24, "score": 90586.9337508007 }, { "content": "/// Test that a OpenID Connect endpoint is valid.\n\n///\n\n/// This method is more tolerant than `parse_redirect_uri`, because we're in control of all\n\n/// validation on the identity provider side. Note that this method also assumes the scheme was\n\n/// already checked.\n\n///\n\n/// Returns the origin if successful.\n\npub fn parse_oidc_href(input: &Url) -> Option<String> {\n\n if input.port() == Some(0) {\n\n return None;\n\n }\n\n\n\n // Simple check to see if it's just an origin.\n\n // The input should be the same, with only a trailing slash.\n\n let origin = input.origin().ascii_serialization();\n\n if input.as_str().len() != origin.len() + 1 {\n\n return None;\n\n }\n\n\n\n Some(origin)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/validation.rs", "rank": 25, "score": 88155.81189014822 }, { "content": "/// Mutate a response to set common headers.\n\nfn set_headers<B>(res: &mut hyper::Response<B>) {\n\n // Specify a tight content security policy. 
We need to be able to POST\n\n // redirect anywhere, and run our own scripts.\n\n let csp = vec![\n\n \"sandbox allow-scripts allow-forms\",\n\n \"default-src 'none'\",\n\n \"script-src 'self'\",\n\n \"style-src 'self'\",\n\n \"form-action *\",\n\n ]\n\n .join(\"; \");\n\n\n\n res.typed_header(StrictTransportSecurity::excluding_subdomains(\n\n Duration::from_secs(31_536_000u64),\n\n ));\n\n res.header(hyper::header::CONTENT_SECURITY_POLICY, csp.clone());\n\n res.header(\"x-content-security-policy\", csp);\n\n res.header(hyper::header::X_CONTENT_TYPE_OPTIONS, \"nosniff\".to_owned());\n\n res.header(hyper::header::X_XSS_PROTECTION, \"1; mode=block\".to_owned());\n\n res.header(hyper::header::X_FRAME_OPTIONS, \"DENY\".to_owned());\n\n\n\n // Default to disable caching completely.\n\n if !res.headers().contains_key(CacheControl::name()) {\n\n res.typed_header(CacheControl::new().with_no_cache().with_no_store());\n\n }\n\n}\n\n\n", "file_path": "src/web.rs", "rank": 26, "score": 86982.16608067666 }, { "content": "/// Internal message used to fetch a key set.\n\nstruct FetchKeys(SigningAlgorithm);\n\nimpl Message for FetchKeys {\n\n type Reply = RedisResult<KeySet>;\n\n}\n\n\n", "file_path": "src/agents/store/redis.rs", "rank": 27, "score": 83344.89101714185 }, { "content": "/// Internal message used to lock a key set.\n\nstruct LockKeys(SigningAlgorithm);\n\nimpl Message for LockKeys {\n\n type Reply = locking::LockGuard;\n\n}\n\n\n", "file_path": "src/agents/store/redis.rs", "rank": 28, "score": 83344.89101714185 }, { "content": "/// Internal message used to save a key set.\n\nstruct SaveKeys(KeySet);\n\nimpl Message for SaveKeys {\n\n type Reply = RedisResult<()>;\n\n}\n\n\n", "file_path": "src/agents/store/redis.rs", "rank": 29, "score": 83344.89101714185 }, { "content": "/// Helper function for returning a result to the Relying Party.\n\n///\n\n/// Takes an array of `(name, value)` parameter pairs and returns a response\n\n/// that sends them to the RP's 
`redirect_uri`. The method used to return to\n\n/// the RP depends on the `response_mode`.\n\npub fn return_to_relier(ctx: &Context, params: &[(&str, &str)]) -> Response {\n\n let &ReturnParams {\n\n ref redirect_uri,\n\n response_mode,\n\n ..\n\n } = ctx\n\n .return_params\n\n .as_ref()\n\n .expect(\"return_to_relier called without return parameters\");\n\n\n\n match response_mode {\n\n // Add params as fragment parameters and redirect.\n\n ResponseMode::Fragment => {\n\n let mut redirect_uri = redirect_uri.clone();\n\n let fragment = redirect_uri.fragment().unwrap_or(\"\").to_owned();\n\n let fragment = form_urlencoded::Serializer::for_suffix(fragment, 0)\n\n .extend_pairs(params)\n\n .finish();\n\n redirect_uri.set_fragment(Some(&fragment));\n\n\n", "file_path": "src/web.rs", "rank": 30, "score": 82241.58694711447 }, { "content": "/// Internal message used to fetch keys and send an update to the key manager.\n\nstruct UpdateKeysLocked(SigningAlgorithm);\n\nimpl Message for UpdateKeysLocked {\n\n type Reply = ();\n\n}\n\n\n\n/// Store implementation using Redis.\n\npub struct RedisStore {\n\n /// A random unique ID for ourselves.\n\n id: Arc<Vec<u8>>,\n\n /// The connection.\n\n conn: RedisConn,\n\n /// Pubsub client.\n\n pubsub: pubsub::Subscriber,\n\n /// Locking client.\n\n locking: locking::LockClient,\n\n /// TTL of session keys\n\n expire_sessions: Duration,\n\n /// TTL of cache keys\n\n expire_cache: Duration,\n\n /// The agent used for fetching on cache miss.\n", "file_path": "src/agents/store/redis.rs", "rank": 31, "score": 81404.82145443464 }, { "content": "/// Test that a `redirect_uri` is valid. 
Returns the parsed `Url` if successful.\n\npub fn parse_redirect_uri(input: &str, param: &str) -> Result<Url, ValidationError> {\n\n if !input.starts_with(\"http://\") && !input.starts_with(\"https://\") {\n\n return Err(ValidationError::NotHttps(param.to_owned()));\n\n }\n\n\n\n let url = Url::parse(input)?;\n\n if url.username() != \"\" || url.password().is_some() {\n\n return Err(ValidationError::UserinfoPresent(url));\n\n }\n\n if url.port() == Some(0) {\n\n return Err(ValidationError::InvalidPort(url));\n\n }\n\n\n\n // Make sure the input origin matches the serialized origin.\n\n let origin = url.origin().ascii_serialization();\n\n if !input.starts_with(&origin) {\n\n return Err(ValidationError::InconsistentSerialization(url));\n\n }\n\n match input.as_bytes().get(origin.len()) {\n\n Some(&b'/') | None => {}\n\n _ => return Err(ValidationError::InconsistentSerialization(url)),\n\n }\n\n\n\n Ok(url)\n\n}\n\n\n", "file_path": "src/validation.rs", "rank": 32, "score": 75932.61754059413 }, { "content": "/// Helper function to deserialize key from JWK Key Set.\n\n///\n\n/// Searches the provided JWK Key Set Value for the key matching the given\n\n/// id. 
Returns a usable public key if exactly one key is found.\n\npub fn jwk_key_set_find(key_set: &[ProviderKey], kid: &str) -> Result<SupportedPublicKey, ()> {\n\n let matching: Vec<&ProviderKey> = key_set\n\n .iter()\n\n .filter(|key| key.use_ == \"sig\" && key.kid == kid)\n\n .collect();\n\n\n\n // Verify that we found exactly one key matching the key ID.\n\n if matching.len() != 1 {\n\n return Err(());\n\n }\n\n let key = matching.first().expect(\"expected one key\");\n\n\n\n // Then, use the data to build a public key object for verification.\n\n match (key.alg.as_str(), key.crv.as_str()) {\n\n (\"EdDSA\", \"Ed25519\") => {\n\n let x = base64url::decode(&key.x).map_err(|_| ())?;\n\n let key = UnparsedPublicKey::new(&signature::ED25519, x);\n\n Ok(SupportedPublicKey::Ed25519(key))\n\n }\n\n (\"RS256\", _) => {\n\n let n = base64url::decode(&key.n).map_err(|_| ())?;\n\n let e = base64url::decode(&key.e).map_err(|_| ())?;\n\n let key = RsaPublicKey { n, e };\n\n Ok(SupportedPublicKey::Rsa(key))\n\n }\n\n _ => Err(()),\n\n }\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 33, "score": 73731.1390972108 }, { "content": "enum State {\n\n Scan,\n\n InPkcs8,\n\n InRsa,\n\n}\n\n\n", "file_path": "src/utils/pem.rs", "rank": 34, "score": 62142.98641556471 }, { "content": "#[derive(Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct Args {\n\n arg_CONFIG: Option<PathBuf>,\n\n flag_import_key: Option<PathBuf>,\n\n}\n\n\n\n/// The `main()` method. Will loop forever to serve HTTP requests.\n\n#[tokio::main]\n\nasync fn main() {\n\n crate::utils::logger::init();\n\n\n\n // We spawn a bunch of background tasks on the Tokio executor. 
If these panic, we want to exit\n\n // instead of continuing on without the task.\n\n let next_panic_hook = std::panic::take_hook();\n\n std::panic::set_hook(Box::new(move |info| {\n\n next_panic_hook(info);\n\n std::process::exit(1);\n\n }));\n\n\n\n let args: Args = docopt::Docopt::new(USAGE)\n\n .map(|docopt| docopt.version(Some(VERSION.to_owned())))\n", "file_path": "src/main.rs", "rank": 35, "score": 61983.96526286806 }, { "content": "/// Store configuration is first translated into this intermediate enum.\n\nenum StoreConfig {\n\n #[cfg(feature = \"redis\")]\n\n Redis(String),\n\n #[cfg(feature = \"rusqlite\")]\n\n Rusqlite(PathBuf),\n\n Memory,\n\n}\n\n\n\nimpl StoreConfig {\n\n fn from_options(\n\n redis_url: Option<String>,\n\n sqlite_db: Option<PathBuf>,\n\n memory_storage: bool,\n\n ) -> Result<Self, ConfigError> {\n\n match (redis_url, sqlite_db, memory_storage) {\n\n #[cfg(feature = \"redis\")]\n\n (Some(redis_url), None, false) => Ok(StoreConfig::Redis(redis_url)),\n\n #[cfg(not(feature = \"redis\"))]\n\n (Some(_), None, false) => {\n\n Err(\"Redis storage requested, but this build does not support it.\".into())\n", "file_path": "src/config/mod.rs", "rank": 36, "score": 60846.10153814941 }, { "content": "/// Message sent at an interval to collect garbage.\n\nstruct Gc;\n\nimpl Message for Gc {\n\n type Reply = ();\n\n}\n\n\n", "file_path": "src/agents/store/rusqlite.rs", "rank": 37, "score": 59312.033437116625 }, { "content": "/// Message sent at an interval to collect garbage.\n\nstruct Gc;\n\nimpl Message for Gc {\n\n type Reply = ();\n\n}\n\n\n", "file_path": "src/agents/store/memory.rs", "rank": 38, "score": 59312.033437116625 }, { "content": "#[derive(Deserialize)]\n\nstruct ProviderConfig {\n\n authorization_endpoint: Url,\n\n jwks_uri: Url,\n\n #[serde(default = \"default_response_modes_supported\")]\n\n response_modes_supported: Vec<String>,\n\n #[serde(default = \"default_id_token_signing_alg_values_supported\")]\n\n 
id_token_signing_alg_values_supported: Vec<String>,\n\n // NOTE: This field is non-standard.\n\n #[serde(default)]\n\n accepts_id_token_signing_alg_query_param: bool,\n\n}\n\n\n", "file_path": "src/bridges/oidc.rs", "rank": 39, "score": 59303.46661880435 }, { "content": "/// Parameters for `StoreConfig::spawn_store`.\n\nstruct StoreParams {\n\n session_ttl: Duration,\n\n cache_ttl: Duration,\n\n limit_per_email: LimitConfig,\n\n fetcher: Addr<FetchAgent>,\n\n #[allow(dead_code)]\n\n rng: SecureRandom,\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 40, "score": 59303.46661880435 }, { "content": "#[derive(Deserialize)]\n\nstruct ProviderKeys {\n\n #[serde(default)]\n\n keys: Vec<ProviderKey>,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct ProviderKey {\n\n #[serde(default)]\n\n pub alg: String,\n\n #[serde(default)]\n\n pub crv: String,\n\n #[serde(rename = \"use\")]\n\n #[serde(default)]\n\n pub use_: String,\n\n #[serde(default)]\n\n pub kid: String,\n\n #[serde(default)]\n\n pub n: String,\n\n #[serde(default)]\n\n pub e: String,\n", "file_path": "src/bridges/oidc.rs", "rank": 41, "score": 59303.46661880435 }, { "content": "/// Message used internally to save a cache entry.\n\nstruct SaveCache {\n\n url: Url,\n\n data: String,\n\n expires: i64,\n\n}\n\nimpl Message for SaveCache {\n\n type Reply = Result<(), SqlError>;\n\n}\n\n\n", "file_path": "src/agents/store/rusqlite.rs", "rank": 42, "score": 58113.19845417362 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlHeadersTable {\n\n static_ttl: Option<u64>,\n\n discovery_ttl: Option<u64>,\n\n keys_ttl: Option<u64>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 43, "score": 58104.91285872765 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlGoogleTable {\n\n client_id: Option<String>,\n\n}\n\n\n\nimpl TomlConfig {\n\n pub fn parse_and_apply(path: &Path, builder: &mut ConfigBuilder) {\n\n let parsed = Self::parse(path);\n\n Self::apply(parsed, builder);\n\n }\n\n\n\n fn warn_table(table: 
&str) {\n\n log::warn!(\n\n \"TOML '{}' section is deprecated. See {} on how to update your config.\",\n\n table,\n\n \"https://github.com/portier/portier-broker/releases/tag/v0.3.0\"\n\n );\n\n }\n\n\n\n #[allow(clippy::cognitive_complexity)]\n\n fn parse(path: &Path) -> TomlConfig {\n", "file_path": "src/config/toml.rs", "rank": 44, "score": 58104.91285872765 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlSmtpTable {\n\n from_name: Option<String>,\n\n from_address: Option<String>,\n\n server: Option<String>,\n\n username: Option<String>,\n\n password: Option<String>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 45, "score": 58104.91285872765 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlCryptoTable {\n\n token_ttl: Option<u64>,\n\n keyfiles: Option<Vec<PathBuf>>,\n\n keytext: Option<String>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 46, "score": 58104.91285872765 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlLimitTable {\n\n per_email: Option<LimitConfig>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 47, "score": 58104.91285872765 }, { "content": "#[derive(Deserialize)]\n\nstruct TomlServerTable {\n\n listen_ip: Option<String>,\n\n listen_port: Option<u16>,\n\n public_url: Option<String>,\n\n allowed_origins: Option<Vec<String>>,\n\n}\n\n\n", "file_path": "src/config/toml.rs", "rank": 48, "score": 58104.91285872765 }, { "content": "enum Event<K> {\n\n Update((K, TokioInstant)),\n\n Timer,\n\n Cancelled,\n\n}\n\n\n\n/// Task that runs a set of timers.\n\n#[derive(Clone)]\n\npub struct DelayQueueTask<K: Clone + Eq + Hash + Send + 'static> {\n\n tx: mpsc::Sender<(K, TokioInstant)>,\n\n}\n\n\n\nimpl<K: Clone + Eq + Hash + Send + 'static> DelayQueueTask<K> {\n\n /// Spawn a new task running a timer loop.\n\n ///\n\n /// The handler function is called when a timer expires with the timer key. 
Note that this\n\n /// function is called inside the Tokio run-time, and may not block.\n\n pub fn spawn<H>(mut handler: H) -> Self\n\n where\n\n H: (FnMut(K)) + Send + 'static,\n", "file_path": "src/utils/delay_queue_task.rs", "rank": 49, "score": 55414.99916626876 }, { "content": "/// Combines any type with an `Instant` expiry time.\n\nstruct Expiring<T> {\n\n value: T,\n\n expires: Instant,\n\n}\n\n\n\nimpl<T> Expiring<T> {\n\n /// Create a value from a duration.\n\n fn from_duration(value: T, duration: Duration) -> Self {\n\n let expires = Instant::now() + duration;\n\n Expiring { value, expires }\n\n }\n\n\n\n /// Whether this value has not yet expired.\n\n fn is_alive(&self) -> bool {\n\n self.expires > Instant::now()\n\n }\n\n}\n\n\n", "file_path": "src/agents/store/memory.rs", "rank": 50, "score": 55162.1477274481 }, { "content": "pub trait ResponseExt {\n\n fn header<K, V>(&mut self, key: K, value: V) -> &mut Self\n\n where\n\n HeaderName: TryFrom<K>,\n\n HeaderValue: TryFrom<V>,\n\n <HeaderName as TryFrom<K>>::Error: Debug,\n\n <HeaderValue as TryFrom<V>>::Error: Debug;\n\n\n\n fn typed_header(&mut self, header: impl Header) -> &mut Self;\n\n}\n\n\n\nimpl<B> ResponseExt for Response<B> {\n\n fn header<K, V>(&mut self, key: K, value: V) -> &mut Self\n\n where\n\n HeaderName: TryFrom<K>,\n\n HeaderValue: TryFrom<V>,\n\n <HeaderName as TryFrom<K>>::Error: Debug,\n\n <HeaderValue as TryFrom<V>>::Error: Debug,\n\n {\n\n self.headers_mut().insert(\n", "file_path": "src/utils/http.rs", "rank": 51, "score": 55085.41750024748 }, { "content": "/// Store abstraction. 
Combines all message types.\n\n///\n\n/// Downside of this is that it needs to be implemented on the agent side as:\n\n/// `impl StoreSender for Addr<FoobarStore> {}`\n\npub trait StoreSender:\n\n Sender<SaveSession>\n\n + Sender<GetSession>\n\n + Sender<DeleteSession>\n\n + Sender<FetchUrlCached>\n\n + Sender<IncrAndTestLimit>\n\n + Sender<EnableRotatingKeys>\n\n + Sender<RotateKeysLocked>\n\n + Sender<ImportKeySet>\n\n{\n\n}\n\n\n\npub mod memory;\n\npub use self::memory::MemoryStore;\n\n\n\n#[cfg(feature = \"redis\")]\n\npub mod redis;\n\n#[cfg(feature = \"redis\")]\n\npub use self::redis::RedisStore;\n\n\n\n#[cfg(feature = \"rusqlite\")]\n\npub mod rusqlite;\n\n#[cfg(feature = \"rusqlite\")]\n\npub use self::rusqlite::RusqliteStore;\n", "file_path": "src/agents/store/mod.rs", "rank": 52, "score": 53976.476144385495 }, { "content": "/// Trait for converting various types to a timer deadline.\n\npub trait IntoDeadline {\n\n fn into_deadline(self) -> TokioInstant;\n\n}\n\n\n\nimpl IntoDeadline for TokioInstant {\n\n fn into_deadline(self) -> TokioInstant {\n\n self\n\n }\n\n}\n\n\n\nimpl IntoDeadline for Instant {\n\n fn into_deadline(self) -> TokioInstant {\n\n TokioInstant::from_std(self)\n\n }\n\n}\n\n\n\nimpl IntoDeadline for Duration {\n\n fn into_deadline(self) -> TokioInstant {\n\n TokioInstant::now() + self\n\n }\n\n}\n\n\n\nimpl IntoDeadline for SystemTime {\n\n fn into_deadline(self) -> TokioInstant {\n\n self.duration_since(SystemTime::now())\n\n .unwrap_or_default()\n\n .into_deadline()\n\n }\n\n}\n\n\n", "file_path": "src/utils/delay_queue_task.rs", "rank": 53, "score": 53973.20526004504 }, { "content": "/// Additional `KeyPair` methods we implement for key pair types we support.\n\npub trait KeyPairExt {\n\n /// Generate an ID for the key by hashing the public components.\n\n ///\n\n /// Note that this hash is not a standard format, but that's okay, because it's only used as\n\n /// a simple identifier in JWKs.\n\n fn generate_kid(&self) -> 
String;\n\n\n\n /// Get the signing algorithm for this key type.\n\n fn signing_alg(&self) -> SigningAlgorithm;\n\n\n\n /// Create a JSON Web Signature (JWS) for the given JSON structure.\n\n fn sign_jws(\n\n &self,\n\n kid: &str,\n\n payload: &JsonValue,\n\n rng: &SecureRandom,\n\n ) -> Result<String, SignError>;\n\n\n\n /// Return JSON represenation of the public key for use in JWK key sets.\n\n fn public_jwk(&self, kid: &str) -> JsonValue;\n", "file_path": "src/utils/keys.rs", "rank": 54, "score": 53973.07307478905 }, { "content": "/// Message used internally to save a key set.\n\nstruct SaveKeys(KeySet);\n\nimpl Message for SaveKeys {\n\n type Reply = Result<(), SqlError>;\n\n}\n\n\n\n/// Store implementation using SQLite.\n\npub struct RusqliteStore {\n\n /// TTL of session keys\n\n expire_sessions: Duration,\n\n /// TTL of cache keys\n\n expire_cache: Duration,\n\n /// Configuration for per-email rate limiting.\n\n limit_per_email_config: LimitConfig,\n\n /// SQLite connection.\n\n conn: Connection,\n\n /// The agent used for fetching on cache miss.\n\n fetcher: Addr<FetchAgent>,\n\n /// Key manager if rotating keys are enabled.\n\n key_manager: Option<Addr<RotatingKeys>>,\n\n}\n", "file_path": "src/agents/store/rusqlite.rs", "rank": 55, "score": 53001.53623575678 }, { "content": "fn is_invalid_domain_char(c: char) -> bool {\n\n matches!(\n\n c,\n\n '\\0' | '\\t' | '\\n' | '\\r' | ' ' | '#' | '%' | '/' | ':' | '?' 
| '@' | '[' | '\\\\' | ']'\n\n )\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ParseEmailError {\n\n #[error(display = \"missing '@' separator in email address\")]\n\n NoSeparator,\n\n #[error(display = \"local part of an email address cannot be empty\")]\n\n EmptyLocal,\n\n #[error(display = \"invalid international domain name in email address\")]\n\n InvalidIdna(#[error(from)] idna::Errors),\n\n #[error(display = \"domain part of an email address cannot be empty\")]\n\n EmptyDomain,\n\n #[error(display = \"email address contains invalid characters in the domain part\")]\n\n InvalidDomainChars,\n\n #[error(display = \"email address domain part cannot be a raw IP address\")]\n", "file_path": "src/email_address.rs", "rank": 56, "score": 49010.87977196854 }, { "content": "/// A slot in the keys `HashMap`.\n\n///\n\n/// It's technically not necessary for us to lock keys, because the key manager is our only client,\n\n/// but we do so any ways to ensure correct behavior.\n\ntype KeysSlot = Arc<Mutex<KeySet>>;\n\n\n\n/// Store implementation using memory.\n\npub struct MemoryStore {\n\n /// TTL of session keys\n\n expire_sessions: Duration,\n\n /// TTL of cache keys\n\n expire_cache: Duration,\n\n /// Configuration for per-email rate limiting.\n\n limit_per_email_config: LimitConfig,\n\n /// The agent used for fetching on cache miss.\n\n fetcher: Addr<FetchAgent>,\n\n /// Key manager if rotating keys are enabled.\n\n key_manager: Option<Addr<RotatingKeys>>,\n\n /// Session storage.\n\n sessions: HashMap<String, Expiring<Session>>,\n\n /// Cache storage.\n\n cache: HashMap<Url, CacheSlot>,\n\n /// Rate limit storage.\n\n limits: HashMap<IncrAndTestLimit, Expiring<usize>>,\n", "file_path": "src/agents/store/memory.rs", "rank": 57, "score": 48022.98536443041 }, { "content": "/// Trait implemented by agents for each message type they handle.\n\npub trait Handler<M: Message>: Sized {\n\n /// Handle the message.\n\n ///\n\n /// Handlers are called one-by-one as messages 
arrive; the next message won't be picked up\n\n /// until the function returns. Handlers can block and have mutable access to the agent itself.\n\n ///\n\n /// A context is provided to send the reply, and it can live longer than the function, which\n\n /// allows agents to spawn an async task while continuing with the next message.\n\n fn handle(&mut self, message: M, cx: Context<Self, M>);\n\n}\n\n\n\n/// An address to an agent.\n\n///\n\n/// Can be cheaply cloned, and is used to send messages to the agent. It's also possible to\n\n/// abstract over a message type by casting it to a `dyn Sender<M>`.\n\npub struct Addr<A> {\n\n tx: mpsc::Sender<DispatchFn<A>>,\n\n}\n\n\n\nimpl<A> Addr<A> {\n", "file_path": "src/utils/agent.rs", "rank": 58, "score": 47157.34304248977 }, { "content": "/// Trait for key pair types we can generate.\n\npub trait GeneratedKeyPair: KeyPairExt + Sized {\n\n /// Configuration required for generating a key pair.\n\n type Config;\n\n\n\n /// Generate a new key pair.\n\n ///\n\n /// If this fails, we panic, because it may happen at an arbitrary moment at run-time.\n\n fn generate(config: Self::Config) -> String;\n\n\n\n /// Convert a ParsedKeyPair, if it is of the correct type.\n\n fn from_parsed(parsed: ParsedKeyPair) -> Option<Self>;\n\n}\n\n\n\nimpl GeneratedKeyPair for Ed25519KeyPair {\n\n type Config = SecureRandom;\n\n\n\n fn generate(config: Self::Config) -> String {\n\n let doc =\n\n Self::generate_pkcs8(&config.generator).expect(\"could not generate Ed25519 key pair\");\n\n pem::from_der(doc.as_ref())\n", "file_path": "src/utils/keys.rs", "rank": 59, "score": 45964.11238625334 }, { "content": "/// A slot in the cache `HashMap`.\n\n///\n\n/// We want to lock these individually while a fetch is in progress, so multiple requests for the\n\n/// same URL result in only one fetch. 
Therefore, we use an `Arc<Mutex<_>>` to carry slots around,\n\n/// and within an `Option` which indicates whether cache is actually present (despite the hash map\n\n/// entry existing or not, which does not indicate anything).\n\ntype CacheSlot = Arc<Mutex<Option<Expiring<String>>>>;\n\n\n", "file_path": "src/agents/store/memory.rs", "rank": 60, "score": 44139.147914474095 }, { "content": "/// Build the SMTP transport from config.\n\nfn build_transport(app: &Config) -> Result<SmtpTransport, String> {\n\n // Extract domain, and build an address with a default port.\n\n // Split the same way `to_socket_addrs` does.\n\n let parts = app.smtp_server.rsplitn(2, ':').collect::<Vec<_>>();\n\n let (domain, addr) = if parts.len() == 2 {\n\n (parts[1].to_owned(), app.smtp_server.to_owned())\n\n } else {\n\n (parts[0].to_owned(), format!(\"{}:25\", app.smtp_server))\n\n };\n\n\n\n // TODO: Configurable security.\n\n let tls_connector =\n\n TlsConnector::new().map_err(|e| format!(\"could not initialize tls: {}\", e))?;\n\n let security = ClientSecurity::Opportunistic(ClientTlsParameters::new(domain, tls_connector));\n\n let mut client = SmtpClient::new(&addr, security)\n\n .map_err(|e| format!(\"could not create the smtp transport: {}\", e))?;\n\n if let (&Some(ref username), &Some(ref password)) = (&app.smtp_username, &app.smtp_password) {\n\n client = client.credentials(Credentials::new(username.to_owned(), password.to_owned()));\n\n }\n\n Ok(client.transport())\n\n}\n", "file_path": "src/bridges/email.rs", "rank": 61, "score": 43341.261083275145 }, { "content": "/// Internal variant of `KeySet` where the PEM was parsed.\n\nstruct ActiveKeySet<T: KeyPairExt + GeneratedKeyPair> {\n\n current: NamedKeyPair<T>,\n\n next: NamedKeyPair<T>,\n\n previous: Option<NamedKeyPair<T>>,\n\n}\n\n\n\nimpl<T: KeyPairExt + GeneratedKeyPair> ActiveKeySet<T> {\n\n fn parse(key_set: &KeySet) -> Self {\n\n let current = key_set\n\n .current\n\n .as_ref()\n\n .map(|entry| 
Self::parse_one(&entry.value).into())\n\n .expect(\"Provided key set does not have a current key\");\n\n let next = key_set\n\n .next\n\n .as_ref()\n\n .map(|entry| Self::parse_one(&entry.value).into())\n\n .expect(\"Provided key set does not have a next key\");\n\n let previous = key_set\n\n .previous\n", "file_path": "src/agents/key_manager/rotating.rs", "rank": 62, "score": 43033.727080319324 }, { "content": "/// Key manager abstraction. Combines all message types.\n\n///\n\n/// Downside of this is that it needs to be implemented on the agent side as:\n\n/// `impl KeyManagerSender for Addr<FoobarKeyManager> {}`\n\npub trait KeyManagerSender: Sender<SignJws> + Sender<GetPublicJwks> {}\n\n\n\npub mod manual;\n\npub mod rotating;\n\n\n\npub use self::manual::{ManualKeys, ManualKeysError};\n\npub use self::rotating::{Expiring, KeySet, RotateKeys, RotatingKeys, UpdateKeys};\n", "file_path": "src/agents/key_manager/mod.rs", "rank": 63, "score": 39461.00320408336 }, { "content": " format!(\"session:{}\", session_id)\n\n }\n\n}\n\n\n\nimpl Agent for RedisStore {\n\n fn started(&mut self, cx: Context<Self, AgentStarted>) {\n\n // Ping Redis at an interval.\n\n let mut conn = self.conn.clone();\n\n tokio::spawn(async move {\n\n let mut interval = tokio::time::interval(Duration::from_secs(20));\n\n // Ignore the first (immediate) tick.\n\n interval.tick().await;\n\n loop {\n\n interval.tick().await;\n\n let _: String = ::redis::cmd(\"PING\")\n\n .query_async(&mut conn)\n\n .await\n\n .expect(\"Redis ping failed\");\n\n }\n\n });\n", "file_path": "src/agents/store/redis.rs", "rank": 72, "score": 34984.2086710101 }, { "content": " cx.reply_later(async move {\n\n let key = Self::format_session_key(&message.session_id);\n\n let data: Option<String> = conn.get(&key).await?;\n\n if let Some(data) = data {\n\n Ok(Some(serde_json::from_str(&data)?))\n\n } else {\n\n Ok(None)\n\n }\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<DeleteSession> for RedisStore {\n\n fn handle(&mut self, 
message: DeleteSession, cx: Context<Self, DeleteSession>) {\n\n let mut conn = self.conn.clone();\n\n cx.reply_later(async move {\n\n let key = Self::format_session_key(&message.session_id);\n\n conn.del(&key).await?;\n\n Ok(())\n\n });\n", "file_path": "src/agents/store/redis.rs", "rank": 73, "score": 34978.20845517192 }, { "content": " let request = self.rng.generate_async(16).await;\n\n let mut sub = self.pubsub.subscribe(key.to_vec()).await;\n\n let retry = interval(Duration::from_secs(2));\n\n tokio::pin!(retry);\n\n loop {\n\n poll_fn(|cx| {\n\n if sub.poll_recv(cx).is_ready() || retry.as_mut().poll_next(cx).is_ready() {\n\n Poll::Ready(())\n\n } else {\n\n Poll::Pending\n\n }\n\n })\n\n .await;\n\n if self.try_lock(key, &request).await {\n\n return self.make_guard(key, request);\n\n }\n\n }\n\n }\n\n\n\n /// Try to acquire a lock without waiting.\n", "file_path": "src/utils/redis/locking.rs", "rank": 74, "score": 34976.96970289615 }, { "content": " loop {\n\n let from_id = sub.recv().await.expect(\"Redis keys subscription failed\");\n\n if from_id != *my_id2 {\n\n me2.send(UpdateKeysLocked(signing_alg));\n\n }\n\n }\n\n });\n\n // Fetch current keys.\n\n me.send(UpdateKeysLocked(signing_alg)).await;\n\n }\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<RotateKeysLocked> for RedisStore {\n\n fn handle(&mut self, message: RotateKeysLocked, cx: Context<Self, RotateKeysLocked>) {\n\n let me = cx.addr().clone();\n\n let key_manager = self.key_manager.as_ref().unwrap().clone();\n\n cx.reply_later(async move {\n\n let lock = me.send(LockKeys(message.0)).await;\n", "file_path": "src/agents/store/redis.rs", "rank": 75, "score": 34976.28229158204 }, { "content": "use crate::utils::{redis::pubsub::Subscriber, SecureRandom};\n\nuse futures_util::future::poll_fn;\n\nuse redis::{aio::MultiplexedConnection, RedisResult, Script, Value};\n\nuse std::sync::Arc;\n\nuse std::task::Poll;\n\nuse tokio::stream::Stream;\n\nuse tokio::time::{interval, Duration};\n\n\n\n// TODO: Lock locally 
first, so multiple locks from the same machine are more efficient.\n\n\n\n/// An active lock in Redis.\n\n///\n\n/// This will try to send a\n\npub struct LockGuard {\n\n key: Vec<u8>,\n\n request: Vec<u8>,\n\n conn: MultiplexedConnection,\n\n unlock_script: Arc<Script>,\n\n}\n\n\n", "file_path": "src/utils/redis/locking.rs", "rank": 76, "score": 34976.15631855238 }, { "content": " async fn try_lock(&mut self, key: &[u8], request: &[u8]) -> bool {\n\n let value = redis::cmd(\"SET\")\n\n .arg(key)\n\n .arg(request)\n\n .arg(\"nx\")\n\n .arg(\"px\")\n\n .arg(\"30000\")\n\n .query_async(&mut self.conn)\n\n .await\n\n .expect(\"Could not make Redis lock request\");\n\n match value {\n\n Value::Nil => false,\n\n Value::Okay => true,\n\n value => panic!(\"Unexpected lock result from Redis: {:?}\", value),\n\n }\n\n }\n\n\n\n /// Create a lock guard, once we've acquired the lock.\n\n fn make_guard(&self, key: &[u8], request: Vec<u8>) -> LockGuard {\n\n LockGuard {\n\n key: key.to_vec(),\n\n request,\n\n conn: self.conn.clone(),\n\n unlock_script: self.unlock_script.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/utils/redis/locking.rs", "rank": 77, "score": 34975.452125982476 }, { "content": " Ok(count <= config.max_count)\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<EnableRotatingKeys> for RedisStore {\n\n fn handle(&mut self, message: EnableRotatingKeys, cx: Context<Self, EnableRotatingKeys>) {\n\n let me = cx.addr().clone();\n\n let my_id = self.id.clone();\n\n let mut pubsub = self.pubsub.clone();\n\n self.key_manager = Some(message.key_manager.clone());\n\n cx.reply_later(async move {\n\n for signing_alg in &message.signing_algs {\n\n let signing_alg = *signing_alg;\n\n // Listen for key changes by other workers.\n\n let chan = format!(\"keys-updated:{}\", signing_alg).into_bytes();\n\n let mut sub = pubsub.subscribe(chan).await;\n\n let me2 = me.clone();\n\n let my_id2 = my_id.clone();\n\n tokio::spawn(async move {\n", "file_path": "src/agents/store/redis.rs", "rank": 78, 
"score": 34975.00779209713 }, { "content": " cx.reply(());\n\n }\n\n}\n\n\n\nimpl Handler<SaveSession> for RedisStore {\n\n fn handle(&mut self, message: SaveSession, cx: Context<Self, SaveSession>) {\n\n let mut conn = self.conn.clone();\n\n let ttl = self.expire_sessions;\n\n cx.reply_later(async move {\n\n let key = Self::format_session_key(&message.session_id);\n\n let data = serde_json::to_string(&message.data)?;\n\n conn.set_ex(&key, data, ttl.as_secs() as usize).await?;\n\n Ok(())\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<GetSession> for RedisStore {\n\n fn handle(&mut self, message: GetSession, cx: Context<Self, GetSession>) {\n\n let mut conn = self.conn.clone();\n", "file_path": "src/agents/store/redis.rs", "rank": 79, "score": 34974.255193208905 }, { "content": " }\n\n}\n\n\n\nimpl Handler<UpdateKeysLocked> for RedisStore {\n\n fn handle(&mut self, message: UpdateKeysLocked, cx: Context<Self, UpdateKeysLocked>) {\n\n let me = cx.addr().clone();\n\n let key_manager = self.key_manager.as_ref().unwrap().clone();\n\n cx.reply_later(async move {\n\n let key_set = {\n\n let _lock = me.send(LockKeys(message.0)).await;\n\n me.send(FetchKeys(message.0))\n\n .await\n\n .expect(\"Failed to fetch keys from Redis\")\n\n };\n\n key_manager.send(UpdateKeys(key_set)).await;\n\n });\n\n }\n\n}\n\n\n\nimpl StoreSender for Addr<RedisStore> {}\n", "file_path": "src/agents/store/redis.rs", "rank": 81, "score": 34973.12952489419 }, { "content": " .await\n\n .expect(\"Failed to save keys to Redis\");\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<LockKeys> for RedisStore {\n\n fn handle(&mut self, message: LockKeys, cx: Context<Self, LockKeys>) {\n\n let mut locking = self.locking.clone();\n\n let lock_key = format!(\"lock:keys:{}\", message.0);\n\n cx.reply_later(async move { locking.lock(lock_key.as_bytes()).await });\n\n }\n\n}\n\n\n\nimpl Handler<FetchKeys> for RedisStore {\n\n fn handle(&mut self, message: FetchKeys, cx: Context<Self, FetchKeys>) {\n\n let mut conn = 
self.conn.clone();\n\n let signing_alg = message.0;\n\n let db_key = format!(\"keys:{}\", signing_alg);\n\n cx.reply_later(async move {\n", "file_path": "src/agents/store/redis.rs", "rank": 82, "score": 34972.99342387378 }, { "content": " let key_set = me\n\n .send(FetchKeys(message.0))\n\n .await\n\n .expect(\"Failed to fetch keys from Redis\");\n\n if let Some(key_set) = key_manager.send(RotateKeys(key_set)).await {\n\n me.send(SaveKeys(key_set.clone()))\n\n .await\n\n .expect(\"Failed to save keys to Redis\");\n\n drop(lock);\n\n key_manager.send(UpdateKeys(key_set)).await;\n\n }\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<ImportKeySet> for RedisStore {\n\n fn handle(&mut self, message: ImportKeySet, cx: Context<Self, ImportKeySet>) {\n\n let me = cx.addr().clone();\n\n cx.reply_later(async move {\n\n me.send(SaveKeys(message.0))\n", "file_path": "src/agents/store/redis.rs", "rank": 85, "score": 34971.876877920346 }, { "content": " Ok(result.data)\n\n }\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<IncrAndTestLimit> for RedisStore {\n\n fn handle(&mut self, message: IncrAndTestLimit, cx: Context<Self, IncrAndTestLimit>) {\n\n let mut conn = self.conn.clone();\n\n let script = self.limit_script.clone();\n\n let (key, config) = match message {\n\n IncrAndTestLimit::PerEmail { addr } => (\n\n format!(\"ratelimit:per-email:{}\", addr),\n\n self.limit_per_email_config,\n\n ),\n\n };\n\n cx.reply_later(async move {\n\n let mut invocation = script.prepare_invoke();\n\n invocation.key(key).arg(config.duration.as_secs());\n\n let count: usize = invocation.invoke_async(&mut conn).await?;\n", "file_path": "src/agents/store/redis.rs", "rank": 86, "score": 34971.65412640408 }, { "content": " }\n\n}\n\n\n\nimpl Handler<FetchUrlCached> for RedisStore {\n\n fn handle(&mut self, message: FetchUrlCached, cx: Context<Self, FetchUrlCached>) {\n\n let mut conn = self.conn.clone();\n\n let mut locking = self.locking.clone();\n\n let fetcher = self.fetcher.clone();\n\n let expire_cache = 
self.expire_cache;\n\n cx.reply_later(async move {\n\n let key = format!(\"cache:{}\", message.url);\n\n let _lock = locking.lock(format!(\"lock:{}\", key).as_bytes()).await;\n\n if let Some(data) = conn.get(key).await? {\n\n Ok(data)\n\n } else {\n\n let key = message.url.as_str().to_owned();\n\n let result = fetcher.send(FetchUrl { url: message.url }).await?;\n\n let ttl = std::cmp::max(expire_cache, result.max_age);\n\n conn.set_ex(key, result.data.clone(), ttl.as_secs() as usize)\n\n .await?;\n", "file_path": "src/agents/store/redis.rs", "rank": 87, "score": 34971.26894039387 }, { "content": " let key_set: Option<String> = conn.get(db_key).await?;\n\n let key_set = key_set\n\n .map(|data| serde_json::from_str(&data).expect(\"Invalid key set JSON in Redis\"))\n\n .unwrap_or_else(|| KeySet::empty(signing_alg));\n\n Ok(key_set)\n\n })\n\n }\n\n}\n\n\n\nimpl Handler<SaveKeys> for RedisStore {\n\n fn handle(&mut self, message: SaveKeys, cx: Context<Self, SaveKeys>) {\n\n let mut conn = self.conn.clone();\n\n let signing_alg = message.0.signing_alg;\n\n let db_key = format!(\"keys:{}\", signing_alg);\n\n let data = serde_json::to_string(&message.0).expect(\"Could not encode key set as JSON\");\n\n let mut pipe = pipe();\n\n pipe.atomic()\n\n .set(db_key, data)\n\n .publish(format!(\"keys-updated:{}\", signing_alg), &self.id[..]);\n\n cx.reply_later(async move { pipe.query_async(&mut conn).await });\n", "file_path": "src/agents/store/redis.rs", "rank": 88, "score": 34969.85090059015 }, { "content": "impl Drop for LockGuard {\n\n fn drop(&mut self) {\n\n let key = self.key.clone();\n\n let request = self.request.clone();\n\n let mut conn = self.conn.clone();\n\n let unlock_script = self.unlock_script.clone();\n\n tokio::spawn(async move {\n\n let mut invocation = unlock_script.prepare_invoke();\n\n invocation.key(key.clone()).arg(&request[..]);\n\n let res: RedisResult<Value> = invocation.invoke_async(&mut conn).await;\n\n if let Err(err) = res {\n\n 
log::error!(\"Failed to release Redis lock: {:?}\", err);\n\n }\n\n });\n\n }\n\n}\n\n\n\n/// A client used for locking in Redis.\n\n///\n\n/// This struct can be cheaply cloned.\n", "file_path": "src/utils/redis/locking.rs", "rank": 89, "score": 34969.55241489634 }, { "content": " fetcher: Addr<FetchAgent>,\n\n /// Key manager if rotating keys are enabled.\n\n key_manager: Option<Addr<RotatingKeys>>,\n\n /// Script used to check a limit.\n\n limit_script: Arc<Script>,\n\n /// Configuration for per-email rate limiting.\n\n limit_per_email_config: LimitConfig,\n\n}\n\n\n\nimpl RedisStore {\n\n pub async fn new(\n\n mut url: String,\n\n expire_sessions: Duration,\n\n expire_cache: Duration,\n\n limit_per_email_config: LimitConfig,\n\n fetcher: Addr<FetchAgent>,\n\n rng: SecureRandom,\n\n ) -> RedisResult<Self> {\n\n if url.starts_with(\"http://\") {\n\n url = url.replace(\"http://\", \"redis://\");\n", "file_path": "src/agents/store/redis.rs", "rank": 91, "score": 34963.191989347404 }, { "content": "#[derive(Clone)]\n\npub struct LockClient {\n\n conn: MultiplexedConnection,\n\n pubsub: Subscriber,\n\n rng: SecureRandom,\n\n unlock_script: Arc<Script>,\n\n}\n\n\n\nimpl LockClient {\n\n /// Create a new instance.\n\n ///\n\n /// This takes a Redis connection and a Redis pubsub connection, which must both be connected\n\n /// to the same server.\n\n pub fn new(conn: MultiplexedConnection, pubsub: Subscriber, rng: SecureRandom) -> Self {\n\n let unlock_script = Arc::new(Script::new(\n\n r\"\n\n if redis.call('GET', KEYS[1]) == ARGV[1] then\n\n redis.call('DEL', KEYS[1])\n\n redis.call('PUBLISH', KEYS[1], 'UNLOCK')\n\n return 1\n", "file_path": "src/utils/redis/locking.rs", "rank": 92, "score": 34961.8828043322 }, { "content": "use crate::agents::*;\n\nuse crate::config::LimitConfig;\n\nuse crate::crypto::SigningAlgorithm;\n\nuse crate::utils::{\n\n agent::*,\n\n redis::{locking, pubsub},\n\n SecureRandom,\n\n};\n\nuse ::redis::{\n\n aio::MultiplexedConnection as 
RedisConn, pipe, AsyncCommands, Client as RedisClient,\n\n IntoConnectionInfo, RedisResult, Script,\n\n};\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\n\n\n/// Internal message used to lock a key set.\n", "file_path": "src/agents/store/redis.rs", "rank": 93, "score": 34960.225657874995 }, { "content": " else\n\n return 0\n\n end\n\n \",\n\n ));\n\n Self {\n\n conn,\n\n pubsub,\n\n rng,\n\n unlock_script,\n\n }\n\n }\n\n\n\n /// Acquire a lock.\n\n ///\n\n /// Takes a lock key name, and a unique request ID.\n\n ///\n\n /// Note that the given key *is* the lock, and the key may not otherwise be written to.\n\n /// (Unlike, say, file locking, where the lock is conceptually metadata on the file.)\n\n pub async fn lock(&mut self, key: &[u8]) -> LockGuard {\n", "file_path": "src/utils/redis/locking.rs", "rank": 94, "score": 34960.19872697202 }, { "content": " } else if !url.starts_with(\"redis://\") {\n\n url = format!(\"redis://{}\", &url);\n\n }\n\n let id = Arc::new(rng.generate_async(16).await);\n\n let info = url.as_str().into_connection_info()?;\n\n let pubsub = pubsub::connect(&info).await?;\n\n let conn = RedisClient::open(info)?\n\n .get_multiplexed_tokio_connection()\n\n .await?;\n\n let locking = locking::LockClient::new(conn.clone(), pubsub.clone(), rng);\n\n\n\n log::warn!(\"Storing sessions and keys in Redis at {}\", url);\n\n log::warn!(\"Please always double check this Redis and the connection to it are secure!\");\n\n log::warn!(\"(This warning can't be fixed; it's a friendly reminder.)\");\n\n\n\n let limit_script = Arc::new(Script::new(\n\n r\"\n\n local count = redis.call('incr', KEYS[1])\n\n if count == 1 then\n\n redis.call('expire', KEYS[1], ARGV[1])\n", "file_path": "src/agents/store/redis.rs", "rank": 95, "score": 34955.73366065436 }, { "content": " end\n\n return count\n\n \",\n\n ));\n\n\n\n Ok(RedisStore {\n\n id,\n\n conn,\n\n pubsub,\n\n locking,\n\n expire_sessions,\n\n expire_cache,\n\n fetcher,\n\n key_manager: None,\n\n 
limit_script,\n\n limit_per_email_config,\n\n })\n\n }\n\n\n\n fn format_session_key(session_id: &str) -> String {\n", "file_path": "src/agents/store/redis.rs", "rank": 96, "score": 34951.73579126499 }, { "content": "pub mod locking;\n\npub mod pubsub;\n", "file_path": "src/utils/redis/mod.rs", "rank": 97, "score": 34949.724230016654 }, { "content": "const findTextPart = mimeParts => {\n\n const textPart = mimeParts.find(part =>\n\n (part.Headers[\"Content-Type\"] || []).some(\n\n value => value.indexOf(\"text/plain\") === 0\n\n )\n\n );\n\n if (textPart) {\n\n return textPart;\n\n }\n\n\n\n const nestedPart = mimeParts.find(part =>\n\n (part.Headers[\"Content-Type\"] || []).some(\n\n value => value.indexOf(\"multipart/\") === 0\n\n )\n\n );\n\n if (nestedPart) {\n\n return findTextPart(nestedPart.MIME.Parts);\n\n }\n\n\n\n throw Error(\"Could not find plain text part in mail\");\n", "file_path": "tests/e2e/src/mailhog.js", "rank": 98, "score": 28187.35412298841 }, { "content": "## Eviction\n\n\n\nSetting `maxmemory-policy` to one of the `volatile-*` options is recommended.\n\nIf you're unsure which, use `volatile-lru`.\n\n\n\nAvoid the `allkeys-*` options, especially if you're using rotating signing\n\nkeys. Using these opens up the possibility that a malicious user floods your\n\nserver with data and causes the signing keys to be evicted.\n\n\n\nIt's also not recommended to use `noeviction`. The broker may not have a useful\n\nway to handle errors for certain write operations to Redis, and it'll simply\n\nexit in these cases.\n\n\n\n## Snapshotting\n\n\n\nEnabling snapshotting in Redis (using the `save` option) is, of course,\n\nrecommended. It'll allow Redis to recover the database across restarts.\n\n\n\nYou should take as much care as possible to ensure the snapshot location is\n\nsecure. 
The most basic measure to take here is to run Redis as its own user,\n\nand to prevent other processes from reading the directory Redis saves snapshots\n\nto using filesystem permissions.\n\n\n\nEnabling `stop-writes-on-bgsave-error` is also recommended. This will cause the\n\nbroker to also hard-fail if your Redis server is not able to write snapshots,\n\nwhich is usually what you want.\n\n\n\n## Clustering and replication\n\n\n\nThe broker currently does not support clustered Redis installations, or\n\ndistributing load to read-only replicas.\n", "file_path": "docs/storage/redis.md", "rank": 99, "score": 24676.71832401522 } ]
Rust
src/util.rs
HEnquist/flexi_logger
5a89eb567d35a7821b682e95e9328859b133c042
use crate::{deferred_now::DeferredNow, FormatFunction}; use log::Record; use std::cell::RefCell; use std::io::Write; #[cfg(test)] use std::io::Cursor; #[cfg(test)] use std::sync::{Arc, Mutex}; #[cfg(feature = "async")] pub(crate) const ASYNC_FLUSH: &[u8] = b"F"; #[cfg(feature = "async")] pub(crate) const ASYNC_SHUTDOWN: &[u8] = b"S"; #[derive(Copy, Clone, Debug)] pub(crate) enum ERRCODE { Write, Flush, Format, Poison, LogFile, #[cfg(feature = "specfile_without_notification")] LogSpecFile, #[cfg(target_os = "linux")] Symlink, } impl ERRCODE { fn as_index(self) -> &'static str { match self { Self::Write => "write", Self::Flush => "flush", Self::Format => "format", Self::Poison => "poison", Self::LogFile => "logfile", #[cfg(feature = "specfile_without_notification")] Self::LogSpecFile => "logspecfile", #[cfg(target_os = "linux")] Self::Symlink => "symlink", } } } pub(crate) fn eprint_err(errcode: ERRCODE, msg: &str, err: &dyn std::error::Error) { eprintln!( "[flexi_logger][ERRCODE::{code:?}] {msg}, caused by {err}\n\ See https://docs.rs/flexi_logger/latest/flexi_logger/error_info/index.html#{code_lc}", msg = msg, err = err, code = errcode, code_lc = errcode.as_index(), ); } pub(crate) fn io_err(s: &'static str) -> std::io::Error { std::io::Error::new(std::io::ErrorKind::Other, s) } pub(crate) fn buffer_with<F>(f: F) where F: FnOnce(&RefCell<Vec<u8>>), { thread_local! 
{ static BUFFER: RefCell<Vec<u8>> = RefCell::new(Vec::with_capacity(200)); } BUFFER.with(f); } pub(crate) fn write_buffered( format_function: FormatFunction, now: &mut DeferredNow, record: &Record, w: &mut dyn Write, #[cfg(test)] o_validation_buffer: Option<&Arc<Mutex<Cursor<Vec<u8>>>>>, ) -> Result<(), std::io::Error> { let mut result: Result<(), std::io::Error> = Ok(()); buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { Ok(mut buffer) => { (format_function)(&mut *buffer, now, record) .unwrap_or_else(|e| eprint_err(ERRCODE::Format, "formatting failed", &e)); buffer .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ERRCODE::Write, "writing failed", &e)); result = w.write_all(&*buffer).map_err(|e| { eprint_err(ERRCODE::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&*buffer).ok(); } buffer.clear(); } Err(_e) => { let mut tmp_buf = Vec::<u8>::with_capacity(200); (format_function)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ERRCODE::Format, "formatting failed", &e)); tmp_buf .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ERRCODE::Write, "writing failed", &e)); result = w.write_all(&tmp_buf).map_err(|e| { eprint_err(ERRCODE::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&tmp_buf).ok(); } } }); result }
use crate::{deferred_now::DeferredNow, FormatFunction}; use log::Record; use std::cell::RefCell; use std::io::Write; #[cfg(test)] use std::io::Cursor; #[cfg(test)] use std::sync::{Arc, Mutex}; #[cfg(feature = "async")] pub(crate) const ASYNC_FLUSH: &[u8] = b"F"; #[cfg(feature = "async")] pub(crate) const ASYNC_SHUTDOWN: &[u8] = b"S"; #[derive(Copy, Clone, Debug)] pub(crate) enum ERRCODE { Write, Flush, Format, Poison, LogFile, #[cfg(feature = "specfile_without_notification")] LogSpecFile, #[cfg(target_os = "linux")] Symlink, } impl ERRCODE { fn as_index(self) -> &'static str { match self { Self::Write => "write", Self::Flush => "flush", Self::Format => "format", Self::Poison => "poison", Self::LogFile => "logfile", #[cfg(feature = "specfile_without_notification")] Self::LogSpecFile => "logspecfile", #[cfg(target_os = "linux")] Self::Symlink => "symlink", } } } pub(crate) fn eprint_err(errcode: ERRCODE, msg: &str, err: &dyn std::error::Error) { eprintln!( "[flexi_logger][ERRCODE::{code:?}] {msg}, caused by {err}\n\ See https://docs.rs/flexi_logger/latest/flexi_logger/error_info/index.html#{code_lc}", msg = msg, err = err, code = errcode, code_lc = errcode.as_index(), ); } pub(crate) fn io_err(s: &'static str) -> std::io::Error { std::io::Error::new(std::io::ErrorKind::Other, s) } pub(crate) fn buffer_with<F>(f: F) where F: FnOnce(&RefCell<Vec<u8>>), { thread_local! { static BUFFER: RefCell<Vec<u8>> = RefCell::new(Vec::with_capacity(200)); } BUFFER.with(f); } pub(crate) fn write_buffered( format_function: FormatFunction, now: &mut DeferredNow, record: &Record, w: &mut dyn Write, #[cfg(test)] o_validation_buffer: Option<&Arc<Mutex<Cursor<Vec<u8>>>>>, ) -> Result<(), std::io::Error> { let mut result: Result<(), std::io::Error> = Ok(());
; result }
buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { Ok(mut buffer) => { (format_function)(&mut *buffer, now, record) .unwrap_or_else(|e| eprint_err(ERRCODE::Format, "formatting failed", &e)); buffer .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ERRCODE::Write, "writing failed", &e)); result = w.write_all(&*buffer).map_err(|e| { eprint_err(ERRCODE::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&*buffer).ok(); } buffer.clear(); } Err(_e) => { let mut tmp_buf = Vec::<u8>::with_capacity(200); (format_function)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ERRCODE::Format, "formatting failed", &e)); tmp_buf .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ERRCODE::Write, "writing failed", &e)); result = w.write_all(&tmp_buf).map_err(|e| { eprint_err(ERRCODE::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&tmp_buf).ok(); } } })
call_expression
[ { "content": "fn push_err(s: &str, parse_errs: &mut String) {\n\n if !parse_errs.is_empty() {\n\n parse_errs.push_str(\"; \");\n\n }\n\n parse_errs.push_str(s);\n\n}\n\n\n", "file_path": "src/log_specification.rs", "rank": 0, "score": 184303.19247900957 }, { "content": "fn contains_whitespace(s: &str, parse_errs: &mut String) -> bool {\n\n let result = s.chars().any(char::is_whitespace);\n\n if result {\n\n push_err(\n\n &format!(\n\n \"ignoring invalid part in log spec '{}' (contains a whitespace)\",\n\n s\n\n ),\n\n parse_errs,\n\n );\n\n }\n\n result\n\n}\n\n\n\n#[allow(clippy::needless_doctest_main)]\n\n/// Builder for [`LogSpecification`].\n\n///\n\n/// # Example\n\n///\n\n/// Start with a programmatically built log specification, and use the\n", "file_path": "src/log_specification.rs", "rank": 1, "score": 166402.00966323714 }, { "content": "#[cfg(feature = \"colors\")]\n\nfn parse_style(input: &str) -> Result<Style, std::num::ParseIntError> {\n\n Ok(if input == \"-\" {\n\n Style::new()\n\n } else {\n\n match input.strip_prefix('b') {\n\n None => Style::new().fg(Color::Fixed(input.parse()?)),\n\n Some(s) => Style::new().bold().fg(Color::Fixed(s.parse()?)),\n\n }\n\n })\n\n}\n\n\n\n/// Can be used in\n\n/// [`Logger::adaptive_format_for_stderr`](crate::Logger::adaptive_format_for_stderr) and\n\n/// [`Logger::adaptive_format_for_stdout`](crate::Logger::adaptive_format_for_stdout)\n\n/// to use coloring only if the output goes to a tty.\n\n///\n\n/// This is helpful if the output is sometimes piped into other programs, which usually\n\n/// do not expect color control byte sequences.\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"atty\")))]\n\n#[cfg(feature = \"atty\")]\n", "file_path": "src/formats.rs", "rank": 2, "score": 156843.8501141569 }, { "content": "fn work(value: u8) {\n\n let logger = Logger::try_with_str(\"info\").unwrap().format(opt_format);\n\n\n\n let logger = match value {\n\n 0 => {\n\n println!(\"stdout, direct\");\n\n 
logger.log_to_stdout().write_mode(WriteMode::Direct)\n\n }\n\n 1 => {\n\n println!(\"stdout, buffer+flush\");\n\n logger.log_to_stdout().write_mode(WriteMode::BufferAndFlush)\n\n }\n\n 2 => {\n\n #[cfg(feature = \"async\")]\n\n {\n\n println!(\"stdout, async\");\n\n logger.log_to_stdout().write_mode(WriteMode::Async)\n\n }\n\n #[cfg(not(feature = \"async\"))]\n\n {\n", "file_path": "tests/test_write_modes.rs", "rank": 3, "score": 142022.0530294164 }, { "content": "fn parse_level_filter<S: AsRef<str>>(s: S) -> Result<LevelFilter, FlexiLoggerError> {\n\n match s.as_ref().to_lowercase().as_ref() {\n\n \"off\" => Ok(LevelFilter::Off),\n\n \"error\" => Ok(LevelFilter::Error),\n\n \"warn\" => Ok(LevelFilter::Warn),\n\n \"info\" => Ok(LevelFilter::Info),\n\n \"debug\" => Ok(LevelFilter::Debug),\n\n \"trace\" => Ok(LevelFilter::Trace),\n\n _ => Err(FlexiLoggerError::LevelFilter(format!(\n\n \"unknown level filter: {}\",\n\n s.as_ref()\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/log_specification.rs", "rank": 4, "score": 119191.953520978 }, { "content": "// launch child process from same executable and sets there an additional environment variable\n\n// or finds this environment variable and returns its value\n\npub fn dispatch(count: u8) -> Option<u8> {\n\n match std::env::var(CTRL_INDEX) {\n\n Err(_) => {\n\n println!(\"dispatcher\");\n\n let progname = std::env::args().next().unwrap();\n\n let nocapture = std::env::args().find(|a| a == \"--nocapture\").is_some();\n\n for value in 0..count {\n\n let mut command = std::process::Command::new(progname.to_string());\n\n if nocapture {\n\n command.arg(\"--nocapture\");\n\n }\n\n let status = command\n\n .env(CTRL_INDEX, value.to_string())\n\n .status()\n\n .expect(\"Command failed to start\");\n\n assert!(status.success());\n\n }\n\n None\n\n }\n\n Ok(value) => {\n\n println!(\"executor {}\", value);\n\n Some(value.parse().unwrap())\n\n }\n\n }\n\n}\n", "file_path": "tests/test_utils.rs", "rank": 5, "score": 
113704.0574747943 }, { "content": "fn run_script(s: &str) {\n\n let mut path = std::path::PathBuf::from(std::env::var(\"CARGO_SCRIPT_BASE_PATH\").unwrap());\n\n path.push(s);\n\n let script = path.to_string_lossy().to_owned().to_string();\n\n run_command!(\"cargo\", \"script\", script);\n\n}\n\n\n", "file_path": "scripts/qualify.rs", "rank": 6, "score": 112799.3496061613 }, { "content": "fn remove_or_compress_too_old_logfiles_impl(\n\n cleanup_config: &Cleanup,\n\n file_spec: &FileSpec,\n\n) -> Result<(), std::io::Error> {\n\n let (log_limit, compress_limit) = match *cleanup_config {\n\n Cleanup::Never => {\n\n return Ok(());\n\n }\n\n Cleanup::KeepLogFiles(log_limit) => (log_limit, 0),\n\n\n\n #[cfg(feature = \"compress\")]\n\n Cleanup::KeepCompressedFiles(compress_limit) => (0, compress_limit),\n\n\n\n #[cfg(feature = \"compress\")]\n\n Cleanup::KeepLogAndCompressedFiles(log_limit, compress_limit) => {\n\n (log_limit, compress_limit)\n\n }\n\n };\n\n\n\n for (index, file) in list_of_log_and_compressed_files(file_spec).enumerate() {\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 7, "score": 110434.62643459634 }, { "content": "fn run_script(s: &str) {\n\n let mut path = std::path::PathBuf::from(std::env::var(\"CARGO_SCRIPT_BASE_PATH\").unwrap());\n\n path.push(s);\n\n let script = path.to_string_lossy().to_owned().to_string();\n\n run_command!(\"cargo\", \"script\", script);\n\n}\n\n\n", "file_path": "scripts/qualify_fast.rs", "rank": 8, "score": 110240.24107277588 }, { "content": "fn main() -> Result<(), FlexiLoggerError> {\n\n flexi_logger::Logger::try_with_str(\"info\")?\n\n .filter(Box::new(BarsOnly))\n\n .start()?;\n\n log::info!(\"barista\");\n\n log::info!(\"foo\"); // will be swallowed by the filter\n\n log::info!(\"bar\");\n\n log::info!(\"gaga\"); // will be swallowed by the filter\n\n Ok(())\n\n}\n\n\n\npub struct BarsOnly;\n\nimpl LogLineFilter for BarsOnly {\n\n fn write(\n\n &self,\n\n now: &mut DeferredNow,\n\n record: 
&log::Record,\n\n log_line_writer: &dyn LogLineWriter,\n\n ) -> std::io::Result<()> {\n\n if record.args().to_string().contains(\"bar\") {\n\n log_line_writer.write(now, record)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "examples/filter.rs", "rank": 9, "score": 108130.58910372166 }, { "content": "fn work(value: u8) {\n\n let link_name = \"link_to_log\".to_string();\n\n let mut logger = Logger::try_with_str(\"info\").unwrap();\n\n\n\n match value {\n\n 0 => {\n\n logger = logger.log_to_file(\n\n FileSpec::default()\n\n .directory(\"log_files\")\n\n .basename(\"to_foo_or_not_to_foo\"),\n\n );\n\n }\n\n 1 => {\n\n logger = logger\n\n .log_to_file(FileSpec::default().directory(\"log_files\"))\n\n .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never);\n\n }\n\n 2 => {\n\n logger = logger\n\n .format(detailed_format)\n", "file_path": "tests/test_file_writer.rs", "rank": 10, "score": 107761.42710661612 }, { "content": "fn verify_logs(directory: &str) {\n\n let expected_line_counts = [3, 3, 3, 1, 1, 3, 1];\n\n // read all files\n\n let pattern = String::from(directory).add(\"/*\");\n\n let globresults = match glob(&pattern) {\n\n Err(e) => panic!(\n\n \"Is this ({}) really a directory? 
Listing failed with {}\",\n\n pattern, e\n\n ),\n\n Ok(globresults) => globresults,\n\n };\n\n let mut no_of_log_files = 0;\n\n let mut total_line_count = 0_usize;\n\n for (index, globresult) in globresults.into_iter().enumerate() {\n\n let mut line_count = 0_usize;\n\n let pathbuf = globresult.unwrap_or_else(|e| panic!(\"Ups - error occured: {}\", e));\n\n let f = File::open(&pathbuf)\n\n .unwrap_or_else(|e| panic!(\"Cannot open file {:?} due to {}\", pathbuf, e));\n\n no_of_log_files += 1;\n\n let mut reader = BufReader::new(f);\n", "file_path": "tests/test_age_or_size.rs", "rank": 11, "score": 105626.75629339629 }, { "content": "fn work(value: u8) {\n\n let mut logger = Logger::try_with_str(\"info\").unwrap();\n\n match value {\n\n 0 => {\n\n logger = logger.log_to_writer(Box::new(CustomWriter {\n\n data: Mutex::new(Vec::new()),\n\n format: default_format,\n\n mode: 0,\n\n }));\n\n }\n\n 1 => {\n\n logger = logger.log_to_writer(Box::new(CustomWriter {\n\n data: Mutex::new(Vec::new()),\n\n format: default_format,\n\n mode: 1,\n\n }));\n\n logger = logger.format(custom_format);\n\n }\n\n _ => unreachable!(\"asAS\"),\n\n }\n", "file_path": "tests/test_custom_log_writer.rs", "rank": 12, "score": 105533.6556951624 }, { "content": "#[test]\n\nfn you_must_see_exactly_three_messages_above_1_err_1_warn_1_info() {\n\n flexi_logger::Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .start()\n\n .unwrap();\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n}\n", "file_path": "tests/test_env_logger_style.rs", "rank": 13, "score": 105320.60593659928 }, { "content": "fn verify_logs(directory: &str) {\n\n // read all files\n\n let pattern = String::from(directory).add(\"/*\");\n\n let globresults = match glob(&pattern) {\n\n Err(e) => panic!(\n\n \"Is this ({}) really a directory? 
Listing failed with {}\",\n\n pattern, e\n\n ),\n\n Ok(globresults) => globresults,\n\n };\n\n let mut no_of_log_files = 0;\n\n let mut line_count = 0_usize;\n\n for globresult in globresults {\n\n let pathbuf = globresult.unwrap_or_else(|e| panic!(\"Ups - error occured: {}\", e));\n\n let f = File::open(&pathbuf)\n\n .unwrap_or_else(|e| panic!(\"Cannot open file {:?} due to {}\", pathbuf, e));\n\n no_of_log_files += 1;\n\n let mut reader = BufReader::new(f);\n\n let mut buffer = String::new();\n\n while reader.read_line(&mut buffer).unwrap() > 0 {\n", "file_path": "tests/test_multi_threaded_dates.rs", "rank": 14, "score": 103540.25336579022 }, { "content": "fn verify_logs(directory: &str) {\n\n // read all files\n\n let pattern = String::from(directory).add(\"/*\");\n\n let globresults = match glob(&pattern) {\n\n Err(e) => panic!(\n\n \"Is this ({}) really a directory? Listing failed with {}\",\n\n pattern, e\n\n ),\n\n Ok(globresults) => globresults,\n\n };\n\n let mut no_of_log_files = 0;\n\n let mut line_count = 0_usize;\n\n for globresult in globresults {\n\n let pathbuf = globresult.unwrap_or_else(|e| panic!(\"Ups - error occured: {}\", e));\n\n let f = File::open(&pathbuf)\n\n .unwrap_or_else(|e| panic!(\"Cannot open file {:?} due to {}\", pathbuf, e));\n\n no_of_log_files += 1;\n\n let mut reader = BufReader::new(f);\n\n let mut buffer = String::new();\n\n while reader.read_line(&mut buffer).unwrap() > 0 {\n", "file_path": "tests/test_multi_threaded_numbers.rs", "rank": 15, "score": 103540.25336579022 }, { "content": "#[bench]\n\nfn b30_relevant_logs(b: &mut Bencher) {\n\n b.iter(use_error);\n\n}\n\n\n", "file_path": "benches/bench_standard.rs", "rank": 16, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b40_suppressed_logs(b: &mut Bencher) {\n\n b.iter(use_trace);\n\n}\n\n\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 17, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b20_initialize_logger(_: &mut Bencher) {\n\n 
Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .log_to_file(FileSpec::default().directory(\"log_files\"))\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n}\n\n\n", "file_path": "benches/bench_standard.rs", "rank": 18, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b20_initialize_logger(_: &mut Bencher) {\n\n Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .log_to_file(FileSpec::default().directory(\"log_files\"))\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n}\n\n\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 19, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b30_relevant_logs(b: &mut Bencher) {\n\n b.iter(use_error);\n\n}\n\n\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 20, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b40_suppressed_logs(b: &mut Bencher) {\n\n b.iter(use_trace);\n\n}\n\n\n", "file_path": "benches/bench_standard.rs", "rank": 21, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b10_no_logger_active(b: &mut Bencher) {\n\n b.iter(use_error);\n\n}\n\n\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 22, "score": 102092.0363526268 }, { "content": "#[bench]\n\nfn b10_no_logger_active(b: &mut Bencher) {\n\n b.iter(use_error);\n\n}\n\n\n", "file_path": "benches/bench_standard.rs", "rank": 23, "score": 102092.0363526268 }, { "content": "/// A logline-formatter that produces log lines like <br>\n\n/// ```INFO [my_prog::some_submodule] Task successfully read from conf.json```\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n\npub fn default_format(\n\n w: &mut dyn std::io::Write,\n\n _now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n write!(\n\n w,\n\n \"{} [{}] {}\",\n\n record.level(),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n record.args()\n\n )\n\n}\n\n\n\n#[allow(clippy::doc_markdown)]\n\n/// A colored version of the 
logline-formatter `default_format`\n\n/// that produces log lines like <br>\n\n/// <code><span style=\"color:red\">ERROR</span> &#91;my_prog::some_submodule&#93; <span\n\n/// style=\"color:red\">File not found</span></code>\n\n///\n\n/// See method [style](crate::style) if you want to influence coloring.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n", "file_path": "src/formats.rs", "rank": 24, "score": 100887.00813630295 }, { "content": "/// A logline-formatter that produces log lines with timestamp and file location, like\n\n/// <br>\n\n/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [src/foo/bar:26] Task successfully read from conf.json```\n\n/// <br>\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n\npub fn opt_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n write!(\n\n w,\n\n \"[{}] {} [{}:{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n record.level(),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n\n/// A colored version of the logline-formatter `opt_format`.\n\n///\n\n/// See method [style](crate::style) if you want to influence coloring.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n", "file_path": "src/formats.rs", "rank": 25, "score": 100886.70332057637 }, { "content": "/// A logline-formatter that produces log lines like\n\n/// <br>\n\n/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [foo::bar] src/foo/bar.rs:26: Task successfully read from conf.json```\n\n/// <br>\n\n/// i.e. 
with timestamp, module path and file location.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n\npub fn detailed_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n write!(\n\n w,\n\n \"[{}] {} [{}] {}:{}: {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n record.level(),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n\n/// A colored version of the logline-formatter `detailed_format`.\n\n///\n\n/// See method [style](crate::style) if you want to influence coloring.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n", "file_path": "src/formats.rs", "rank": 26, "score": 100886.42108979195 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"colors\")))]\n\n#[cfg(feature = \"colors\")]\n\npub fn colored_opt_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"[{}] {} [{}:{}] {}\",\n\n style(level).paint(now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\").to_string()),\n\n style(level).paint(level.to_string()),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n style(level).paint(&record.args().to_string())\n\n )\n\n}\n\n\n", "file_path": "src/formats.rs", "rank": 27, "score": 98558.52138681027 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"colors\")))]\n\n#[cfg(feature = \"colors\")]\n\npub fn colored_detailed_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"[{}] {} [{}] {}:{}: {}\",\n\n style(level).paint(now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\").to_string()),\n\n style(level).paint(record.level().to_string()),\n\n 
record.module_path().unwrap_or(\"<unnamed>\"),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n style(level).paint(&record.args().to_string())\n\n )\n\n}\n\n\n", "file_path": "src/formats.rs", "rank": 28, "score": 98558.52138681027 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"colors\")))]\n\n#[cfg(feature = \"colors\")]\n\npub fn colored_default_format(\n\n w: &mut dyn std::io::Write,\n\n _now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"{} [{}] {}\",\n\n style(level).paint(level.to_string()),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n style(level).paint(record.args().to_string())\n\n )\n\n}\n\n\n", "file_path": "src/formats.rs", "rank": 29, "score": 98558.52138681027 }, { "content": "#[test]\n\nfn test_write_modes() {\n\n if let Some(value) = test_utils::dispatch(COUNT) {\n\n work(value)\n\n }\n\n}\n\n\n", "file_path": "tests/test_write_modes.rs", "rank": 30, "score": 98503.7481853964 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\nfn validate_logs(logger: &mut LoggerHandle) {\n\n logger.validate_logs(&[\n\n (\"ERROR\", \"test_reconfigure_methods\", \"1-error\"),\n\n (\"WARN\", \"test_reconfigure_methods\", \"1-warning\"),\n\n (\"INFO\", \"test_reconfigure_methods\", \"1-info\"),\n\n //\n\n (\"ERROR\", \"test_reconfigure_methods\", \"1-error\"),\n\n //\n\n (\"ERROR\", \"test_reconfigure_methods\", \"1-error\"),\n\n (\"WARN\", \"test_reconfigure_methods\", \"1-warning\"),\n\n (\"INFO\", \"test_reconfigure_methods\", \"1-info\"),\n\n (\"DEBUG\", \"test_reconfigure_methods\", \"1-debug\"),\n\n (\"TRACE\", \"test_reconfigure_methods\", \"1-trace\"),\n\n // -----\n\n (\"ERROR\", \"test_reconfigure_methods\", \"2-error\"),\n\n (\"WARN\", \"test_reconfigure_methods\", \"2-warning\"),\n\n (\"INFO\", \"test_reconfigure_methods\", \"2-info\"),\n\n //\n\n (\"ERROR\", \"test_reconfigure_methods\", \"2-error\"),\n\n //\n", 
"file_path": "tests/test_reconfigure_methods.rs", "rank": 31, "score": 98049.28745379756 }, { "content": "enum ErrWriter {\n\n Unbuffered(std::io::Stderr),\n\n Buffered(Mutex<BufWriter<std::io::Stderr>>),\n\n #[cfg(feature = \"async\")]\n\n Async(AsyncHandle),\n\n}\n", "file_path": "src/primary_writer/stderr_writer.rs", "rank": 32, "score": 97495.27946459445 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\nfn test_push_new_spec(logger: &mut LoggerHandle) {\n\n error!(\"2-error message\");\n\n warn!(\"2-warning\");\n\n info!(\"2-info message\");\n\n debug!(\"2-debug message - you must not see it!\");\n\n trace!(\"2-trace message - you must not see it!\");\n\n\n\n logger.parse_and_push_temp_spec(\"error\").ok();\n\n error!(\"2-error message\");\n\n warn!(\"2-warning - you must not see it!\");\n\n info!(\"2-info message - you must not see it!\");\n\n debug!(\"2-debug message - you must not see it!\");\n\n trace!(\"2-trace message - you must not see it!\");\n\n\n\n logger.parse_and_push_temp_spec(\"trace\").ok();\n\n error!(\"2-error message\");\n\n warn!(\"2-warning\");\n\n info!(\"2-info message\");\n\n debug!(\"2-debug message\");\n\n trace!(\"2-trace message\");\n", "file_path": "tests/test_reconfigure_methods.rs", "rank": 33, "score": 94476.33369401548 }, { "content": "fn test_parse_new_spec(logger: &mut LoggerHandle) {\n\n error!(\"1-error message\");\n\n warn!(\"1-warning\");\n\n info!(\"1-info message\");\n\n debug!(\"1-debug message - you must not see it!\");\n\n trace!(\"1-trace message - you must not see it!\");\n\n\n\n logger.parse_new_spec(\"error\").ok();\n\n error!(\"1-error message\");\n\n warn!(\"1-warning - you must not see it!\");\n\n info!(\"1-info message - you must not see it!\");\n\n debug!(\"1-debug message - you must not see it!\");\n\n trace!(\"1-trace message - you must not see it!\");\n\n\n\n logger.parse_new_spec(\"trace\").ok();\n\n error!(\"1-error message\");\n\n warn!(\"1-warning\");\n\n info!(\"1-info message\");\n\n 
debug!(\"1-debug message\");\n\n trace!(\"1-trace message\");\n\n\n\n logger.parse_new_spec(\"info\").ok();\n\n}\n\n\n", "file_path": "tests/test_reconfigure_methods.rs", "rank": 34, "score": 94476.33369401548 }, { "content": "/// A logline-formatter that produces log lines like\n\n/// <br>\n\n/// ```[2016-01-13 15:25:01.640870 +01:00] T[taskreader] INFO [src/foo/bar:26] Task successfully read from conf.json```\n\n/// <br>\n\n/// i.e. with timestamp, thread name and file location.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n\npub fn with_thread(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n write!(\n\n w,\n\n \"[{}] T[{:?}] {} [{}:{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n thread::current().name().unwrap_or(\"<unnamed>\"),\n\n record.level(),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n\n/// A colored version of the logline-formatter `with_thread`.\n\n///\n\n/// See method [style](crate::style) if you want to influence coloring.\n\n///\n\n/// # Errors\n\n///\n\n/// See `std::write`\n", "file_path": "src/formats.rs", "rank": 35, "score": 93661.99141597384 }, { "content": "fn use_trace() {\n\n for _ in 1..100 {\n\n trace!(\"This is a trace message\");\n\n }\n\n}\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 36, "score": 93180.33970470275 }, { "content": "fn use_error() {\n\n for _ in 1..100 {\n\n error!(\"This is an error message\");\n\n }\n\n}\n", "file_path": "benches/bench_standard.rs", "rank": 37, "score": 93180.33970470275 }, { "content": "fn use_error() {\n\n for _ in 1..100 {\n\n error!(\"This is an error message\");\n\n }\n\n}\n", "file_path": "benches/bench_reconfigurable.rs", "rank": 38, "score": 93180.33970470275 }, { "content": "fn use_trace() {\n\n for _ in 1..100 {\n\n trace!(\"This is a trace message\");\n\n }\n\n}\n", "file_path": "benches/bench_standard.rs", 
"rank": 39, "score": 93180.33970470275 }, { "content": "fn parse_err(\n\n errors: String,\n\n logspec: LogSpecification,\n\n) -> Result<LogSpecification, FlexiLoggerError> {\n\n Err(FlexiLoggerError::Parse(errors, logspec))\n\n}\n\n\n", "file_path": "src/log_specification.rs", "rank": 40, "score": 93171.02744558528 }, { "content": "pub fn my_format(\n\n w: &mut dyn std::io::Write,\n\n _now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"{} [{}] {}\",\n\n level,\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n record.args()\n\n )\n\n}\n", "file_path": "tests/test_recursion.rs", "rank": 41, "score": 90906.72239181834 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"colors\")))]\n\n#[cfg(feature = \"colors\")]\n\npub fn colored_with_thread(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \"[{}] T[{:?}] {} [{}:{}] {}\",\n\n style(level).paint(now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\").to_string()),\n\n style(level).paint(thread::current().name().unwrap_or(\"<unnamed>\")),\n\n style(level).paint(level.to_string()),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n style(level).paint(&record.args().to_string())\n\n )\n\n}\n\n\n\n/// Helper function that is used in the provided coloring format functions to apply\n\n/// colors based on the log level and the effective color palette.\n\n///\n\n/// See [`Logger::set_palette`](crate::Logger::set_palette) if you want to\n\n/// modify the color palette.\n", "file_path": "src/formats.rs", "rank": 42, "score": 90906.72239181834 }, { "content": "#[test]\n\nfn you_must_not_see_anything() {\n\n Logger::try_with_str(\"trace\")\n\n .unwrap()\n\n .do_not_log()\n\n .start()\n\n .unwrap();\n\n\n\n error!(\"This is an error message - you must not see it!\");\n\n warn!(\"This is a 
warning - you must not see it!\");\n\n info!(\"This is an info message - you must not see it!\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n}\n", "file_path": "tests/test_no_logger.rs", "rank": 43, "score": 90637.62670763524 }, { "content": "fn wait_for_workers_to_close(worker_handles: Vec<JoinHandle<u8>>) {\n\n for worker_handle in worker_handles {\n\n worker_handle\n\n .join()\n\n .unwrap_or_else(|e| panic!(\"Joining worker thread failed: {:?}\", e));\n\n }\n\n trace!(\"All worker threads joined.\");\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_numbers.rs", "rank": 44, "score": 88472.41638348757 }, { "content": "fn wait_for_workers_to_close(worker_handles: Vec<JoinHandle<u8>>) {\n\n for worker_handle in worker_handles {\n\n worker_handle\n\n .join()\n\n .unwrap_or_else(|e| panic!(\"Joining worker thread failed: {:?}\", e));\n\n }\n\n trace!(\"All worker threads joined.\");\n\n}\n", "file_path": "tests/test_multi_threaded_stderr.rs", "rank": 45, "score": 88472.41638348757 }, { "content": "fn wait_for_workers_to_close(worker_handles: Vec<JoinHandle<u8>>) {\n\n for worker_handle in worker_handles {\n\n worker_handle\n\n .join()\n\n .unwrap_or_else(|e| panic!(\"Joining worker thread failed: {:?}\", e));\n\n }\n\n trace!(\"All worker threads joined.\");\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_dates.rs", "rank": 46, "score": 88472.41638348757 }, { "content": "#[cfg(feature = \"colors\")]\n\npub fn my_colored_format(\n\n w: &mut dyn std::io::Write,\n\n _now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n let style = ansi_term::Style::new().fg(ansi_term::Color::Fixed(165));\n\n write!(\n\n w,\n\n \"{} [{}] {}\",\n\n style.paint(level.to_string()),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n style.paint(record.args().to_string())\n\n )\n\n}\n", "file_path": "tests/test_recursion.rs", 
"rank": 47, "score": 88352.21111046892 }, { "content": "fn write_log_lines() {\n\n // Fill first three files by size\n\n trace!(\"{}\", 'a');\n\n trace!(\"{}\", 'b');\n\n trace!(\"{}\", 'c');\n\n\n\n trace!(\"{}\", 'd');\n\n trace!(\"{}\", 'e');\n\n trace!(\"{}\", 'f');\n\n\n\n trace!(\"{}\", 'g');\n\n trace!(\"{}\", 'h');\n\n trace!(\"{}\", 'i');\n\n\n\n trace!(\"{}\", 'j');\n\n\n\n // now wait to enforce a rotation with a smaller file\n\n std::thread::sleep(std::time::Duration::from_secs(2));\n\n trace!(\"{}\", 'k');\n\n\n", "file_path": "tests/test_age_or_size.rs", "rank": 48, "score": 88093.78157239668 }, { "content": "#[test]\n\nfn test_write_modes() {\n\n if let Some(value) = test_utils::dispatch(COUNT) {\n\n work(value)\n\n }\n\n}\n\n\n", "file_path": "tests/test_file_writer.rs", "rank": 49, "score": 88093.78157239668 }, { "content": "fn custom_format(\n\n writer: &mut dyn std::io::Write,\n\n _now: &mut DeferredNow,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n // Only write the message and the level, without the module\n\n write!(writer, \"{}: {}\", record.level(), &record.args())\n\n}\n", "file_path": "tests/test_custom_log_writer.rs", "rank": 50, "score": 88058.7365309484 }, { "content": "fn wait_for_workers_to_close(worker_handles: Vec<JoinHandle<u8>>) {\n\n for worker_handle in worker_handles {\n\n worker_handle\n\n .join()\n\n .unwrap_or_else(|e| panic!(\"Joining worker thread failed: {:?}\", e));\n\n }\n\n trace!(\"All worker threads joined.\");\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_numbers_dedup.rs", "rank": 51, "score": 87009.44320499299 }, { "content": "// Starts given number of worker threads and lets each execute `do_work`\n\nfn start_worker_threads(no_of_workers: usize) -> Vec<JoinHandle<u8>> {\n\n let mut worker_handles: Vec<JoinHandle<u8>> = Vec::with_capacity(no_of_workers);\n\n trace!(\"Starting {} worker threads\", no_of_workers);\n\n for thread_number in 0..no_of_workers {\n\n trace!(\"Starting thread {}\", 
thread_number);\n\n worker_handles.push(\n\n std::thread::Builder::new()\n\n .name(thread_number.to_string())\n\n .spawn(move || {\n\n do_work(thread_number);\n\n 0 as u8\n\n })\n\n .unwrap(),\n\n );\n\n }\n\n trace!(\"All {} worker threads started.\", worker_handles.len());\n\n worker_handles\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_dates.rs", "rank": 52, "score": 86027.50675742055 }, { "content": "// Starts given number of worker threads and lets each execute `do_work`\n\nfn start_worker_threads(no_of_workers: usize) -> Vec<JoinHandle<u8>> {\n\n let mut worker_handles: Vec<JoinHandle<u8>> = Vec::with_capacity(no_of_workers);\n\n trace!(\"Starting {} worker threads\", no_of_workers);\n\n for thread_number in 0..no_of_workers {\n\n trace!(\"Starting thread {}\", thread_number);\n\n worker_handles.push(\n\n std::thread::Builder::new()\n\n .name(thread_number.to_string())\n\n .spawn(move || {\n\n do_work(thread_number);\n\n 0 as u8\n\n })\n\n .unwrap(),\n\n );\n\n }\n\n trace!(\"All {} worker threads started.\", worker_handles.len());\n\n worker_handles\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_numbers.rs", "rank": 53, "score": 86027.50675742055 }, { "content": "// Starts given number of worker threads and lets each execute `do_work`\n\nfn start_worker_threads(no_of_workers: usize) -> Vec<JoinHandle<u8>> {\n\n let mut worker_handles: Vec<JoinHandle<u8>> = Vec::with_capacity(no_of_workers);\n\n trace!(\"Starting {} worker threads\", no_of_workers);\n\n for thread_number in 0..no_of_workers {\n\n trace!(\"Starting thread {}\", thread_number);\n\n worker_handles.push(\n\n thread::Builder::new()\n\n .name(thread_number.to_string())\n\n .spawn(move || {\n\n do_work(thread_number);\n\n 0 as u8\n\n })\n\n .unwrap(),\n\n );\n\n }\n\n trace!(\"All {} worker threads started.\", worker_handles.len());\n\n worker_handles\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_stderr.rs", "rank": 54, "score": 86027.50675742055 }, { "content": "// Starts given 
number of worker threads and lets each execute `do_work`\n\nfn start_worker_threads(no_of_workers: usize) -> Vec<JoinHandle<u8>> {\n\n let mut worker_handles: Vec<JoinHandle<u8>> = Vec::with_capacity(no_of_workers);\n\n trace!(\"Starting {} worker threads\", no_of_workers);\n\n for thread_number in 0..no_of_workers {\n\n trace!(\"Starting thread {}\", thread_number);\n\n worker_handles.push(\n\n std::thread::Builder::new()\n\n .name(thread_number.to_string())\n\n .spawn(move || {\n\n do_work(thread_number);\n\n 0 as u8\n\n })\n\n .unwrap(),\n\n );\n\n }\n\n trace!(\"All {} worker threads started.\", worker_handles.len());\n\n worker_handles\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_numbers_dedup.rs", "rank": 55, "score": 84564.53357892597 }, { "content": "pub fn test_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> std::io::Result<()> {\n\n write!(\n\n w,\n\n \"XXXXX [{}] T[{:?}] {} [{}:{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n std::thread::current().name().unwrap_or(\"<unnamed>\"),\n\n record.level(),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_dates.rs", "rank": 56, "score": 83747.01412037932 }, { "content": "pub fn test_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> std::io::Result<()> {\n\n write!(\n\n w,\n\n \"XXXXX [{}] T[{:?}] {} [{}:{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n std::thread::current().name().unwrap_or(\"<unnamed>\"),\n\n record.level(),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_numbers.rs", "rank": 57, "score": 83747.01412037932 }, { "content": "fn list_of_files(pattern: &str) -> std::vec::IntoIter<PathBuf> {\n\n let mut log_files: Vec<PathBuf> = glob::glob(pattern)\n\n 
.unwrap(/* failure should be impossible */)\n\n .filter_map(Result::ok)\n\n .collect();\n\n log_files.reverse();\n\n log_files.into_iter()\n\n}\n\n\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 58, "score": 82511.55606481692 }, { "content": "fn remove_or_compress_too_old_logfiles(\n\n o_cleanup_thread_handle: &Option<CleanupThreadHandle>,\n\n cleanup_config: &Cleanup,\n\n file_spec: &FileSpec,\n\n) -> Result<(), std::io::Error> {\n\n o_cleanup_thread_handle.as_ref().map_or_else(\n\n || remove_or_compress_too_old_logfiles_impl(cleanup_config, file_spec),\n\n |cleanup_thread_handle| {\n\n cleanup_thread_handle\n\n .sender\n\n .send(MessageToCleanupThread::Act)\n\n .ok();\n\n Ok(())\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 59, "score": 81970.96880370994 }, { "content": "pub fn test_format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut DeferredNow,\n\n record: &Record,\n\n) -> std::io::Result<()> {\n\n write!(\n\n w,\n\n \"XXXXX [{}] T[{:?}] {} [{}:{}] {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S%.6f %:z\"),\n\n std::thread::current().name().unwrap_or(\"<unnamed>\"),\n\n record.level(),\n\n record.file().unwrap_or(\"<unnamed>\"),\n\n record.line().unwrap_or(0),\n\n &record.args()\n\n )\n\n}\n\n\n\n/// A helper to skip duplicated consecutive log lines.\n\npub struct DedupWriter {\n\n deduper: Mutex<Deduper>,\n", "file_path": "tests/test_multi_threaded_numbers_dedup.rs", "rank": 60, "score": 81664.25944327022 }, { "content": "#[allow(clippy::missing_panics_doc)]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"colors\")))]\n\n#[cfg(feature = \"colors\")]\n\n#[must_use]\n\npub fn style(level: log::Level) -> Style {\n\n let palette = &*(PALETTE.read().unwrap());\n\n match level {\n\n log::Level::Error => palette.error,\n\n log::Level::Warn => palette.warn,\n\n log::Level::Info => palette.info,\n\n log::Level::Debug => palette.debug,\n\n log::Level::Trace => palette.trace,\n\n }\n\n}\n\n\n\n#[cfg(feature = 
\"colors\")]\n\nlazy_static::lazy_static! {\n\n static ref PALETTE: std::sync::RwLock<Palette> = std::sync::RwLock::new(Palette::default());\n\n}\n\n\n\n// Overwrites the default PALETTE value either from the environment, if set,\n\n// or from the parameter, if filled.\n\n// Returns an error if parsing failed.\n\n#[cfg(feature = \"colors\")]\n", "file_path": "src/formats.rs", "rank": 61, "score": 78433.39690482557 }, { "content": "#[cfg(not(any(target_os = \"windows\", target_os = \"linux\")))]\n\nfn try_get_creation_date(path: &Path) -> Result<DateTime<Local>, FlexiLoggerError> {\n\n Ok(std::fs::metadata(path)?.created()?.into())\n\n}\n\n\n\nmod platform {\n\n #[cfg(target_os = \"linux\")]\n\n use crate::util::{eprint_err, ERRCODE};\n\n use std::path::Path;\n\n\n\n pub fn create_symlink_if_possible(link: &Path, path: &Path) {\n\n linux_create_symlink(link, path);\n\n }\n\n\n\n #[cfg(target_os = \"linux\")]\n\n fn linux_create_symlink(link: &Path, logfile: &Path) {\n\n if std::fs::symlink_metadata(link).is_ok() {\n\n // remove old symlink before creating a new one\n\n if let Err(e) = std::fs::remove_file(link) {\n\n eprint_err(ERRCODE::Symlink, \"cannot delete symlink to log file\", e);\n\n }\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 62, "score": 77569.19363365657 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum DedupAction {\n\n /// The record should be allowed and logged normally.\n\n Allow,\n\n /// The record is the last consecutive duplicate to be allowed.\n\n ///\n\n /// Any following duplicates will be skipped until a different event is\n\n /// received (or the duplicates count overflows).\n\n AllowLastOfLeeway(usize),\n\n /// The record should be allowed, the last `N` records were skipped as\n\n /// consecutive duplicates.\n\n AllowAfterSkipped(usize),\n\n /// The record should be skipped because no more consecutive duplicates\n\n /// are allowed.\n\n Skip,\n\n}\n\n\n\nimpl Deduper {\n\n // Constructs a new [`Deduper`] that will 
skip duplicated entries after\n\n // some record has been received for the consecutive times specified by\n\n // `leeway`.\n", "file_path": "examples/dedup.rs", "rank": 63, "score": 67008.33910394204 }, { "content": "enum LogTarget {\n\n StdErr,\n\n StdOut,\n\n Multi(bool, Option<Box<dyn LogWriter>>),\n\n}\n\n\n\n/// Create a Logger instance and define how to access the (initial)\n\n/// loglevel-specification.\n\nimpl Logger {\n\n /// Creates a Logger that you provide with an explicit [`LogSpecification`].\n\n #[must_use]\n\n pub fn with(logspec: LogSpecification) -> Self {\n\n Self::from_spec_and_errs(logspec)\n\n }\n\n\n\n /// Creates a Logger that reads the [`LogSpecification`] from a `String` or `&str`.\n\n /// See [`LogSpecification`] for the syntax.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/logger.rs", "rank": 64, "score": 67003.7783851701 }, { "content": "#[derive(Debug, Clone, Eq, PartialEq)]\n\nenum TimestampCfg {\n\n Default,\n\n Yes,\n\n No,\n\n}\n\nimpl TimestampCfg {\n\n fn get_timestamp(&self) -> Option<String> {\n\n match self {\n\n Self::Default | Self::Yes => {\n\n Some(Local::now().format(\"_%Y-%m-%d_%H-%M-%S\").to_string())\n\n }\n\n Self::No => None,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::FileSpec;\n\n use std::path::{Path, PathBuf};\n", "file_path": "src/file_spec.rs", "rank": 65, "score": 65618.35470555852 }, { "content": "enum OutWriter {\n\n Unbuffered(std::io::Stdout),\n\n Buffered(Mutex<BufWriter<std::io::Stdout>>),\n\n #[cfg(feature = \"async\")]\n\n Async(AsyncHandle),\n\n}\n", "file_path": "src/primary_writer/stdout_writer.rs", "rank": 66, "score": 64313.00763556293 }, { "content": "enum Inner {\n\n Initial(Option<RotationConfig>, bool),\n\n Active(Option<RotationState>, Box<dyn Write + Send>),\n\n}\n\nimpl std::fmt::Debug for Inner {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n match self {\n\n Self::Initial(o_rot, b) => 
f.write_fmt(format_args!(\"Initial({:?}, {}) \", o_rot, b)),\n\n Self::Active(o_rot, _) => {\n\n f.write_fmt(format_args!(\"Active({:?}, <some-writer>) \", o_rot,))\n\n }\n\n }\n\n }\n\n}\n\n\n\n// The mutable state of a FileLogWriter.\n\n#[derive(Debug)]\n\npub(crate) struct State {\n\n config: Config,\n\n inner: Inner,\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 67, "score": 63104.80673461998 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum IdxState {\n\n // We rotate to numbered files, and no rotated numbered file exists yet\n\n Start,\n\n // highest index of rotated numbered files\n\n Idx(u32),\n\n}\n\n\n\n// Created_at is needed both for\n\n// is_rotation_necessary() -> if Criterion::Age -> NamingState::CreatedAt\n\n// and rotate_to_date() -> if Naming::Timestamps -> RollState::Age\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 68, "score": 61985.22609842698 }, { "content": "#[derive(Debug)]\n\nenum RollState {\n\n Size(u64, u64), // max_size, current_size\n\n Age(Age),\n\n AgeOrSize(Age, u64, u64), // age, max_size, current_size\n\n}\n\n\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 69, "score": 61980.68208444734 }, { "content": "#[derive(Debug)]\n\nenum NamingState {\n\n CreatedAt,\n\n IdxState(IdxState),\n\n}\n\n\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 70, "score": 61980.68208444734 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum DedupAction {\n\n /// The record should be allowed and logged normally.\n\n Allow,\n\n /// The record is the last consecutive duplicate to be allowed.\n\n ///\n\n /// Any following duplicates will be skipped until a different event is\n\n /// received (or the duplicates count overflows).\n\n AllowLastOfLeeway(usize),\n\n /// The record should be allowed, the last `N` records were skipped as\n\n /// consecutive duplicates.\n\n AllowAfterSkipped(usize),\n\n /// The record should be skipped because no more consecutive duplicates\n\n /// are 
allowed.\n\n Skip,\n\n}\n\n\n\nimpl Deduper {\n\n // Constructs a new [`Deduper`] that will skip duplicated entries after\n\n // some record has been received for the consecutive times specified by\n\n // `leeway`.\n", "file_path": "tests/test_multi_threaded_numbers_dedup.rs", "rank": 71, "score": 61980.597598638895 }, { "content": "enum MessageToCleanupThread {\n\n Act,\n\n Die,\n\n}\n", "file_path": "src/writers/file_log_writer/state.rs", "rank": 72, "score": 60919.11428086247 }, { "content": "fn main() {\n\n #[cfg(colors)]\n\n let format = flexi_logger::colored_detailed_format;\n\n #[cfg(not(colors))]\n\n let format = flexi_logger::detailed_format;\n\n\n\n flexi_logger::Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .format(format)\n\n .log_to_stdout()\n\n .filter(Box::new(DedupWriter::with_leeway(\n\n std::num::NonZeroUsize::new(2).unwrap(),\n\n )))\n\n .start()\n\n .unwrap();\n\n\n\n for i in 0..10 {\n\n log::info!(\"{}\", if i == 5 { \"bar\" } else { \"foo\" });\n\n }\n\n\n", "file_path": "examples/dedup.rs", "rank": 73, "score": 59095.14830291443 }, { "content": "fn main() {\n\n println!(\"Qualify flexi_logger\");\n\n\n\n // format\n\n run_command!(\"cargo\", \"fmt\");\n\n\n\n // Build in important variants\n\n std::fs::remove_file(\"Cargo.lock\").ok();\n\n run_command!(\"cargo\", \"+1.46.0\", \"build\", \"--no-default-features\");\n\n run_command!(\"cargo\", \"+1.46.0\", \"build\", \"--all-features\");\n\n\n\n std::fs::remove_file(\"Cargo.lock\").ok();\n\n run_command!(\"cargo\", \"build\");\n\n run_command!(\"cargo\", \"build\", \"--no-default-features\");\n\n run_command!(\"cargo\", \"build\", \"--no-default-features\", \"--features=atty\");\n\n run_command!(\"cargo\", \"build\", \"--all-features\");\n\n run_command!(\"cargo\", \"build\", \"--release\");\n\n run_command!(\"cargo\", \"build\", \"--release\", \"--all-features\");\n\n\n\n // Clippy in important variants\n", "file_path": "scripts/qualify.rs", "rank": 74, "score": 59095.14830291443 }, { 
"content": "fn main() {\n\n for pattern in &[\n\n \"./*.log\",\n\n \"./*.alerts\",\n\n \"./*.seclog\",\n\n \"./*logspec.toml\",\n\n \"./log_files/**/*.log\",\n\n \"./log_files/**/*.csv\",\n\n \"./log_files/**/*.zip\",\n\n \"./log_files/**/*.gz\",\n\n \"./test_spec/*.toml\",\n\n ] {\n\n for globresult in glob::glob(pattern).unwrap() {\n\n match globresult {\n\n Err(e) => eprintln!(\"Evaluating pattern {:?} produced error {}\", pattern, e),\n\n Ok(pathbuf) => {\n\n std::fs::remove_file(&pathbuf).unwrap();\n\n }\n\n }\n\n }\n", "file_path": "scripts/cleanup.rs", "rank": 75, "score": 59095.14830291443 }, { "content": "fn main() {\n\n // --------------------------------\n\n println!(\"flexi_logger\");\n\n flexi_logger::Logger::try_with_str(\"off\")\n\n .unwrap()\n\n .format(flexi_logger::detailed_format)\n\n .start()\n\n .unwrap();\n\n // --------------------------------\n\n // $> Set-Item -Path Env:RUST_LOG -Value \"trace\"\n\n // println!(\"env_logger\");\n\n // env_logger::init();\n\n // $> Set-Item -Path Env:RUST_LOG\n\n // --------------------------------\n\n let mut structs = Vec::new();\n\n for i in 0..100 {\n\n structs.push(Struct {\n\n data: [i as u8; 32],\n\n });\n\n }\n", "file_path": "examples/performance.rs", "rank": 76, "score": 59095.14830291443 }, { "content": "fn main() {\n\n // Check in important variants\n\n run_command!(\"cargo\", \"check\");\n\n run_command!(\"cargo\", \"check\", \"--all-features\");\n\n run_command!(\"cargo\", \"check\", \"--no-default-features\");\n\n run_command!(\"cargo\", \"check\", \"--features= specfile\");\n\n run_command!(\"cargo\", \"check\", \"--features= trc\");\n\n\n\n // Clippy in important variants\n\n run_command!(\"cargo\", \"clippy\", \"--all-features\", \"--\", \"-D\", \"warnings\");\n\n\n\n // doc\n\n #[rustfmt::skip]\n\n run_command!(\"cargo\", \"+nightly\", \"doc\", \"--all-features\", \"--no-deps\", \"--open\");\n\n\n\n // say goodbye\n\n println!(\"\\n> checks are done :-) Looks like you're ready to do the 
full qualification?\");\n\n}\n", "file_path": "scripts/check.rs", "rank": 77, "score": 59095.14830291443 }, { "content": "fn main() {\n\n #[cfg(not(feature = \"colors\"))]\n\n println!(\"Feature color is switched off\");\n\n\n\n #[cfg(feature = \"colors\")]\n\n {\n\n use ansi_term::Color;\n\n use atty::Stream::{Stderr, Stdout};\n\n\n\n for i in 0..=255 {\n\n println!(\"{}: {}\", i, Color::Fixed(i).paint(i.to_string()));\n\n }\n\n\n\n println!(\"\");\n\n\n\n if atty::is(Stdout) {\n\n println!(\n\n \"Stdout is considered a tty - \\\n\n flexi_logger::AdaptiveFormat will use colors\",\n\n );\n", "file_path": "examples/colors.rs", "rank": 78, "score": 59095.14830291443 }, { "content": "fn main() {\n\n // Build in important variants\n\n run_command!(\"cargo\", \"build\", \"--release\", \"--all-features\");\n\n\n\n // Clippy in important variants\n\n run_command!(\"cargo\", \"clippy\", \"--all-features\", \"--\", \"-D\", \"warnings\");\n\n\n\n // Run tests in important variants\n\n run_command!(\"cargo\", \"test\", \"--release\", \"--all-features\");\n\n run_script(\"cleanup\");\n\n\n\n // doc\n\n run_command!(\"cargo\", \"doc\", \"--all-features\", \"--no-deps\", \"--open\");\n\n\n\n // say goodbye\n\n println!(\"\\n> fast qualification is done :-) Looks like you're ready to do the full qualification?\");\n\n}\n", "file_path": "scripts/qualify_fast.rs", "rank": 79, "score": 57630.28266000423 }, { "content": "#[test]\n\n#[cfg(feature = \"textfilter\")]\n\nfn test_textfilter() {\n\n use flexi_logger::{default_format, FileSpec, LogSpecification, Logger};\n\n use log::*;\n\n\n\n use std::env;\n\n use std::fs::File;\n\n use std::io::{BufRead, BufReader};\n\n use std::path::Path;\n\n\n\n let logspec = LogSpecification::parse(\"info/Hello\").unwrap();\n\n Logger::with(logspec)\n\n .format(default_format)\n\n .print_message()\n\n .log_to_file(FileSpec::default().suppress_timestamp())\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", 
e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n", "file_path": "tests/test_textfilter.rs", "rank": 80, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test_mods() {\n\n Logger::try_with_str(\"trace\")\n\n .unwrap()\n\n .log_to_stdout()\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message\");\n\n trace!(\"This is a trace message\");\n\n}\n", "file_path": "tests/test_colors.rs", "rank": 81, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test_mods() {\n\n let handle = Logger::try_with_env_or_str(\n\n \"info, test_mods::mymod1=debug, test_mods::mymod2=error, test_mods::mymod1::mysubmod = off\",\n\n )\n\n .unwrap()\n\n .format(detailed_format)\n\n .log_to_file(FileSpec::default())\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n\n\n mymod1::test_traces();\n\n mymod2::test_traces();\n\n\n\n handle.validate_logs(&[\n", "file_path": "tests/test_mods.rs", "rank": 82, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test() {\n\n // more complex just to support validation:\n\n let (sec_writer, sec_handle) = SecWriter::new();\n\n let mut logger = Logger::try_with_str(\"info, fantasy = trace\")\n\n .unwrap()\n\n .format(detailed_format)\n\n .print_message()\n\n .log_to_file(FileSpec::default())\n\n .add_writer(\"Sec\", sec_writer)\n\n .add_writer(\"Alert\", alert_logger())\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n // Explicitly send 
logs to different loggers\n\n error!(target : \"{Sec}\", \"This is a security-relevant error message\");\n\n error!(target : \"{Sec,Alert}\", \"This is a security-relevant alert message\");\n\n error!(target : \"{Sec,Alert,_Default}\", \"This is a security-relevant alert and log message\");\n\n error!(target : \"{Alert}\", \"This is an alert\");\n\n\n\n // Nicer: use explicit macros\n", "file_path": "tests/test_multi_logger.rs", "rank": 83, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test_mods_off() {\n\n let handle: LoggerHandle = Logger::try_with_env_or_str(\"info, test_mods_off::mymod1=off\")\n\n .unwrap()\n\n .format(detailed_format)\n\n .log_to_file(FileSpec::default())\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n mymod1::test_traces();\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n\n\n handle.validate_logs(&[\n\n (\"ERROR\", \"test_mods\", \"error\"),\n\n (\"WARN\", \"test_mods\", \"warning\"),\n\n (\"INFO\", \"test_mods\", \"info\"),\n\n ]);\n", "file_path": "tests/test_mods_off.rs", "rank": 84, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test_recursion() {\n\n let logger = Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .format(detailed_format)\n\n .log_to_file(FileSpec::default())\n\n .duplicate_to_stderr(Duplicate::All)\n\n .duplicate_to_stdout(Duplicate::All)\n\n .print_message();\n\n #[cfg(feature = \"colors\")]\n\n let logger = logger.format_for_stderr(colored_detailed_format);\n\n #[cfg(not(feature = \"colors\"))]\n\n let logger = logger.format_for_stderr(detailed_format);\n\n #[cfg(feature = \"colors\")]\n\n let logger =\n\n logger.adaptive_format_for_stdout(AdaptiveFormat::Custom(my_format, my_colored_format));\n\n logger\n\n .start()\n\n .unwrap_or_else(|e| 
panic!(\"Logger initialization failed because: {}\", e));\n\n\n\n let dummy = Dummy();\n\n\n\n for _ in 0..10 {\n\n error!(\"This is an error message for {}\", dummy);\n\n warn!(\"This is a warning for {}\", dummy);\n\n info!(\"This is an info message for {}\", dummy);\n\n debug!(\"This is a debug message for {}\", dummy);\n\n trace!(\"This is a trace message for {}\", dummy);\n\n }\n\n}\n\n\n", "file_path": "tests/test_recursion.rs", "rank": 85, "score": 56272.25816097605 }, { "content": "#[test]\n\nfn test_readme_deps() {\n\n assert_markdown_deps_updated!(\"README.md\");\n\n}\n", "file_path": "tests/version_numbers.rs", "rank": 86, "score": 55009.79725824269 }, { "content": "#[cfg(feature = \"specfile_without_notification\")]\n\npub trait LogSpecSubscriber: 'static + Send {\n\n /// Apply a new `LogSpecification`.\n\n ///\n\n /// # Errors\n\n fn set_new_spec(&mut self, new_spec: LogSpecification) -> Result<(), FlexiLoggerError>;\n\n\n\n /// Provide the current log spec.\n\n ///\n\n /// # Errors\n\n fn initial_spec(&self) -> Result<LogSpecification, FlexiLoggerError>;\n\n}\n\n#[cfg(feature = \"specfile_without_notification\")]\n\nimpl LogSpecSubscriber for LoggerHandle {\n\n fn set_new_spec(&mut self, new_spec: LogSpecification) -> Result<(), FlexiLoggerError> {\n\n let max_level = new_spec.max_level();\n\n self.spec\n\n .write()\n\n .map_err(|_| FlexiLoggerError::Poison)?\n\n .update_from(new_spec);\n\n self.reconfigure(max_level);\n\n Ok(())\n\n }\n\n\n\n fn initial_spec(&self) -> Result<LogSpecification, FlexiLoggerError> {\n\n Ok((*self.spec.read().map_err(|_e| FlexiLoggerError::Poison)?).clone())\n\n }\n\n}\n", "file_path": "src/logger_handle.rs", "rank": 87, "score": 54013.07334675144 }, { "content": "#[test]\n\nfn multi_threaded() {\n\n // we use a special log line format that starts with a special string so that it is easier to\n\n // verify that all log lines are written correctly\n\n\n\n let start = Local::now();\n\n let directory = 
define_directory();\n\n let mut reconf_handle = Logger::try_with_str(\"debug\")\n\n .unwrap()\n\n .log_to_file(FileSpec::default().directory(directory.clone()))\n\n .format(test_format)\n\n .create_symlink(\"link_to_mt_log\")\n\n .duplicate_to_stderr(Duplicate::Info)\n\n .rotate(\n\n Criterion::Age(Age::Minute),\n\n Naming::Timestamps,\n\n Cleanup::Never,\n\n )\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n info!(\n", "file_path": "tests/test_multi_threaded_dates.rs", "rank": 88, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn parse_errors_logspec() {\n\n match LogSpecification::parse(\"info, foo=bar, fuzz=debug\")\n\n .err()\n\n .unwrap()\n\n {\n\n FlexiLoggerError::Parse(_, logspec) => {\n\n assert_eq!(\n\n logspec.module_filters(),\n\n LogSpecification::parse(\"info, fuzz=debug\")\n\n .unwrap()\n\n .module_filters()\n\n );\n\n #[cfg(feature = \"textfilter\")]\n\n assert!(logspec.text_filter().is_none());\n\n }\n\n _ => panic!(\"Wrong error from parsing (1)\"),\n\n }\n\n\n\n match LogSpecification::parse(\"info, ene mene dubbedene\")\n\n .err()\n", "file_path": "tests/test_parse_errors.rs", "rank": 89, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn test_mods() {\n\n let handle: LoggerHandle = Logger::try_with_env_or_str(\n\n \"info, test_windows_line_ending::mymod1=debug, test_windows_line_ending::mymod2=error\",\n\n )\n\n .unwrap()\n\n .format(detailed_format)\n\n .log_to_file(FileSpec::default())\n\n .use_windows_line_ending()\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n\n\n mymod1::test_traces();\n\n mymod2::test_traces();\n\n\n", "file_path": "tests/test_windows_line_ending.rs", "rank": 90, "score": 
53833.1556495964 }, { "content": "#[test]\n\nfn test_reconfigure_methods() {\n\n let mut logger = Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .log_to_file(FileSpec::default())\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n test_parse_new_spec(&mut logger);\n\n test_push_new_spec(&mut logger);\n\n validate_logs(&mut logger);\n\n}\n\n\n", "file_path": "tests/test_reconfigure_methods.rs", "rank": 91, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn test_age_or_size() {\n\n let directory = define_directory();\n\n Logger::try_with_str(\"trace\")\n\n .unwrap()\n\n .log_to_file(FileSpec::default().directory(directory.clone()))\n\n .duplicate_to_stderr(Duplicate::Info)\n\n .rotate(\n\n Criterion::AgeOrSize(Age::Second, 80),\n\n Naming::Numbers,\n\n Cleanup::Never,\n\n )\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n // info!(\"test correct rotation by age or size\");\n\n\n\n write_log_lines();\n\n\n\n verify_logs(&directory);\n\n}\n\n\n", "file_path": "tests/test_age_or_size.rs", "rank": 92, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn parse_errors_logger() {\n\n let result = Logger::try_with_str(\"info, foo=baz\");\n\n assert!(result.is_err());\n\n let error = result.err().unwrap();\n\n println!(\"err: {}\", error);\n\n\n\n Logger::try_with_str(\"info, foo=debug\")\n\n .unwrap()\n\n .start()\n\n .unwrap();\n\n info!(\"logging works\");\n\n info!(\"logging works\");\n\n}\n", "file_path": "tests/test_parse_errors.rs", "rank": 93, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn multi_threaded() {\n\n let logger = Logger::try_with_str(\"debug\")\n\n .unwrap()\n\n .log_to_stderr()\n\n .write_mode(WriteMode::BufferAndFlushWith(\n\n 1024,\n\n std::time::Duration::from_millis(600),\n\n ))\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n info!(\"create a huge number of log lines with a considerable 
number of threads\");\n\n for i in 0..50 {\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n info!(\"********** check delay of this log line ({}) **********\", i);\n\n }\n\n let start = Local::now();\n\n\n\n let worker_handles = start_worker_threads(NO_OF_THREADS);\n\n\n\n wait_for_workers_to_close(worker_handles);\n\n\n\n let delta = Local::now().signed_duration_since(start).num_milliseconds();\n\n debug!(\n\n \"Task executed with {} threads in {}ms.\",\n\n NO_OF_THREADS, delta\n\n );\n\n\n\n logger.shutdown();\n\n}\n\n\n", "file_path": "tests/test_multi_threaded_stderr.rs", "rank": 94, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn multi_threaded() {\n\n // we use a special log line format that starts with a special string so that it is easier to\n\n // verify that all log lines are written correctly\n\n\n\n let start = Local::now();\n\n let directory = define_directory();\n\n let logger = Logger::try_with_str(\"debug\")\n\n .unwrap()\n\n .log_to_file(\n\n FileSpec::default()\n\n .basename(\"test_mtn\")\n\n .directory(directory.clone()),\n\n )\n\n .write_mode(WriteMode::BufferAndFlush)\n\n .format(test_format)\n\n .duplicate_to_stderr(Duplicate::Info)\n\n .rotate(\n\n Criterion::Size(ROTATE_OVER_SIZE),\n\n Naming::Numbers,\n\n Cleanup::Never,\n", "file_path": "tests/test_multi_threaded_numbers.rs", "rank": 95, "score": 53833.1556495964 }, { "content": "#[test]\n\nfn multi_threaded() {\n\n // we use a special log line format that starts with a special string so that it is easier to\n\n // verify that all log lines are written correctly\n\n\n\n let start = Local::now();\n\n let directory = define_directory();\n\n let logger = Logger::try_with_str(\"debug\")\n\n .unwrap()\n\n .log_to_file(\n\n FileSpec::default()\n\n .basename(\"test_mtn\")\n\n .directory(directory.clone()),\n\n )\n\n .write_mode(WriteMode::BufferAndFlush)\n\n .format(test_format)\n\n .duplicate_to_stderr(Duplicate::Info)\n\n .rotate(\n\n 
Criterion::Size(ROTATE_OVER_SIZE),\n\n Naming::Numbers,\n\n Cleanup::Never,\n", "file_path": "tests/test_multi_threaded_numbers_dedup.rs", "rank": 96, "score": 52733.87027689764 }, { "content": "fn define_directory() -> String {\n\n format!(\n\n \"./log_files/age_or_size/{}\",\n\n Local::now().format(\"%Y-%m-%d_%H-%M-%S\")\n\n )\n\n}\n\n\n", "file_path": "tests/test_age_or_size.rs", "rank": 97, "score": 51745.76896293091 }, { "content": "#[test]\n\nfn test_custom_log_writer() {\n\n if let Some(value) = test_utils::dispatch(COUNT) {\n\n work(value)\n\n }\n\n}\n\n\n", "file_path": "tests/test_custom_log_writer.rs", "rank": 98, "score": 51704.55544512038 }, { "content": "#[test]\n\nfn test_default_file_and_writer() {\n\n let w = FileLogWriter::builder(FileSpec::default().discriminant(\"bar\"))\n\n .format(detailed_format)\n\n .try_build()\n\n .unwrap();\n\n\n\n let handle = Logger::try_with_str(\"info\")\n\n .unwrap()\n\n .log_to_file_and_writer(FileSpec::default().discriminant(\"foo\"), Box::new(w))\n\n .format(detailed_format)\n\n .start()\n\n .unwrap_or_else(|e| panic!(\"Logger initialization failed with {}\", e));\n\n\n\n error!(\"This is an error message\");\n\n warn!(\"This is a warning\");\n\n info!(\"This is an info message\");\n\n debug!(\"This is a debug message - you must not see it!\");\n\n trace!(\"This is a trace message - you must not see it!\");\n\n\n\n handle.validate_logs(&[\n", "file_path": "tests/test_default_file_and_writer.rs", "rank": 99, "score": 51704.55544512038 } ]
Rust
src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
ohno418/rust
395a09c3dafe0c7838c9ca41d2b47bb5e79a5b6d
use clippy_utils::diagnostics::span_lint_and_sugg; use if_chain::if_chain; use rustc_ast::ast::{BindingMode, Lifetime, Mutability, Param, PatKind, Path, TyKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::kw; use rustc_span::Span; declare_clippy_lint! { #[clippy::version = "1.47.0"] pub NEEDLESS_ARBITRARY_SELF_TYPE, complexity, "type of `self` parameter is already by default `Self`" } declare_lint_pass!(NeedlessArbitrarySelfType => [NEEDLESS_ARBITRARY_SELF_TYPE]); enum Mode { Ref(Option<Lifetime>), Value, } fn check_param_inner(cx: &EarlyContext<'_>, path: &Path, span: Span, binding_mode: &Mode, mutbl: Mutability) { if_chain! { if let [segment] = &path.segments[..]; if segment.ident.name == kw::SelfUpper; then { let mut applicability = Applicability::MachineApplicable; let self_param = match (binding_mode, mutbl) { (Mode::Ref(None), Mutability::Mut) => "&mut self".to_string(), (Mode::Ref(Some(lifetime)), Mutability::Mut) => { if lifetime.ident.span.from_expansion() { applicability = Applicability::HasPlaceholders; "&'_ mut self".to_string() } else { format!("&{} mut self", &lifetime.ident.name) } }, (Mode::Ref(None), Mutability::Not) => "&self".to_string(), (Mode::Ref(Some(lifetime)), Mutability::Not) => { if lifetime.ident.span.from_expansion() { applicability = Applicability::HasPlaceholders; "&'_ self".to_string() } else { format!("&{} self", &lifetime.ident.name) } }, (Mode::Value, Mutability::Mut) => "mut self".to_string(), (Mode::Value, Mutability::Not) => "self".to_string(), }; span_lint_and_sugg( cx, NEEDLESS_ARBITRARY_SELF_TYPE, span, "the type of the `self` parameter does not need to be arbitrary", "consider to change this parameter to", self_param, applicability, ) } } } impl EarlyLintPass for NeedlessArbitrarySelfType { fn check_param(&mut self, cx: &EarlyContext<'_>, p: &Param) { if !p.is_self() || p.span.from_expansion() { return; } 
match &p.ty.kind { TyKind::Path(None, path) => { if let PatKind::Ident(BindingMode::ByValue(mutbl), _, _) = p.pat.kind { check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Value, mutbl); } }, TyKind::Rptr(lifetime, mut_ty) => { if_chain! { if let TyKind::Path(None, path) = &mut_ty.ty.kind; if let PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, _) = p.pat.kind; then { check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Ref(*lifetime), mut_ty.mutbl); } } }, _ => {}, } } }
use clippy_utils::diagnostics::span_lint_and_sugg; use if_chain::if_chain; use rustc_ast::ast::{BindingMode, Lifetime, Mutability, Param, PatKind, Path, TyKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::kw; use rustc_span::Span; declare_clippy_lint! { #[clippy::version = "1.47.0"] pub NEEDLESS_ARBITRARY_SELF_TYPE, complexity, "type of `self` parameter is already by default `Self`" } declare_lint_pass!(NeedlessArbitrarySelfType => [NEEDLESS_ARBITRARY_SELF_TYPE]); enum Mode { Ref(Option<Lifetime>), Value, } fn check_param_inner(cx: &EarlyContext<'_>, path: &Path, span: Span, binding_mode: &Mode, mutbl: Mutability) { if_chain! { if let [segment] = &path.segments[..]; if segment.ident.name == kw::SelfUpper; then { let mut applicability = Applicability::MachineApplicable; let self_param = match (binding_mode, mutbl) { (Mode::Ref(None), Mutability::Mut) => "&mut self".to_string(), (Mode::Ref(Some(lifetime)), Mutability::Mut) => { if lifetime.ident.span.from_expansion() { applicability = Applicability::HasPlaceholders; "&'_ mut self".to_string() } else { format!("&{} mut self", &lifetime.ident.name) } }, (Mode::Ref(None), Mutability::Not) => "&self".to_string(), (Mode::Ref(Some(lifetime)), Mutability::Not) => { if lifetime.ident.span.from_expansion() { applicability = Applicability::HasPlaceholders; "&'_ self".to_string() } else { format!("&{} self", &lifetime.ident.name) } }, (Mode::Value, Mutability::Mut) => "mut self".to_string(), (Mode::Value, Mutability::Not) => "self
e of the `self` parameter does not need to be arbitrary", "consider to change this parameter to", self_param, applicability, ) } } } impl EarlyLintPass for NeedlessArbitrarySelfType { fn check_param(&mut self, cx: &EarlyContext<'_>, p: &Param) { if !p.is_self() || p.span.from_expansion() { return; } match &p.ty.kind { TyKind::Path(None, path) => { if let PatKind::Ident(BindingMode::ByValue(mutbl), _, _) = p.pat.kind { check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Value, mutbl); } }, TyKind::Rptr(lifetime, mut_ty) => { if_chain! { if let TyKind::Path(None, path) = &mut_ty.ty.kind; if let PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, _) = p.pat.kind; then { check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Ref(*lifetime), mut_ty.mutbl); } } }, _ => {}, } } }
".to_string(), }; span_lint_and_sugg( cx, NEEDLESS_ARBITRARY_SELF_TYPE, span, "the typ
random
[]
Rust
storage/aptosdb/src/pruner/mod.rs
JoshLind/aptos-core
a58a87106bde10eccb8954128cd31700b76e95c1
mod db_pruner; pub(crate) mod db_sub_pruner; pub(crate) mod event_store; mod ledger_store; pub(crate) mod state_store; pub(crate) mod transaction_store; pub mod utils; pub(crate) mod worker; use crate::metrics::APTOS_STORAGE_PRUNE_WINDOW; use aptos_config::config::StoragePrunerConfig; use aptos_infallible::Mutex; use crate::{EventStore, LedgerStore, TransactionStore}; use aptos_types::transaction::Version; use schemadb::DB; use std::{ sync::{ mpsc::{channel, Sender}, Arc, }, thread::JoinHandle, }; use worker::{Command, Worker}; #[derive(Debug)] pub(crate) struct Pruner { state_store_prune_window: Version, ledger_prune_window: Version, worker_thread: Option<JoinHandle<()>>, command_sender: Mutex<Sender<Command>>, #[allow(dead_code)] least_readable_version: Arc<Mutex<Vec<Version>>>, last_version_sent_to_pruners: Arc<Mutex<Version>>, pruning_batch_size: usize, latest_version: Arc<Mutex<Version>>, } #[cfg(test)] pub enum PrunerIndex { StateStorePrunerIndex, LedgerPrunerIndex, } impl Pruner { pub fn new( db: Arc<DB>, storage_pruner_config: StoragePrunerConfig, transaction_store: Arc<TransactionStore>, ledger_store: Arc<LedgerStore>, event_store: Arc<EventStore>, ) -> Self { let (command_sender, command_receiver) = channel(); let least_readable_version = Arc::new(Mutex::new(vec![0, 0, 0, 0, 0])); let worker_progress_clone = Arc::clone(&least_readable_version); APTOS_STORAGE_PRUNE_WINDOW.set( storage_pruner_config .state_store_prune_window .expect("State store pruner window is required") as i64, ); let worker = Worker::new( db, transaction_store, ledger_store, event_store, command_receiver, least_readable_version, storage_pruner_config.pruning_batch_size as u64, ); let worker_thread = std::thread::Builder::new() .name("aptosdb_pruner".into()) .spawn(move || worker.work()) .expect("Creating pruner thread should succeed."); Self { state_store_prune_window: storage_pruner_config .state_store_prune_window .expect("State store prune window must be specified"), 
ledger_prune_window: storage_pruner_config .ledger_prune_window .expect("Default prune window must be specified"), worker_thread: Some(worker_thread), command_sender: Mutex::new(command_sender), least_readable_version: worker_progress_clone, last_version_sent_to_pruners: Arc::new(Mutex::new(0)), pruning_batch_size: storage_pruner_config.pruning_batch_size, latest_version: Arc::new(Mutex::new(0)), } } pub fn get_state_store_pruner_window(&self) -> Version { self.state_store_prune_window } pub fn maybe_wake_pruner(&self, latest_version: Version) { *self.latest_version.lock() = latest_version; if latest_version >= *self.last_version_sent_to_pruners.lock() + self.pruning_batch_size as u64 { self.wake_pruner(latest_version) } } fn wake_pruner(&self, latest_version: Version) { let least_readable_state_store_version = latest_version.saturating_sub(self.state_store_prune_window); let least_readable_ledger_version = latest_version.saturating_sub(self.ledger_prune_window); self.command_sender .lock() .send(Command::Prune { target_db_versions: vec![ least_readable_state_store_version, least_readable_ledger_version, ], }) .expect("Receiver should not destruct prematurely."); } #[cfg(test)] pub fn wake_and_wait( &self, latest_version: Version, pruner_index: usize, ) -> anyhow::Result<()> { use std::{ thread::sleep, time::{Duration, Instant}, }; self.maybe_wake_pruner(latest_version); if latest_version > self.state_store_prune_window || latest_version > self.ledger_prune_window { let least_readable_state_store_version = latest_version - self.state_store_prune_window; const TIMEOUT: Duration = Duration::from_secs(10); let end = Instant::now() + TIMEOUT; while Instant::now() < end { if *self .least_readable_version .lock() .get(pruner_index) .unwrap() >= least_readable_state_store_version { return Ok(()); } sleep(Duration::from_millis(1)); } anyhow::bail!("Timeout waiting for pruner worker."); } Ok(()) } } impl Drop for Pruner { fn drop(&mut self) { self.command_sender .lock() 
.send(Command::Quit) .expect("Receiver should not destruct."); self.worker_thread .take() .expect("Worker thread must exist.") .join() .expect("Worker thread should join peacefully."); } }
mod db_pruner; pub(crate) mod db_sub_pruner; pub(crate) mod event_store; mod ledger_store; pub(crate) mod state_store; pub(crate) mod transaction_store; pub mod utils; pub(crate) mod worker; use crate::metrics::APTOS_STORAGE_PRUNE_WINDOW; use aptos_config::config::StoragePrunerConfig; use aptos_infallible::Mutex; use crate::{EventStore, LedgerStore, TransactionStore}; use aptos_types::transaction::Version; use schemadb::DB; use std::{ sync::{ mpsc::{channel, Sender}, Arc, }, thread::JoinHandle, }; use worker::{Command, Worker}; #[derive(Debug)] pub(crate) struct Pruner { state_store_prune_window: Version, ledger_prune_window: Version, worker_thread: Option<JoinHandle<()>>, command_sender: Mutex<Sender<Command>>, #[allow(dead_code)] least_readable_version: Arc<Mutex<Vec<Version>>>, last_version_sent_to_pruners: Arc<Mutex<Version>>, pruning_batch_size: usize, latest_version: Arc<Mutex<Version>>, } #[cfg(test)] pub enum PrunerIndex { StateStorePrunerIndex, LedgerPrunerIndex, } impl Pruner { pub fn new( db: Arc<DB>, storage_pruner_config: StoragePrunerConfig, transaction_store: Arc<TransactionStore>, ledger_store: Arc<LedgerStore>, event_store: Arc<EventStore>, ) -> Self { let (command_sender, command_receiver) = channel(); let least_readable_version = Arc::new(Mutex::new(vec![0, 0, 0, 0, 0])); let worker_progress_clone = Arc::clone(&least_readable_version); APTOS_STORAGE_PRUNE_WINDOW.set( storage_pruner_config .state_store_prune_window .expect("State store pruner window is required") as i64, ); let worker = Worker::new( db, transaction_store, ledger_store, event_store, command_receiver, least_readable_version, storage_pruner_config.pruning_batch_size as u64, ); let worker_thread = std::thread::Builder::new() .name("aptosdb_pruner".into()) .spawn(move || worker.work()) .expect("Creating pruner thread should succeed."); Self { state_store_prune_window: storage_pruner_config .state_store_prune_window .expect("State store prune window must be specified"), 
ledger_prune_window: storage_pruner_config .ledger_prune_window .expect("Default prune window must be specified"), worker_thread: Some(worker_thread), command_sender: Mutex::new(command_sender), least_readable_version: worker_progress_clone, last_version_sent_to_pruners: Arc::new(Mutex::new(0)), pruning_batch_size: storage_pruner_config.pruning_batch_size, latest_version: Arc::new(Mutex::new(0)), } } pub fn get_state_store_pruner_window(&self) -> Version { self.state_store_prune_window } pub fn maybe_wake_pruner(&self, latest_version: Version) { *self.latest_version.lock() = latest_version; if latest_version >= *self.last_version_sent_to_pruners.lock() + self.pruning_batch_size as u64 { self.wake_pruner(latest_version) } } fn wake_pruner(&self, latest_version: Version) { let least_readable_state_store_version = latest_version.saturating_sub(self.state_store_prune_window); let least_readable_ledger_version = latest_version.saturating_sub(self.ledger_prune_window); self.command_sender .lock() .send(Command::Prune { target_db_versions: vec![ least_readable_state_store_version, least_readable_ledger_version, ], }) .expect("Receiver should not destruct prematurely."); } #[cfg(test)] pub fn wake_and_wait( &self, latest_version: Version, pruner_index: usize, ) -> anyhow::Result<()> { use std::{ thread::sleep, time::{Duration, Instant}, }; self.maybe_wake_pruner(latest_version); if latest_version > self.state_store_prune_window || latest_version > self.ledger_prune_window {
} impl Drop for Pruner { fn drop(&mut self) { self.command_sender .lock() .send(Command::Quit) .expect("Receiver should not destruct."); self.worker_thread .take() .expect("Worker thread must exist.") .join() .expect("Worker thread should join peacefully."); } }
let least_readable_state_store_version = latest_version - self.state_store_prune_window; const TIMEOUT: Duration = Duration::from_secs(10); let end = Instant::now() + TIMEOUT; while Instant::now() < end { if *self .least_readable_version .lock() .get(pruner_index) .unwrap() >= least_readable_state_store_version { return Ok(()); } sleep(Duration::from_millis(1)); } anyhow::bail!("Timeout waiting for pruner worker."); } Ok(()) }
function_block-function_prefix_line
[ { "content": "/// Fetches the latest synced version from the specified storage\n\npub fn fetch_latest_synced_version(storage: Arc<dyn DbReader>) -> Result<Version, Error> {\n\n let latest_transaction_info =\n\n storage\n\n .get_latest_transaction_info_option()\n\n .map_err(|error| {\n\n Error::StorageError(format!(\n\n \"Failed to get the latest transaction info from storage: {:?}\",\n\n error\n\n ))\n\n })?;\n\n latest_transaction_info\n\n .ok_or_else(|| Error::StorageError(\"Latest transaction info is missing!\".into()))\n\n .map(|(latest_synced_version, _)| latest_synced_version)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/utils.rs", "rank": 0, "score": 435773.0580160788 }, { "content": "pub fn build_empty_tree() -> Arc<BlockStore> {\n\n let (initial_data, storage) = EmptyStorage::start_for_testing();\n\n Arc::new(BlockStore::new(\n\n storage,\n\n initial_data,\n\n Arc::new(EmptyStateComputer),\n\n 10, // max pruned blocks in mem\n\n Arc::new(SimulatedTimeService::new()),\n\n 10,\n\n ))\n\n}\n\n\n\npub struct TreeInserter {\n\n signer: ValidatorSigner,\n\n block_store: Arc<BlockStore>,\n\n}\n\n\n\nimpl TreeInserter {\n\n pub fn default() -> Self {\n\n Self::new(ValidatorSigner::random(None))\n", "file_path": "consensus/src/test_utils/mod.rs", "rank": 1, "score": 408985.7130246173 }, { "content": "pub fn new() -> (Sender, Receiver) {\n\n aptos_channel::new(QueueStyle::LIFO, 1, None)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{peer::DisconnectReason, transport::ConnectionMetadata};\n\n use aptos_config::network_id::NetworkContext;\n\n use futures::{executor::block_on, future::FutureExt, stream::StreamExt};\n\n\n\n fn send_new_peer(sender: &mut Sender, connection: ConnectionMetadata) {\n\n let peer_id = connection.remote_peer_id;\n\n let notif =\n\n ConnectionNotification::NewPeer(connection, NetworkContext::mock_with_peer_id(peer_id));\n\n sender.push(peer_id, notif).unwrap()\n\n }\n\n\n\n fn 
send_lost_peer(\n\n sender: &mut Sender,\n", "file_path": "network/src/peer_manager/conn_notifs_channel.rs", "rank": 2, "score": 404901.42035774165 }, { "content": "pub fn prune_state_store(\n\n db: Arc<DB>,\n\n least_readable_version: Version,\n\n target_version: Version,\n\n max_versions: usize,\n\n) -> anyhow::Result<Version> {\n\n let indices =\n\n StaleNodeIndicesByVersionIterator::new(&db, least_readable_version, target_version)?\n\n .take(max_versions) // Iterator<Item = Result<Vec<StaleNodeIndex>>>\n\n .collect::<anyhow::Result<Vec<_>>>()? // now Vec<Vec<StaleNodeIndex>>\n\n .into_iter()\n\n .flatten()\n\n .collect::<Vec<_>>();\n\n\n\n if indices.is_empty() {\n\n Ok(least_readable_version)\n\n } else {\n\n let _timer = APTOS_STORAGE_OTHER_TIMERS_SECONDS\n\n .with_label_values(&[\"pruner_commit\"])\n\n .start_timer();\n\n let new_least_readable_version = indices.last().expect(\"Should exist.\").stale_since_version;\n\n let mut batch = SchemaBatch::new();\n\n indices\n\n .into_iter()\n\n .try_for_each(|index| batch.delete::<JellyfishMerkleNodeSchema>(&index.node_key))?;\n\n db.write_schemas(batch)?;\n\n Ok(new_least_readable_version)\n\n }\n\n}\n\n\n", "file_path": "storage/aptosdb/src/pruner/state_store/mod.rs", "rank": 3, "score": 404047.18289719697 }, { "content": "pub fn set_version(version_type: VersionType, version: u64) {\n\n VERSION\n\n .with_label_values(&[version_type.as_str()])\n\n .set(version as i64)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v1/src/counters.rs", "rank": 4, "score": 403666.3152707736 }, { "content": "/// Initializes all relevant metric gauges (e.g., after a reboot\n\n/// or after an account state snapshot has been restored).\n\npub fn initialize_sync_version_gauges(storage: Arc<dyn DbReader>) -> Result<(), Error> {\n\n let highest_synced_version = fetch_latest_synced_version(storage)?;\n\n let metrics = [\n\n metrics::StorageSynchronizerOperations::AppliedTransactionOutputs,\n\n 
metrics::StorageSynchronizerOperations::ExecutedTransactions,\n\n metrics::StorageSynchronizerOperations::Synced,\n\n ];\n\n\n\n for metric in metrics {\n\n metrics::set_gauge(\n\n &metrics::STORAGE_SYNCHRONIZER_OPERATIONS,\n\n metric.get_label(),\n\n highest_synced_version,\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Handles a notification for committed transactions by\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/utils.rs", "rank": 5, "score": 400137.4244758663 }, { "content": "fn verify_events_not_in_store(version: Version, event_store: &Arc<EventStore>) {\n\n assert!(event_store\n\n .get_events_by_version(version)\n\n .unwrap()\n\n .is_empty());\n\n}\n\n\n", "file_path": "storage/aptosdb/src/pruner/event_store/test.rs", "rank": 6, "score": 380894.2801154504 }, { "content": "fn test_get_last_version_before_timestamp_impl(new_block_events: Vec<(Version, ContractEvent)>) {\n\n let tmp_dir = TempPath::new();\n\n let db = AptosDB::new_for_test(&tmp_dir);\n\n let store = &db.event_store;\n\n // error on no blocks\n\n assert!(store.get_last_version_before_timestamp(1000, 2000).is_err());\n\n\n\n // save events to db\n\n let mut cs = ChangeSet::new();\n\n new_block_events.iter().for_each(|(ver, event)| {\n\n store\n\n .put_events(*ver as u64, &[event.clone()], &mut cs)\n\n .unwrap();\n\n });\n\n store.db.write_schemas(cs.batch);\n\n\n\n let ledger_version = new_block_events.last().unwrap().0;\n\n\n\n // error on no block before timestamp\n\n let (first_block_version, first_event) = new_block_events.first().unwrap();\n", "file_path": "storage/aptosdb/src/event_store/test.rs", "rank": 7, "score": 378996.18884069077 }, { "content": "pub fn get_version(version_type: VersionType) -> u64 {\n\n VERSION.with_label_values(&[version_type.as_str()]).get() as u64\n\n}\n\n\n\npub enum VersionType {\n\n /// Version of latest ledger info committed.\n\n Committed,\n\n /// Highest known version or version proceeding it\n\n Highest,\n\n /// Version of most recent txn 
that was synced (even if it is not backed by an LI)\n\n Synced,\n\n /// Current version a node is trying to catch up to usually within the current epoch\n\n Target,\n\n}\n\n\n\nimpl VersionType {\n\n pub fn as_str(&self) -> &'static str {\n\n match self {\n\n VersionType::Committed => \"committed\",\n\n VersionType::Highest => \"highest\",\n", "file_path": "state-sync/state-sync-v1/src/counters.rs", "rank": 8, "score": 375413.12129919755 }, { "content": "/// Similar to `mpsc::channel`, `new` creates a pair of `Sender` and `Receiver`\n\npub fn new<T>(size: usize, gauge: &IntGauge) -> (Sender<T>, Receiver<T>) {\n\n gauge.set(0);\n\n let (sender, receiver) = mpsc::channel(size);\n\n (\n\n Sender {\n\n inner: sender,\n\n gauge: gauge.clone(),\n\n },\n\n Receiver {\n\n inner: receiver,\n\n gauge: gauge.clone(),\n\n },\n\n )\n\n}\n\n\n", "file_path": "crates/channel/src/lib.rs", "rank": 9, "score": 373990.75609548145 }, { "content": "/// Creates a new ledger info with signatures at the specified version\n\npub fn create_ledger_info_at_version(version: Version) -> LedgerInfoWithSignatures {\n\n let block_info = BlockInfo::new(0, 0, HashValue::zero(), HashValue::zero(), version, 0, None);\n\n let ledger_info = LedgerInfo::new(block_info, HashValue::random());\n\n LedgerInfoWithSignatures::new(ledger_info, BTreeMap::new())\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 10, "score": 372854.4254265118 }, { "content": "/// Returns a random u64 with a value between 0 and `max_value` - 1 (inclusive).\n\npub fn create_random_u64(max_value: u64) -> u64 {\n\n create_range_random_u64(0, max_value)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 11, "score": 371823.02168089716 }, { "content": "/// Returns a transaction to change the keys for the given account.\n\npub fn rotate_key_txn(sender: &Account, new_key_hash: Vec<u8>, seq_num: u64) -> SignedTransaction {\n\n sender\n\n 
.transaction()\n\n .payload(encode_rotate_authentication_key_script_function(\n\n new_key_hash,\n\n ))\n\n .sequence_number(seq_num)\n\n .sign()\n\n}\n\n\n", "file_path": "aptos-move/e2e-tests/src/common_transactions.rs", "rank": 12, "score": 371667.53373414197 }, { "content": "pub fn arb_ledger_infos_with_sigs() -> impl Strategy<Value = Vec<LedgerInfoWithSignatures>> {\n\n (\n\n any_with::<AccountInfoUniverse>(3),\n\n vec((any::<LedgerInfoWithSignaturesGen>(), 1..50usize), 1..50),\n\n )\n\n .prop_map(|(mut universe, gens)| {\n\n let ledger_infos_with_sigs: Vec<_> = gens\n\n .into_iter()\n\n .map(|(ledger_info_gen, block_size)| {\n\n ledger_info_gen.materialize(&mut universe, block_size)\n\n })\n\n .collect();\n\n assert_eq!(get_first_epoch(&ledger_infos_with_sigs), 0);\n\n ledger_infos_with_sigs\n\n })\n\n}\n\n\n", "file_path": "storage/aptosdb/src/ledger_store/ledger_info_test_utils.rs", "rank": 13, "score": 371566.20838583395 }, { "content": "pub fn start_local_backup_service(db: Arc<AptosDB>) -> (Runtime, u16) {\n\n let port = get_available_port();\n\n let rt = start_backup_service(SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port), db);\n\n (rt, port)\n\n}\n", "file_path": "storage/backup/backup-cli/src/utils/test_utils.rs", "rank": 14, "score": 370705.47554831655 }, { "content": "/// Returns a transaction to change the keys for the given account.\n\npub fn raw_rotate_key_txn(sender: &Account, new_key_hash: Vec<u8>, seq_num: u64) -> RawTransaction {\n\n sender\n\n .transaction()\n\n .payload(encode_rotate_authentication_key_script_function(\n\n new_key_hash,\n\n ))\n\n .sequence_number(seq_num)\n\n .raw()\n\n}\n", "file_path": "aptos-move/e2e-tests/src/common_transactions.rs", "rank": 15, "score": 367432.52681021 }, { "content": "pub fn new_test<T>(size: usize) -> (Sender<T>, Receiver<T>) {\n\n let gauge = IntGauge::new(\"TEST_COUNTER\", \"test\").unwrap();\n\n new(size, &gauge)\n\n}\n", "file_path": "crates/channel/src/lib.rs", "rank": 16, "score": 
367432.0930953649 }, { "content": "/// A useful utility function to instantiate all db pruners.\n\npub fn create_db_pruners(\n\n db: Arc<DB>,\n\n transaction_store: Arc<TransactionStore>,\n\n ledger_store: Arc<LedgerStore>,\n\n event_store: Arc<EventStore>,\n\n) -> Vec<Mutex<Arc<dyn DBPruner + Send + Sync>>> {\n\n vec![\n\n Mutex::new(Arc::new(StateStorePruner::new(\n\n Arc::clone(&db),\n\n 0,\n\n Instant::now(),\n\n ))),\n\n Mutex::new(Arc::new(LedgerPruner::new(\n\n Arc::clone(&db),\n\n Arc::clone(&transaction_store),\n\n Arc::clone(&event_store),\n\n Arc::clone(&ledger_store),\n\n ))),\n\n ]\n\n}\n", "file_path": "storage/aptosdb/src/pruner/utils.rs", "rank": 17, "score": 365370.8753466739 }, { "content": "pub fn tmp_db_empty() -> (TempPath, Arc<AptosDB>) {\n\n let tmpdir = TempPath::new();\n\n let db = Arc::new(AptosDB::new_for_test(&tmpdir));\n\n\n\n (tmpdir, db)\n\n}\n\n\n", "file_path": "storage/backup/backup-cli/src/utils/test_utils.rs", "rank": 18, "score": 359105.2350505814 }, { "content": "pub fn test_execution_with_storage_impl() -> Arc<AptosDB> {\n\n let (genesis, validators) = vm_genesis::test_genesis_change_set_and_validators(Some(1));\n\n let genesis_txn = Transaction::GenesisTransaction(WriteSetPayload::Direct(genesis));\n\n let genesis_key = &vm_genesis::GENESIS_KEYPAIR.0;\n\n\n\n let path = aptos_temppath::TempPath::new();\n\n path.create_as_dir().unwrap();\n\n let (aptos_db, db, executor, waypoint) = create_db_and_executor(path.path(), &genesis_txn);\n\n\n\n let parent_block_id = executor.committed_block_id();\n\n let signer = aptos_types::validator_signer::ValidatorSigner::new(\n\n validators[0].data.address,\n\n validators[0].key.clone(),\n\n );\n\n\n\n // This generates accounts that do not overlap with genesis\n\n let seed = [3u8; 32];\n\n let mut rng = ::rand::rngs::StdRng::from_seed(seed);\n\n\n\n let privkey1 = Ed25519PrivateKey::generate(&mut rng);\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 
19, "score": 355202.77752552583 }, { "content": "pub fn encode_mint_transaction(sender: AccountAddress, amount: u64) -> Transaction {\n\n encode_transaction(sender, encode_mint_program(amount))\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 20, "score": 351357.27717748494 }, { "content": "// Generate the value_for txn_idx and incarnation in arc.\n\nfn arc_value_for(txn_idx: usize, incarnation: usize) -> Arc<Vec<usize>> {\n\n // Generate a Vec deterministically based on txn_idx and incarnation.\n\n Arc::new(value_for(txn_idx, incarnation))\n\n}\n\n\n", "file_path": "aptos-move/mvhashmap/src/unit_tests/mod.rs", "rank": 21, "score": 350855.9005851154 }, { "content": "pub fn placeholder_sync_info() -> SyncInfo {\n\n SyncInfo::new(\n\n certificate_for_genesis(),\n\n certificate_for_genesis(),\n\n None,\n\n None,\n\n )\n\n}\n\n\n", "file_path": "consensus/src/test_utils/mod.rs", "rank": 22, "score": 345686.59959357814 }, { "content": "pub fn prepare_safety_rules() -> (Arc<Mutex<MetricsSafetyRules>>, Vec<ValidatorSigner>) {\n\n let num_nodes = 1;\n\n\n\n // environment setup\n\n let (signers, validators) = random_validator_verifier(num_nodes, None, false);\n\n let validator_set = (&validators).into();\n\n let signer = &signers[0];\n\n\n\n let waypoint =\n\n Waypoint::new_epoch_boundary(&LedgerInfo::mock_genesis(Some(validator_set))).unwrap();\n\n\n\n let safety_storage = PersistentSafetyStorage::initialize(\n\n Storage::from(aptos_secure_storage::InMemoryStorage::new()),\n\n signer.author(),\n\n signer.private_key().clone(),\n\n Ed25519PrivateKey::generate_for_testing(),\n\n waypoint,\n\n true,\n\n );\n\n let (_, storage) = MockStorage::start_for_testing((&validators).into());\n\n\n\n let safety_rules_manager = SafetyRulesManager::new_local(safety_storage, false, false);\n\n let mut safety_rules = MetricsSafetyRules::new(safety_rules_manager.client(), storage);\n\n safety_rules.perform_initialize().unwrap();\n\n\n\n 
(Arc::new(Mutex::new(safety_rules)), signers)\n\n}\n\n\n", "file_path": "consensus/src/experimental/tests/test_utils.rs", "rank": 23, "score": 342594.38275576127 }, { "content": "pub fn health_check_route(health_aptos_db: Arc<dyn DbReader>) -> BoxedFilter<(impl Reply,)> {\n\n warp::path!(\"-\" / \"healthy\")\n\n .and(warp::path::end())\n\n .and(warp::query().map(move |params: HealthCheckParams| params))\n\n .and(warp::any().map(move || health_aptos_db.clone()))\n\n .and(warp::any().map(SystemTime::now))\n\n .and_then(health_check)\n\n .boxed()\n\n}\n\n\n\nasync fn health_check(\n\n params: HealthCheckParams,\n\n db: Arc<dyn DbReader>,\n\n now: SystemTime,\n\n) -> Result<Box<dyn warp::Reply>, warp::Rejection> {\n\n if let Some(duration) = params.duration_secs {\n\n let ledger_info = db\n\n .get_latest_ledger_info()\n\n .map_err(|_| reject::custom(HealthCheckError))?;\n\n let timestamp = ledger_info.ledger_info().timestamp_usecs();\n\n\n\n check_latest_ledger_info_timestamp(duration, timestamp, now)\n\n .map_err(|_| reject::custom(HealthCheckError))?;\n\n }\n\n Ok(Box::new(\"aptos-node:ok\"))\n\n}\n\n\n", "file_path": "api/src/health_check.rs", "rank": 24, "score": 341495.883684624 }, { "content": "/// Fetches the latest epoch state from the specified storage\n\npub fn fetch_latest_epoch_state(storage: Arc<dyn DbReader>) -> Result<EpochState, Error> {\n\n let startup_info = fetch_startup_info(storage)?;\n\n Ok(startup_info.get_epoch_state().clone())\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/utils.rs", "rank": 25, "score": 341041.8741901669 }, { "content": "pub fn arb_smt_correctness_case() -> impl Strategy<Value = Vec<Action>> {\n\n (\n\n hash_set(any::<HashValue>(), 1..100), // keys\n\n vec(\n\n prop_oneof![\n\n vec(\n\n // txns\n\n vec(\n\n // txn updates\n\n (any::<Index>(), any::<Vec<u8>>()),\n\n 1..4,\n\n ),\n\n 1..10,\n\n ),\n\n Just(vec![]),\n\n ],\n\n 1..10,\n\n ),\n\n )\n\n .prop_map(|(keys, commit_or_execute)| {\n", 
"file_path": "storage/scratchpad/src/sparse_merkle/test_utils/proptest_helpers.rs", "rank": 26, "score": 338225.65793845843 }, { "content": "#[inline]\n\npub fn pick_slice_idxs(max: usize, indexes: &[impl AsRef<PropIndex>]) -> Vec<usize> {\n\n pick_idxs(max, indexes, indexes.len())\n\n}\n\n\n\n/// Wrapper for `proptest`'s [`Index`][proptest::sample::Index] that allows `AsRef` to work.\n\n///\n\n/// There is no blanket `impl<T> AsRef<T> for T`, so `&[PropIndex]` doesn't work with\n\n/// `&[impl AsRef<PropIndex>]` (unless an impl gets added upstream). `Index` does.\n\n#[derive(Arbitrary, Clone, Copy, Debug)]\n\npub struct Index(PropIndex);\n\n\n\nimpl AsRef<PropIndex> for Index {\n\n fn as_ref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Deref for Index {\n\n type Target = PropIndex;\n\n\n\n fn deref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n", "file_path": "crates/aptos-proptest-helpers/src/lib.rs", "rank": 27, "score": 335822.3479931226 }, { "content": "/// Given `node`, an index in an in-order traversal of a perfect binary tree,\n\n/// what order would the node be visited in in post-order traversal?\n\n/// For example, consider this tree of in-order nodes.\n\n///\n\n/// ```text\n\n/// 3\n\n/// / \\\n\n/// / \\\n\n/// 1 5\n\n/// / \\ / \\\n\n/// 0 2 4 6\n\n/// ```\n\n///\n\n/// The post-order ordering of the nodes is:\n\n/// ```text\n\n/// 6\n\n/// / \\\n\n/// / \\\n\n/// 2 5\n\n/// / \\ / \\\n\n/// 0 1 3 4\n\n/// ```\n\n///\n\n/// post_order_index(1) == 2\n\n/// post_order_index(4) == 3\n\npub fn inorder_to_postorder(node: u64) -> u64 {\n\n let children = children_of_node(node);\n\n let left_nodes = nodes_to_left_of(node);\n\n\n\n children + left_nodes\n\n}\n\n\n", "file_path": "types/src/proof/position/mod.rs", "rank": 28, "score": 335242.70054587943 }, { "content": "pub fn get_last_version(ledger_infos_with_sigs: &[LedgerInfoWithSignatures]) -> Version {\n\n ledger_infos_with_sigs\n\n .last()\n\n .unwrap()\n\n .ledger_info()\n\n 
.version()\n\n}\n\n\n", "file_path": "storage/aptosdb/src/ledger_store/ledger_info_test_utils.rs", "rank": 29, "score": 330437.07232116454 }, { "content": "pub fn test_smt_correctness_impl(input: Vec<Action>) {\n\n let mut naive_q = VecDeque::new();\n\n naive_q.push_back(NaiveSmt::new::<AccountStateBlob>(&[]));\n\n let mut serial_q = VecDeque::new();\n\n serial_q.push_back(SparseMerkleTree::new(*SPARSE_MERKLE_PLACEHOLDER_HASH));\n\n let mut updater_q = VecDeque::new();\n\n updater_q.push_back(SparseMerkleTree::new(*SPARSE_MERKLE_PLACEHOLDER_HASH));\n\n\n\n for action in input {\n\n match action {\n\n Action::Commit => {\n\n if naive_q.len() > 1 {\n\n naive_q.pop_front();\n\n serial_q.pop_front();\n\n updater_q.pop_front();\n\n }\n\n }\n\n Action::Execute(block) => {\n\n let updates = block\n\n .iter()\n", "file_path": "storage/scratchpad/src/sparse_merkle/test_utils/proptest_helpers.rs", "rank": 30, "score": 326927.3362154163 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn random_serializable_struct() -> impl Strategy<Value = TestAptosCrypto> {\n\n (String::arbitrary()).prop_map(TestAptosCrypto).no_shrink()\n\n}\n", "file_path": "crates/aptos-crypto/src/test_utils.rs", "rank": 31, "score": 325040.1417415999 }, { "content": "pub fn postorder_to_inorder(mut node: u64) -> u64 {\n\n // The number of nodes in a full binary tree with height `n` is `2^n - 1`.\n\n let mut full_binary_size = !0u64;\n\n let mut bitmap = 0u64;\n\n for i in (0..64).rev() {\n\n if node >= full_binary_size {\n\n node -= full_binary_size;\n\n bitmap |= 1 << i;\n\n }\n\n full_binary_size >>= 1;\n\n }\n\n let level = node as u32;\n\n let pos = bitmap >> level;\n\n Position::from_level_and_pos(level, pos).to_inorder_index()\n\n}\n", "file_path": "types/src/proof/position/mod.rs", "rank": 32, "score": 324427.78091219894 }, { "content": "pub fn fixed_retry_strategy(delay_ms: u64, tries: usize) -> impl Iterator<Item = Duration> {\n\n 
FixedDelay::new(delay_ms).take(tries)\n\n}\n\n\n", "file_path": "crates/aptos-retrier/src/lib.rs", "rank": 33, "score": 323988.28549677774 }, { "content": "pub fn arb_metadata_files() -> impl Strategy<Value = Vec<(ShellSafeName, TextLine)>> {\n\n hash_map(any::<ShellSafeName>(), any::<TextLine>(), 0..10)\n\n .prop_map(HashMap::into_iter)\n\n .prop_map(Iterator::collect)\n\n}\n", "file_path": "storage/backup/backup-cli/src/storage/test_util.rs", "rank": 34, "score": 323292.1455686319 }, { "content": "/// Returns a [`Strategy`] that provides a variety of balances (or transfer amounts) over a roughly\n\n/// logarithmic distribution.\n\npub fn log_balance_strategy(max_balance: u64) -> impl Strategy<Value = u64> {\n\n // The logarithmic distribution is modeled by uniformly picking from ranges of powers of 2.\n\n let minimum = gas_costs::TXN_RESERVED.next_power_of_two();\n\n assert!(max_balance >= minimum, \"minimum to make sense\");\n\n let mut strategies = vec![];\n\n // Balances below and around the minimum are interesting but don't cover *every* power of 2,\n\n // just those starting from the minimum.\n\n let mut lower_bound: u64 = 0;\n\n let mut upper_bound: u64 = minimum;\n\n loop {\n\n strategies.push(lower_bound..upper_bound);\n\n if upper_bound >= max_balance {\n\n break;\n\n }\n\n lower_bound = upper_bound;\n\n upper_bound = (upper_bound * 2).min(max_balance);\n\n }\n\n Union::new(strategies)\n\n}\n\n\n", "file_path": "aptos-move/e2e-tests/src/account_universe.rs", "rank": 35, "score": 321435.7772401847 }, { "content": "fn run_tests<V>(db: Arc<MockTreeStore<V>>, btree: &BTreeMap<HashValue, V>, version: Version)\n\nwhere\n\n V: crate::TestValue,\n\n{\n\n {\n\n let iter =\n\n JellyfishMerkleIterator::new(Arc::clone(&db), version, HashValue::zero()).unwrap();\n\n assert_eq!(\n\n iter.collect::<Result<Vec<_>>>().unwrap(),\n\n btree.clone().into_iter().collect::<Vec<_>>(),\n\n );\n\n }\n\n\n\n for i in 0..btree.len() {\n\n {\n\n let iter = 
JellyfishMerkleIterator::new_by_index(Arc::clone(&db), version, i).unwrap();\n\n assert_eq!(\n\n iter.collect::<Result<Vec<_>>>().unwrap(),\n\n btree.clone().into_iter().skip(i).collect::<Vec<_>>(),\n\n );\n", "file_path": "storage/jellyfish-merkle/src/iterator/iterator_test.rs", "rank": 36, "score": 318164.2579236025 }, { "content": "pub fn create_db_and_executor<P: AsRef<std::path::Path>>(\n\n path: P,\n\n genesis: &Transaction,\n\n) -> (\n\n Arc<AptosDB>,\n\n DbReaderWriter,\n\n BlockExecutor<AptosVM>,\n\n Waypoint,\n\n) {\n\n let (db, dbrw) = DbReaderWriter::wrap(AptosDB::new_for_test(&path));\n\n let waypoint = bootstrap_genesis::<AptosVM>(&dbrw, genesis).unwrap();\n\n let executor = BlockExecutor::new(dbrw.clone());\n\n\n\n (db, dbrw, executor, waypoint)\n\n}\n\n\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 37, "score": 314929.6940616647 }, { "content": "pub fn get_first_epoch(ledger_infos_with_sigs: &[LedgerInfoWithSignatures]) -> u64 {\n\n ledger_infos_with_sigs\n\n .first()\n\n .unwrap()\n\n .ledger_info()\n\n .epoch()\n\n}\n\n\n", "file_path": "storage/aptosdb/src/ledger_store/ledger_info_test_utils.rs", "rank": 38, "score": 314146.1517393566 }, { "content": "pub fn get_last_epoch(ledger_infos_with_sigs: &[LedgerInfoWithSignatures]) -> u64 {\n\n ledger_infos_with_sigs.last().unwrap().ledger_info().epoch()\n\n}\n\n\n", "file_path": "storage/aptosdb/src/ledger_store/ledger_info_test_utils.rs", "rank": 39, "score": 314146.1517393566 }, { "content": "pub fn test_append_many_impl(batches: Vec<Vec<HashValue>>) {\n\n let mut store = MockHashStore::new();\n\n\n\n let mut leaves: Vec<HashValue> = Vec::new();\n\n let mut num_leaves = 0;\n\n for hashes in batches.iter() {\n\n let (root_hash, writes) = TestAccumulator::append(&store, num_leaves, hashes).unwrap();\n\n store.put_many(&writes);\n\n\n\n num_leaves += hashes.len() as LeafCount;\n\n leaves.extend(hashes.iter());\n\n let expected_root_hash = 
store.verify(&leaves).unwrap();\n\n assert_eq!(root_hash, expected_root_hash);\n\n assert_eq!(\n\n TestAccumulator::get_root_hash(&store, num_leaves).unwrap(),\n\n expected_root_hash\n\n );\n\n }\n\n}\n\n\n", "file_path": "storage/accumulator/src/test_helpers.rs", "rank": 40, "score": 312635.3067452719 }, { "content": "fn verify_event_store_pruner(events: Vec<Vec<ContractEvent>>) {\n\n let tmp_dir = TempPath::new();\n\n let aptos_db = AptosDB::new_for_test(&tmp_dir);\n\n let event_store = &aptos_db.event_store;\n\n let mut cs = ChangeSet::new();\n\n let num_versions = events.len();\n\n let pruner = Pruner::new(\n\n Arc::clone(&aptos_db.db),\n\n StoragePrunerConfig {\n\n state_store_prune_window: Some(0),\n\n ledger_prune_window: Some(0),\n\n pruning_batch_size: 1,\n\n },\n\n Arc::clone(&aptos_db.transaction_store),\n\n Arc::clone(&aptos_db.ledger_store),\n\n Arc::clone(&aptos_db.event_store),\n\n );\n\n\n\n // Write events to DB\n\n for (version, events_for_version) in events.iter().enumerate() {\n", "file_path": "storage/aptosdb/src/pruner/event_store/test.rs", "rank": 41, "score": 311944.48234347394 }, { "content": "/// Sets the gauge with the specific label to the given value\n\npub fn set_gauge(gauge: &Lazy<IntGaugeVec>, label: &str, value: u64) {\n\n gauge.with_label_values(&[label]).set(value as i64);\n\n}\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/metrics.rs", "rank": 42, "score": 308750.6090164495 }, { "content": "/// Increments the gauge with the specific label by the given delta\n\npub fn increment_gauge(gauge: &Lazy<IntGaugeVec>, label: &str, delta: u64) {\n\n gauge.with_label_values(&[label]).add(delta as i64);\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/metrics.rs", "rank": 43, "score": 308750.60901644954 }, { "content": "/// Decrements the gauge with the specific label by the given delta\n\npub fn decrement_gauge(gauge: &Lazy<IntGaugeVec>, label: &str, delta: u64) {\n\n 
gauge.with_label_values(&[label]).sub(delta as i64);\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/metrics.rs", "rank": 44, "score": 308750.60901644954 }, { "content": "pub fn start_backup_service(address: SocketAddr, db: Arc<AptosDB>) -> Runtime {\n\n let backup_handler = db.get_backup_handler();\n\n let routes = get_routes(backup_handler);\n\n\n\n let runtime = Builder::new_multi_thread()\n\n .thread_name(\"backup\")\n\n .enable_all()\n\n .build()\n\n .expect(\"[backup] failed to create runtime\");\n\n\n\n // Ensure that we actually bind to the socket first before spawning the\n\n // server tasks. This helps in tests to prevent races where a client attempts\n\n // to make a request before the server task is actually listening on the\n\n // socket.\n\n //\n\n // Note: we need to enter the runtime context first to actually bind, since\n\n // tokio TcpListener can only be bound inside a tokio context.\n\n let _guard = runtime.enter();\n\n let server = warp::serve(routes).bind(address);\n\n runtime.handle().spawn(server);\n", "file_path": "storage/backup/backup-service/src/lib.rs", "rank": 45, "score": 308060.1840286103 }, { "content": "/// Returns the value currently held by the pending chunk counter\n\nfn load_pending_data_chunks(pending_data_chunks: Arc<AtomicU64>) -> u64 {\n\n pending_data_chunks.load(Ordering::Relaxed)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/storage_synchronizer.rs", "rank": 46, "score": 307778.4866720723 }, { "content": "/// Create a new Channel and returns the two ends of the channel.\n\npub fn new<K: Eq + Hash + Clone, M>(\n\n queue_style: QueueStyle,\n\n max_queue_size_per_key: usize,\n\n counters: Option<&'static IntCounterVec>,\n\n) -> (Sender<K, M>, Receiver<K, M>) {\n\n let max_queue_size_per_key =\n\n NonZeroUsize!(max_queue_size_per_key, \"aptos_channel cannot be of size 0\");\n\n let shared_state = Arc::new(Mutex::new(SharedState {\n\n internal_queue: 
PerKeyQueue::new(queue_style, max_queue_size_per_key, counters),\n\n waker: None,\n\n num_senders: 1,\n\n receiver_dropped: false,\n\n stream_terminated: false,\n\n }));\n\n let shared_state_clone = Arc::clone(&shared_state);\n\n (\n\n Sender { shared_state },\n\n Receiver {\n\n shared_state: shared_state_clone,\n\n },\n\n )\n\n}\n", "file_path": "crates/channel/src/aptos_channel.rs", "rank": 47, "score": 305695.3596926426 }, { "content": "fn create_database() -> Arc<RwLock<DbReaderWriter>> {\n\n // Generate a genesis change set\n\n let (genesis, _) = vm_genesis::test_genesis_change_set_and_validators(Some(1));\n\n\n\n // Create test aptos database\n\n let db_path = aptos_temppath::TempPath::new();\n\n assert_ok!(db_path.create_as_dir());\n\n let (_, db_rw) = DbReaderWriter::wrap(AptosDB::new_for_test(db_path.path()));\n\n\n\n // Bootstrap the genesis transaction\n\n let genesis_txn = Transaction::GenesisTransaction(WriteSetPayload::Direct(genesis));\n\n assert_ok!(bootstrap_genesis::<AptosVM>(&db_rw, &genesis_txn));\n\n\n\n Arc::new(RwLock::new(db_rw))\n\n}\n", "file_path": "state-sync/inter-component/event-notifications/src/tests.rs", "rank": 48, "score": 301109.87590144 }, { "content": "pub fn arb_state_sync_msg() -> impl Strategy<Value = StateSyncMessage> {\n\n prop_oneof![\n\n (any::<GetChunkRequest>()).prop_map(|chunk_request| {\n\n StateSyncMessage::GetChunkRequest(Box::new(chunk_request))\n\n }),\n\n (any::<GetChunkResponse>()).prop_map(|chunk_response| {\n\n StateSyncMessage::GetChunkResponse(Box::new(chunk_response))\n\n })\n\n ]\n\n}\n\n\n\nimpl Arbitrary for GetChunkRequest {\n\n type Parameters = ();\n\n fn arbitrary_with(_args: ()) -> Self::Strategy {\n\n (\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<TargetType>(),\n\n )\n", "file_path": "state-sync/state-sync-v1/src/fuzzing.rs", "rank": 49, "score": 300835.1810099647 }, { "content": "fn create_test_ledger_info_with_sigs(epoch: u64, version: u64) -> LedgerInfoWithSignatures {\n\n 
// Create a mock ledger info with signatures\n\n let ledger_info = LedgerInfo::new(\n\n BlockInfo::new(\n\n epoch,\n\n 0,\n\n HashValue::zero(),\n\n HashValue::zero(),\n\n version,\n\n 0,\n\n None,\n\n ),\n\n HashValue::zero(),\n\n );\n\n LedgerInfoWithSignatures::new(ledger_info, BTreeMap::new())\n\n}\n\n\n\n/// This is a mock implementation of the `DbReader` trait.\n", "file_path": "state-sync/storage-service/server/src/tests.rs", "rank": 50, "score": 299478.88017968193 }, { "content": "pub fn encode_mint_program(amount: u64) -> Script {\n\n let argument = TransactionArgument::U64(amount);\n\n Script::new(vec![], vec![], vec![argument])\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 51, "score": 297533.6162715199 }, { "content": "pub fn test_save_blocks_impl(input: Vec<(Vec<TransactionToCommit>, LedgerInfoWithSignatures)>) {\n\n let tmp_dir = TempPath::new();\n\n let db = AptosDB::new_for_test(&tmp_dir);\n\n\n\n let num_batches = input.len();\n\n let mut cur_ver = 0;\n\n let mut all_committed_txns = vec![];\n\n for (batch_idx, (txns_to_commit, ledger_info_with_sigs)) in input.iter().enumerate() {\n\n db.save_transactions(\n\n txns_to_commit,\n\n cur_ver, /* first_version */\n\n Some(ledger_info_with_sigs),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n db.ledger_store.get_latest_ledger_info().unwrap(),\n\n *ledger_info_with_sigs\n\n );\n\n verify_committed_transactions(\n", "file_path": "storage/aptosdb/src/aptosdb_test.rs", "rank": 52, "score": 296077.3239122289 }, { "content": "/// Returns a random (but non-zero) u64 with a value between 1 and `max_value` - 1 (inclusive).\n\nfn create_non_zero_random_u64(max_value: u64) -> u64 {\n\n create_range_random_u64(1, max_value)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 53, "score": 295235.7803777125 }, { "content": "fn gen_events(sender: AccountAddress) -> Vec<ContractEvent> {\n\n vec![ContractEvent::new(\n\n 
EventKey::new_from_address(&sender, 0),\n\n 0,\n\n TypeTag::Vector(Box::new(TypeTag::U8)),\n\n b\"event_data\".to_vec(),\n\n )]\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 54, "score": 294878.2241641582 }, { "content": "pub fn test_proof_impl((batch1, batch2): (Vec<HashValue>, Vec<HashValue>)) {\n\n let total_leaves = batch1.len() + batch2.len();\n\n let mut store = MockHashStore::new();\n\n\n\n // insert all leaves in two batches\n\n let (root_hash1, writes1) = TestAccumulator::append(&store, 0, &batch1).unwrap();\n\n store.put_many(&writes1);\n\n let (root_hash2, writes2) =\n\n TestAccumulator::append(&store, batch1.len() as LeafCount, &batch2).unwrap();\n\n store.put_many(&writes2);\n\n\n\n // verify proofs for all leaves towards current root\n\n verify(&store, total_leaves as u64, root_hash2, &batch1, 0);\n\n verify(\n\n &store,\n\n total_leaves as u64,\n\n root_hash2,\n\n &batch2,\n\n batch1.len() as u64,\n\n );\n\n\n\n // verify proofs for all leaves of a subtree towards subtree root\n\n verify(&store, batch1.len() as u64, root_hash1, &batch1, 0);\n\n}\n\n\n", "file_path": "storage/accumulator/src/test_helpers.rs", "rank": 55, "score": 294384.5886875746 }, { "content": "/// Fetches the latest synced ledger info from the specified storage\n\npub fn fetch_latest_synced_ledger_info(\n\n storage: Arc<dyn DbReader>,\n\n) -> Result<LedgerInfoWithSignatures, Error> {\n\n let startup_info = fetch_startup_info(storage)?;\n\n Ok(startup_info.latest_ledger_info)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/utils.rs", "rank": 56, "score": 294311.4962046263 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn uniform_keypair_strategy<Priv, Pub>() -> impl Strategy<Value = KeyPair<Priv, Pub>>\n\nwhere\n\n Pub: Serialize + for<'a> From<&'a Priv>,\n\n Priv: Serialize + Uniform,\n\n{\n\n // The no_shrink is because keypairs should be fixed -- shrinking would cause a different\n\n // keypair to be 
generated, which appears to not be very useful.\n\n any::<[u8; 32]>()\n\n .prop_map(|seed| {\n\n let mut rng = StdRng::from_seed(seed);\n\n KeyPair::<Priv, Pub>::generate(&mut rng)\n\n })\n\n .no_shrink()\n\n}\n\n\n\n/// Produces a uniformly random keypair from a seed and the user can alter this sleed slightly.\n\n/// Useful for circumstances where you want two disjoint keypair generations that may interact with\n\n/// each other.\n", "file_path": "crates/aptos-crypto/src/test_utils.rs", "rank": 57, "score": 294173.38264397904 }, { "content": "struct StaleNodeIndicesByVersionIterator<'a> {\n\n inner: Peekable<SchemaIterator<'a, StaleNodeIndexSchema>>,\n\n target_least_readable_version: Version,\n\n}\n\n\n\nimpl<'a> StaleNodeIndicesByVersionIterator<'a> {\n\n fn new(\n\n db: &'a DB,\n\n least_readable_version: Version,\n\n target_least_readable_version: Version,\n\n ) -> anyhow::Result<Self> {\n\n let mut iter = db.iter::<StaleNodeIndexSchema>(ReadOptions::default())?;\n\n iter.seek(&least_readable_version)?;\n\n\n\n Ok(Self {\n\n inner: iter.peekable(),\n\n target_least_readable_version,\n\n })\n\n }\n\n\n", "file_path": "storage/aptosdb/src/pruner/state_store/mod.rs", "rank": 58, "score": 294163.88892165286 }, { "content": "pub fn tmp_db_with_random_content() -> (\n\n TempPath,\n\n Arc<AptosDB>,\n\n Vec<(Vec<TransactionToCommit>, LedgerInfoWithSignatures)>,\n\n) {\n\n let (tmpdir, db) = tmp_db_empty();\n\n let mut cur_ver = 0;\n\n let blocks = ValueGenerator::new().generate(arb_blocks_to_commit());\n\n for (txns_to_commit, ledger_info_with_sigs) in &blocks {\n\n db.save_transactions(\n\n txns_to_commit,\n\n cur_ver, /* first_version */\n\n Some(ledger_info_with_sigs),\n\n )\n\n .unwrap();\n\n cur_ver += txns_to_commit.len() as u64;\n\n }\n\n\n\n (tmpdir, db, blocks)\n\n}\n\n\n", "file_path": "storage/backup/backup-cli/src/utils/test_utils.rs", "rank": 59, "score": 293845.5904312115 }, { "content": "pub fn test_append_empty_impl(leaves: Vec<HashValue>) {\n\n let 
mut store = MockHashStore::new();\n\n\n\n let (root_hash, writes) = TestAccumulator::append(&store, 0, &leaves).unwrap();\n\n store.put_many(&writes);\n\n\n\n let (root_hash2, writes2) =\n\n TestAccumulator::append(&store, leaves.len() as LeafCount, &[]).unwrap();\n\n\n\n assert_eq!(root_hash, root_hash2);\n\n assert!(writes2.is_empty());\n\n}\n", "file_path": "storage/accumulator/src/test_helpers.rs", "rank": 60, "score": 293730.91087387386 }, { "content": "pub fn encode_reconfiguration_transaction(sender: AccountAddress) -> Transaction {\n\n let raw_transaction =\n\n RawTransaction::new_write_set(sender, 0, WriteSet::default(), ChainId::test());\n\n\n\n let privkey = Ed25519PrivateKey::generate_for_testing();\n\n Transaction::UserTransaction(\n\n raw_transaction\n\n .sign(&privkey, privkey.public_key())\n\n .expect(\"Failed to sign raw transaction.\")\n\n .into_inner(),\n\n )\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 61, "score": 293508.8162787792 }, { "content": "pub fn create_new_validator_set(\n\n validator_infos: Vec<ValidatorInfo>,\n\n) -> (ValidatorSet, Vec<ValidatorSigner>) {\n\n let num_validators = validator_infos.len();\n\n let (signers, _) = random_validator_verifier(num_validators, None, true);\n\n let new_validator_infos = validator_infos\n\n .iter()\n\n .enumerate()\n\n .map(|(index, validator_info)| {\n\n ValidatorInfo::new(\n\n signers[index].author(),\n\n validator_info.consensus_voting_power(),\n\n validator_info.config().clone(),\n\n )\n\n })\n\n .collect::<Vec<ValidatorInfo>>();\n\n (ValidatorSet::new(new_validator_infos), signers)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v1/tests/test_harness.rs", "rank": 62, "score": 293230.5160323422 }, { "content": "/// Returns a random u64 within the range, [min, max)\n\nfn create_range_random_u64(min_value: u64, max_value: u64) -> u64 {\n\n let mut rng = OsRng;\n\n rng.gen_range(min_value..max_value)\n\n}\n\n\n", "file_path": 
"state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 63, "score": 293192.2103846513 }, { "content": "pub fn impl_enum_privatekey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let res = quote! {\n\n impl aptos_crypto::PrivateKey for #name {\n\n type PublicKeyMaterial = #pkt;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crates/aptos-crypto-derive/src/unions.rs", "rank": 64, "score": 292109.68187635625 }, { "content": "pub fn impl_enum_verifyingkey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n let res = quote! {\n\n impl aptos_crypto::VerifyingKey for #name {\n\n type SigningKeyMaterial = #pkt;\n\n type SignatureMaterial = #st;\n\n }\n\n impl aptos_crypto::private::Sealed for #name {}\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crates/aptos-crypto-derive/src/unions.rs", "rank": 65, "score": 292109.68187635625 }, { "content": "pub fn impl_enum_publickey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let mut from_match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n from_match_arms.extend(quote! {\n\n #pkt::#variant_ident(key) => #name::#variant_ident(key.into()),\n\n });\n\n }\n\n let mut res = quote! 
{\n\n impl From<&#pkt> for #name {\n\n fn from(public_key: &#pkt) -> Self {\n\n match public_key {\n\n #from_match_arms\n\n }\n", "file_path": "crates/aptos-crypto-derive/src/unions.rs", "rank": 66, "score": 292109.68187635625 }, { "content": "pub fn impl_enum_signingkey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n\n\n let mut match_arms_arbitrary = quote! {};\n\n let mut match_struct_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_struct_arms.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign(message)),\n\n });\n\n match_arms_arbitrary.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign_arbitrary_message(message)),\n\n });\n", "file_path": "crates/aptos-crypto-derive/src/unions.rs", "rank": 67, "score": 292109.68187635625 }, { "content": "pub fn impl_enum_signature(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let priv_kt: syn::Type = private_key_type.parse().unwrap();\n\n let pub_kt: syn::Type = public_key_type.parse().unwrap();\n\n let mut res = impl_enum_tryfrom(name, variants);\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! 
{\n\n (#name::#variant_ident(sig), #pub_kt::#variant_ident(pk)) => {\n\n sig.verify_arbitrary_msg(message, pk)\n\n }\n\n })\n", "file_path": "crates/aptos-crypto-derive/src/unions.rs", "rank": 68, "score": 292109.68187635625 }, { "content": "fn gen_mint_writeset(sender: AccountAddress, balance: u64, seqnum: u64) -> WriteSet {\n\n let mut write_set = WriteSetMut::default();\n\n write_set.push((\n\n StateKey::AccessPath(balance_ap(sender)),\n\n WriteOp::Value(balance.to_le_bytes().to_vec()),\n\n ));\n\n write_set.push((\n\n StateKey::AccessPath(seqnum_ap(sender)),\n\n WriteOp::Value(seqnum.to_le_bytes().to_vec()),\n\n ));\n\n write_set.freeze().expect(\"mint writeset should be valid\")\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 69, "score": 291928.1715099035 }, { "content": "/// Sets the gauge with the specific label and value\n\npub fn set_gauge(counter: &Lazy<IntGaugeVec>, label: String, value: u64) {\n\n counter.with_label_values(&[&label]).set(value as i64);\n\n}\n\n\n", "file_path": "state-sync/aptos-data-client/src/aptosnet/metrics.rs", "rank": 70, "score": 291828.98526726745 }, { "content": "pub fn set_timestamp(timestamp_type: TimestampType, time_as_usecs: u64) {\n\n TIMESTAMP\n\n .with_label_values(&[timestamp_type.as_str()])\n\n .set((time_as_usecs / 1000) as i64)\n\n}\n\n\n\npub enum TimestampType {\n\n /// Current ledger committed timestamp\n\n Committed,\n\n /// Current computers clock\n\n Real,\n\n /// Current ledger synced timestamp\n\n Synced,\n\n}\n\n\n\nimpl TimestampType {\n\n pub fn as_str(&self) -> &'static str {\n\n match self {\n\n TimestampType::Committed => \"committed\",\n\n TimestampType::Real => \"real\",\n\n TimestampType::Synced => \"synced\",\n\n }\n\n }\n\n}\n\n\n", "file_path": "state-sync/state-sync-v1/src/counters.rs", "rank": 71, "score": 291023.8452575241 }, { "content": "pub fn test_consistency_proof_impl((batch1, batch2): (Vec<HashValue>, Vec<HashValue>)) {\n\n let mut store = 
MockHashStore::new();\n\n let empty_in_mem_acc = InMemoryAccumulator::default();\n\n\n\n let (root_hash1, writes1) = TestAccumulator::append(&store, 0, &batch1).unwrap();\n\n store.put_many(&writes1);\n\n let proof1 =\n\n TestAccumulator::get_consistency_proof(&store, batch1.len() as LeafCount, 0).unwrap();\n\n let in_mem_acc1 = empty_in_mem_acc\n\n .append_subtrees(proof1.subtrees(), batch1.len() as LeafCount)\n\n .unwrap();\n\n assert_eq!(root_hash1, in_mem_acc1.root_hash());\n\n\n\n let (root_hash2, writes2) =\n\n TestAccumulator::append(&store, batch1.len() as LeafCount, &batch2).unwrap();\n\n store.put_many(&writes2);\n\n let proof2 = TestAccumulator::get_consistency_proof(\n\n &store,\n\n (batch1.len() + batch2.len()) as LeafCount,\n\n batch1.len() as LeafCount,\n\n )\n\n .unwrap();\n\n let in_mem_acc2 = in_mem_acc1\n\n .append_subtrees(proof2.subtrees(), batch2.len() as LeafCount)\n\n .unwrap();\n\n assert_eq!(root_hash2, in_mem_acc2.root_hash());\n\n}\n\n\n", "file_path": "storage/accumulator/src/test_helpers.rs", "rank": 72, "score": 290914.0440456858 }, { "content": "/// Initializes the Aptos logger for tests\n\npub fn initialize_logger() {\n\n aptos_logger::Logger::builder()\n\n .is_async(false)\n\n .level(Level::Info)\n\n .build();\n\n}\n\n\n\n/// Returns a data notification from the given stream listener\n\npub async fn get_data_notification(\n\n stream_listener: &mut DataStreamListener,\n\n) -> Result<DataNotification, Error> {\n\n if let Ok(data_notification) = timeout(\n\n Duration::from_secs(MAX_NOTIFICATION_TIMEOUT_SECS),\n\n stream_listener.select_next_some(),\n\n )\n\n .await\n\n {\n\n Ok(data_notification)\n\n } else {\n\n Err(Error::UnexpectedErrorEncountered(\n\n \"Timed out waiting for a data notification!\".into(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 73, "score": 289063.12019499193 }, { "content": "/// This produces the genesis block\n\npub fn 
genesis_strategy() -> impl Strategy<Value = Block> {\n\n Just(Block::make_genesis_block())\n\n}\n\n\n\nprop_compose! {\n\n /// This produces an unmoored block, with arbitrary parent & QC ancestor\n\n pub fn unmoored_block(ancestor_id_strategy: impl Strategy<Value = HashValue>)(\n\n ancestor_id in ancestor_id_strategy,\n\n )(\n\n block in new_proposal(\n\n ancestor_id,\n\n Round::arbitrary(),\n\n proptests::arb_signer(),\n\n certificate_for_genesis(),\n\n )\n\n ) -> Block {\n\n block\n\n }\n\n}\n\n\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 74, "score": 287180.2890028834 }, { "content": "/// Offers the genesis block.\n\npub fn leaf_strategy() -> impl Strategy<Value = Block> {\n\n genesis_strategy().boxed()\n\n}\n\n\n\nprop_compose! {\n\n /// This produces a block with an invalid id (and therefore signature)\n\n /// given a valid block\n\n pub fn fake_id(block_strategy: impl Strategy<Value = Block>)\n\n (fake_id in HashValue::arbitrary(),\n\n block in block_strategy) -> Block {\n\n Block {\n\n id: fake_id,\n\n block_data: BlockData::new_proposal(\n\n block.payload().unwrap().clone(),\n\n block.author().unwrap(),\n\n block.round(),\n\n aptos_infallible::duration_since_epoch().as_micros() as u64,\n\n block.quorum_cert().clone(),\n\n ),\n\n signature: Some(block.signature().unwrap().clone()),\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 75, "score": 287180.2890028834 }, { "content": "pub fn set_up(\n\n path: &impl AsRef<Path>,\n\n ledger_infos_with_sigs: &[LedgerInfoWithSignatures],\n\n) -> AptosDB {\n\n let db = AptosDB::new_for_test(path);\n\n let store = &db.ledger_store;\n\n\n\n // Write LIs to DB.\n\n let mut cs = ChangeSet::new();\n\n ledger_infos_with_sigs\n\n .iter()\n\n .map(|info| store.put_ledger_info(info, &mut cs))\n\n .collect::<anyhow::Result<Vec<_>>>()\n\n .unwrap();\n\n store.db.write_schemas(cs.batch).unwrap();\n\n 
store.set_latest_ledger_info(ledger_infos_with_sigs.last().unwrap().clone());\n\n db\n\n}\n", "file_path": "storage/aptosdb/src/ledger_store/ledger_info_test_utils.rs", "rank": 76, "score": 287107.2186284191 }, { "content": "fn check_chunk_request(message: Message, known_version: u64, target_version: Option<u64>) {\n\n let chunk_request: StateSyncMessage = bcs::from_bytes(&message.mdata).unwrap();\n\n match chunk_request {\n\n StateSyncMessage::GetChunkRequest(chunk_request) => {\n\n assert_eq!(chunk_request.known_version, known_version);\n\n assert_eq!(chunk_request.target.version(), target_version);\n\n }\n\n StateSyncMessage::GetChunkResponse(_) => {\n\n panic!(\"Received chunk response but expecting chunk request!\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "state-sync/state-sync-v1/tests/integration_tests.rs", "rank": 77, "score": 286841.31203914015 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn arb_aptosnet_addr() -> impl Strategy<Value = NetworkAddress> {\n\n let arb_transport_protos = prop_oneof![\n\n any::<u16>().prop_map(|port| vec![Protocol::Memory(port)]),\n\n any::<(Ipv4Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip4(addr), Protocol::Tcp(port)]),\n\n any::<(Ipv6Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip6(addr), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns(name), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns4(name), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns6(name), Protocol::Tcp(port)]),\n\n ];\n\n let arb_aptosnet_protos = any::<(x25519::PublicKey, u8)>()\n\n .prop_map(|(pubkey, hs)| vec![Protocol::NoiseIK(pubkey), Protocol::Handshake(hs)]);\n\n\n\n (arb_transport_protos, arb_aptosnet_protos).prop_map(\n\n |(mut transport_protos, mut aptosnet_protos)| {\n\n transport_protos.append(&mut aptosnet_protos);\n", "file_path": 
"types/src/network_address/mod.rs", "rank": 78, "score": 286835.8818749102 }, { "content": "/// Creates a single test transaction\n\npub fn create_transaction() -> Transaction {\n\n let private_key = Ed25519PrivateKey::generate_for_testing();\n\n let public_key = private_key.public_key();\n\n\n\n let transaction_payload = TransactionPayload::Script(Script::new(vec![], vec![], vec![]));\n\n let raw_transaction = RawTransaction::new(\n\n AccountAddress::random(),\n\n 0,\n\n transaction_payload,\n\n 0,\n\n 0,\n\n 0,\n\n ChainId::new(10),\n\n );\n\n let signed_transaction = SignedTransaction::new(\n\n raw_transaction,\n\n public_key,\n\n Ed25519Signature::dummy_signature(),\n\n );\n\n\n\n Transaction::UserTransaction(signed_transaction)\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 79, "score": 286530.1073100012 }, { "content": "pub fn update_counters_for_committed_blocks(blocks_to_commit: &[Arc<ExecutedBlock>]) {\n\n for block in blocks_to_commit {\n\n observe_block(block.block().timestamp_usecs(), BlockStage::COMMITTED);\n\n let txn_status = block.compute_result().compute_status();\n\n counters::NUM_TXNS_PER_BLOCK.observe(txn_status.len() as f64);\n\n counters::COMMITTED_BLOCKS_COUNT.inc();\n\n counters::LAST_COMMITTED_ROUND.set(block.round() as i64);\n\n counters::LAST_COMMITTED_VERSION.set(block.compute_result().num_leaves() as i64);\n\n\n\n for status in txn_status.iter() {\n\n match status {\n\n TransactionStatus::Keep(_) => {\n\n counters::COMMITTED_TXNS_COUNT\n\n .with_label_values(&[\"success\"])\n\n .inc();\n\n }\n\n TransactionStatus::Discard(_) => {\n\n counters::COMMITTED_TXNS_COUNT\n\n .with_label_values(&[\"failed\"])\n\n .inc();\n", "file_path": "consensus/src/block_storage/block_store.rs", "rank": 80, "score": 285997.64964741335 }, { "content": "pub fn test_get_frozen_subtree_hashes_impl(leaves: Vec<HashValue>) {\n\n let mut store = MockHashStore::new();\n\n let (root_hash, writes) = 
TestAccumulator::append(&store, 0, &leaves).unwrap();\n\n store.put_many(&writes);\n\n\n\n let num_leaves = leaves.len() as LeafCount;\n\n let frozen_subtree_hashes =\n\n TestAccumulator::get_frozen_subtree_hashes(&store, num_leaves).unwrap();\n\n let consistency_proof = TestAccumulator::get_consistency_proof(&store, num_leaves, 0).unwrap();\n\n let in_mem_acc_1 = InMemoryAccumulator::new(frozen_subtree_hashes, num_leaves).unwrap();\n\n let in_mem_acc_2 = InMemoryAccumulator::default()\n\n .append_subtrees(consistency_proof.subtrees(), num_leaves)\n\n .unwrap();\n\n assert_eq!(root_hash, in_mem_acc_1.root_hash());\n\n assert_eq!(root_hash, in_mem_acc_2.root_hash());\n\n}\n\n\n\nprop_compose! {\n\n pub fn arb_list_of_hash_batches(each_batch_size: usize, num_batches: usize)(\n\n batches in vec(vec(any::<HashValue>(), each_batch_size), num_batches)\n\n ) -> Vec<Vec<HashValue>> {\n\n batches\n\n }\n\n}\n\n\n", "file_path": "storage/accumulator/src/test_helpers.rs", "rank": 81, "score": 285892.3850164199 }, { "content": "pub fn create_channel<T>() -> (Sender<T>, Receiver<T>) {\n\n unbounded::<T>()\n\n}\n\n\n\n/// BufferManager handles the states of ordered blocks and\n\n/// interacts with the execution phase, the signing phase, and\n\n/// the persisting phase.\n\npub struct BufferManager {\n\n author: Author,\n\n\n\n buffer: Buffer<BufferItem>,\n\n\n\n // the roots point to the first *unprocessed* item.\n\n // None means no items ready to be processed (either all processed or no item finishes previous stage)\n\n execution_root: BufferItemRootType,\n\n execution_phase_tx: Sender<ExecutionRequest>,\n\n execution_phase_rx: Receiver<ExecutionResponse>,\n\n\n\n signing_root: BufferItemRootType,\n\n signing_phase_tx: Sender<SigningRequest>,\n", "file_path": "consensus/src/experimental/buffer_manager.rs", "rank": 82, "score": 285845.6758679054 }, { "content": "/// Creates a ledger info with the given version and epoch. 
If `epoch_ending`\n\n/// is true, makes the ledger info an epoch ending ledger info.\n\npub fn create_ledger_info(\n\n version: Version,\n\n epoch: Epoch,\n\n epoch_ending: bool,\n\n) -> LedgerInfoWithSignatures {\n\n let next_epoch_state = if epoch_ending {\n\n let mut epoch_state = EpochState::empty();\n\n epoch_state.epoch = epoch + 1;\n\n Some(epoch_state)\n\n } else {\n\n None\n\n };\n\n\n\n let block_info = BlockInfo::new(\n\n epoch,\n\n 0,\n\n HashValue::zero(),\n\n HashValue::zero(),\n\n version,\n\n 0,\n\n next_epoch_state,\n\n );\n\n LedgerInfoWithSignatures::new(\n\n LedgerInfo::new(block_info, HashValue::zero()),\n\n BTreeMap::new(),\n\n )\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 83, "score": 285190.0587111678 }, { "content": "fn thread(n: u64) {\n\n let signer = ValidatorSigner::from_int(0);\n\n let file_path = NamedTempFile::new().unwrap().into_temp_path().to_path_buf();\n\n let waypoint = test_utils::validator_signers_to_waypoint(&[&signer]);\n\n let storage = PersistentSafetyStorage::initialize(\n\n Storage::from(OnDiskStorage::new(file_path)),\n\n signer.author(),\n\n signer.private_key().clone(),\n\n Ed25519PrivateKey::generate_for_testing(),\n\n waypoint,\n\n true,\n\n );\n\n // Test value, in milliseconds\n\n let timeout_ms = 5_000;\n\n let safety_rules_manager = SafetyRulesManager::new_thread(storage, false, false, timeout_ms);\n\n lsr(safety_rules_manager.client(), signer, n);\n\n}\n\n\n", "file_path": "consensus/safety-rules/benches/safety_rules.rs", "rank": 84, "score": 285139.760152637 }, { "content": "pub fn module_blobs() -> Vec<Vec<u8>> {\n\n super::module_blobs(&*APTOS_PKG)\n\n}\n\n\n", "file_path": "aptos-move/framework/src/aptos.rs", "rank": 85, "score": 284636.0127608837 }, { "content": "/// Fetches the startup info from the specified storage\n\nfn fetch_startup_info(storage: Arc<dyn DbReader>) -> Result<StartupInfo, Error> {\n\n let startup_info = 
storage.get_startup_info().map_err(|error| {\n\n Error::StorageError(format!(\n\n \"Failed to get startup info from storage: {:?}\",\n\n error\n\n ))\n\n })?;\n\n startup_info.ok_or_else(|| Error::StorageError(\"Missing startup info from storage\".into()))\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/utils.rs", "rank": 86, "score": 284121.27024030103 }, { "content": "/// Creates a single test event\n\npub fn create_event() -> ContractEvent {\n\n ContractEvent::new(\n\n EventKey::random(),\n\n 0,\n\n TypeTag::Bool,\n\n bcs::to_bytes(&0).unwrap(),\n\n )\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 87, "score": 282927.8746000114 }, { "content": "/// Creates a random epoch ending ledger info with the specified values\n\npub fn create_random_epoch_ending_ledger_info(\n\n version: Version,\n\n epoch: Epoch,\n\n) -> LedgerInfoWithSignatures {\n\n let block_info = BlockInfo::new(\n\n epoch,\n\n 0,\n\n HashValue::zero(),\n\n HashValue::random(),\n\n version,\n\n 0,\n\n Some(EpochState::empty()),\n\n );\n\n let ledger_info = LedgerInfo::new(block_info, HashValue::random());\n\n LedgerInfoWithSignatures::new(ledger_info, BTreeMap::new())\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 88, "score": 282642.8741840372 }, { "content": "/// Decrements the pending data chunks\n\nfn decrement_pending_data_chunks(atomic_u64: Arc<AtomicU64>) {\n\n let delta = 1;\n\n atomic_u64.fetch_sub(delta, Ordering::Relaxed);\n\n metrics::decrement_gauge(\n\n &metrics::STORAGE_SYNCHRONIZER_GAUGES,\n\n metrics::STORAGE_SYNCHRONIZER_PENDING_DATA,\n\n delta,\n\n );\n\n}\n\n\n\n/// Sends an error notification to the notification listener\n\nasync fn send_storage_synchronizer_error(\n\n mut error_notification_sender: mpsc::UnboundedSender<ErrorNotification>,\n\n notification_id: NotificationId,\n\n error_message: String,\n\n) {\n\n let error_message = format!(\"Storage 
synchronizer error: {:?}\", error_message);\n\n error!(LogSchema::new(LogEntry::StorageSynchronizer).message(&error_message));\n\n\n\n // Send an error notification\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/storage_synchronizer.rs", "rank": 89, "score": 282058.4315580075 }, { "content": "/// Helper function to serialize version in a more efficient encoding.\n\n/// We use a super simple encoding - the high bit is set if more bytes follow.\n\nfn serialize_u64_varint(mut num: u64, binary: &mut Vec<u8>) {\n\n for _ in 0..8 {\n\n let low_bits = num as u8 & 0x7f;\n\n num >>= 7;\n\n let more = match num {\n\n 0 => 0u8,\n\n _ => 0x80,\n\n };\n\n binary.push(low_bits | more);\n\n if more == 0 {\n\n return;\n\n }\n\n }\n\n // Last byte is encoded raw; this means there are no bad encodings.\n\n assert_ne!(num, 0);\n\n assert!(num <= 0xff);\n\n binary.push(num as u8);\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/node_type/mod.rs", "rank": 90, "score": 281662.3052352996 }, { "content": "pub fn create_transaction_list_with_proof(\n\n start_version: u64,\n\n end_version: u64,\n\n include_events: bool,\n\n) -> TransactionListWithProof {\n\n // Include events if required\n\n let events = if include_events { Some(vec![]) } else { None };\n\n\n\n // Create the requested transactions\n\n let mut transactions = vec![];\n\n for _ in start_version..=end_version {\n\n transactions.push(create_transaction());\n\n }\n\n\n\n // Create a transaction list with an empty proof\n\n let mut transaction_list_with_proof = TransactionListWithProof::new_empty();\n\n transaction_list_with_proof.first_transaction_version = Some(start_version);\n\n transaction_list_with_proof.events = events;\n\n transaction_list_with_proof.transactions = transactions;\n\n\n\n transaction_list_with_proof\n\n}\n", "file_path": "state-sync/state-sync-v2/data-streaming-service/src/tests/utils.rs", "rank": 91, "score": 281453.02573766094 }, { "content": "/// Initializes a DB with a set of 
key-value pairs by inserting one key at each version.\n\npub fn init_mock_db<V>(kvs: &HashMap<HashValue, V>) -> (MockTreeStore<V>, Version)\n\nwhere\n\n V: crate::TestValue,\n\n{\n\n assert!(!kvs.is_empty());\n\n\n\n let db = MockTreeStore::default();\n\n let tree = JellyfishMerkleTree::new(&db);\n\n\n\n for (i, (key, value)) in kvs.iter().enumerate() {\n\n let (_root_hash, write_batch) = tree\n\n .put_value_set(vec![(*key, value)], i as Version)\n\n .unwrap();\n\n db.write_tree_update_batch(write_batch).unwrap();\n\n }\n\n\n\n (db, (kvs.len() - 1) as Version)\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/test_helper.rs", "rank": 92, "score": 280520.400258914 }, { "content": "pub fn placeholder_ledger_info() -> LedgerInfo {\n\n LedgerInfo::new(BlockInfo::empty(), HashValue::zero())\n\n}\n\n\n", "file_path": "consensus/src/test_utils/mod.rs", "rank": 93, "score": 280219.3502989932 }, { "content": "pub fn sync_ack_new() -> ResetAck {}\n\n\n\npub struct ResetRequest {\n\n pub tx: oneshot::Sender<ResetAck>,\n\n pub stop: bool,\n\n}\n\n\n\npub struct OrderedBlocks {\n\n pub ordered_blocks: Vec<ExecutedBlock>,\n\n pub ordered_proof: LedgerInfoWithSignatures,\n\n pub callback: StateComputerCommitCallBackType,\n\n}\n\n\n\npub type BufferItemRootType = Cursor;\n\npub type Sender<T> = UnboundedSender<T>;\n\npub type Receiver<T> = UnboundedReceiver<T>;\n\n\n", "file_path": "consensus/src/experimental/buffer_manager.rs", "rank": 94, "score": 280206.68360458163 }, { "content": "fn test_index_get_impl(event_batches: Vec<Vec<ContractEvent>>) {\n\n // Put into db.\n\n let tmp_dir = TempPath::new();\n\n let db = AptosDB::new_for_test(&tmp_dir);\n\n let store = &db.event_store;\n\n\n\n let mut cs = ChangeSet::new();\n\n event_batches.iter().enumerate().for_each(|(ver, events)| {\n\n store.put_events(ver as u64, events, &mut cs).unwrap();\n\n });\n\n store.db.write_schemas(cs.batch);\n\n let ledger_version_plus_one = event_batches.len() as u64;\n\n\n\n assert_eq!(\n\n 
store\n\n .get_events_by_version_iter(0, event_batches.len())\n\n .unwrap()\n\n .collect::<Result<Vec<_>>>()\n\n .unwrap(),\n\n event_batches,\n", "file_path": "storage/aptosdb/src/event_store/test.rs", "rank": 95, "score": 280084.7030673213 }, { "content": "fn read_u64_from_storage(state_view: &impl StateView, access_path: &AccessPath) -> u64 {\n\n state_view\n\n .get_state_value(&StateKey::AccessPath(access_path.clone()))\n\n .expect(\"Failed to query storage.\")\n\n .map_or(0, |bytes| decode_bytes(&bytes))\n\n}\n\n\n", "file_path": "execution/executor/src/mock_vm/mod.rs", "rank": 96, "score": 280061.12779882347 }, { "content": "fn mock_txn_hashes(version: Version) -> Vec<HashValue> {\n\n (0..=version).map(HashValue::from_u64).collect::<Vec<_>>()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{block_info::BlockInfo, ledger_info::LedgerInfo};\n\n use proptest::prelude::*;\n\n\n\n fn mock_ledger_info(version: Version, root_hash: HashValue) -> LedgerInfo {\n\n LedgerInfo::new(\n\n BlockInfo::new(0, 0, HashValue::zero(), root_hash, version, 0, None),\n\n HashValue::zero(),\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_mock_accumulator() {\n\n let end = 255;\n", "file_path": "types/src/proof/accumulator/mock.rs", "rank": 97, "score": 279882.7461245722 }, { "content": "/// Creates a test transaction info\n\npub fn create_transaction_info() -> TransactionInfo {\n\n TransactionInfo::new(\n\n HashValue::random(),\n\n HashValue::random(),\n\n HashValue::random(),\n\n 0,\n\n ExecutionStatus::Success,\n\n )\n\n}\n\n\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 98, "score": 279457.0000101768 }, { "content": "/// Creates a test startup info\n\npub fn create_startup_info() -> StartupInfo {\n\n StartupInfo::new(\n\n create_epoch_ending_ledger_info(),\n\n Some(EpochState::empty()),\n\n TreeState::new(0, vec![], HashValue::random()),\n\n None,\n\n )\n\n}\n\n\n", "file_path": 
"state-sync/state-sync-v2/state-sync-driver/src/tests/utils.rs", "rank": 99, "score": 279457.0000101768 } ]
Rust
src/basic/constpool.rs
orasunis/jbcrs
723185189c8422b1a1d1eb3a0bacb59ed00cf00a
use std::hash::{Hash, Hasher}; use std::collections::HashMap; use std::cmp::{Eq, PartialEq}; use std::rc::Rc; use std::slice::Iter; use result::*; #[derive(Debug, Clone)] pub enum Item { UTF8(String), Integer(i32), Float(f32), Long(i64), Double(f64), Class(u16), String(u16), FieldRef { class: u16, name_and_type: u16, }, MethodRef { class: u16, name_and_type: u16, }, InterfaceMethodRef { class: u16, name_and_type: u16, }, NameAndType { name: u16, desc: u16, }, MethodHandle { kind: ReferenceKind, index: u16, }, MethodType(u16), InvokeDynamic { bootstrap_method_attribute: u16, name_and_type: u16, }, Module(u16), Package(u16), } impl Item { fn is_double(&self) -> bool { match *self { Item::Long(_) | Item::Double(_) => true, _ => false, } } } impl Hash for Item { fn hash<H: Hasher>(&self, state: &mut H) { match *self { Item::UTF8(ref s) => { state.write_u8(1); s.hash(state); } Item::Integer(i) => { state.write_u8(3); i.hash(state); } Item::Float(f) => { state.write_u8(4); f.to_bits().hash(state); } Item::Long(i) => { state.write_u8(5); i.hash(state); } Item::Double(f) => { state.write_u8(6); f.to_bits().hash(state); } Item::Class(ptr) => { state.write_u8(7); ptr.hash(state); } Item::String(ptr) => { state.write_u8(8); ptr.hash(state); } Item::FieldRef { class, name_and_type, } => { state.write_u8(9); class.hash(state); name_and_type.hash(state); } Item::MethodRef { class, name_and_type, } => { state.write_u8(10); class.hash(state); name_and_type.hash(state); } Item::InterfaceMethodRef { class, name_and_type, } => { state.write_u8(11); class.hash(state); name_and_type.hash(state); } Item::NameAndType { name, desc } => { state.write_u8(12); name.hash(state); desc.hash(state); } Item::MethodHandle { ref kind, index } => { state.write_u8(15); kind.hash(state); index.hash(state); } Item::MethodType(ptr) => { state.write_u8(16); ptr.hash(state); } Item::InvokeDynamic { bootstrap_method_attribute, name_and_type, } => { state.write_u8(18); 
bootstrap_method_attribute.hash(state); name_and_type.hash(state); } Item::Module(ptr) => { state.write_u8(19); ptr.hash(state); } Item::Package(ptr) => { state.write_u8(20); ptr.hash(state); } } } } impl PartialEq for Item { fn eq(&self, other: &Item) -> bool { match (self, other) { (&Item::UTF8(ref str1), &Item::UTF8(ref str2)) => *str1 == *str2, (&Item::Integer(i1), &Item::Integer(i2)) => i1 == i2, (&Item::Float(f1), &Item::Float(f2)) => f1.to_bits() == f2.to_bits(), (&Item::Long(i1), &Item::Long(i2)) => i1 == i2, (&Item::Double(f1), &Item::Double(f2)) => f1.to_bits() == f2.to_bits(), (&Item::Class(i1), &Item::Class(i2)) | (&Item::String(i1), &Item::String(i2)) => { i1 == i2 } ( &Item::FieldRef { class: class1, name_and_type: nat1, }, &Item::FieldRef { class: class2, name_and_type: nat2, }, ) | ( &Item::MethodRef { class: class1, name_and_type: nat1, }, &Item::MethodRef { class: class2, name_and_type: nat2, }, ) | ( &Item::InterfaceMethodRef { class: class1, name_and_type: nat1, }, &Item::InterfaceMethodRef { class: class2, name_and_type: nat2, }, ) => class1 == class2 && nat1 == nat2, ( &Item::NameAndType { name: name1, desc: desc1, }, &Item::NameAndType { name: name2, desc: desc2, }, ) => name1 == name2 && desc1 == desc2, ( &Item::MethodHandle { kind: ref kind1, index: index1, }, &Item::MethodHandle { kind: ref kind2, index: index2, }, ) => kind1 == kind2 && index1 == index2, ( &Item::InvokeDynamic { bootstrap_method_attribute: bma1, name_and_type: nat1, }, &Item::InvokeDynamic { bootstrap_method_attribute: bma2, name_and_type: nat2, }, ) => bma1 == bma2 && nat1 == nat2, (&Item::Package(index1), &Item::Package(index2)) | (&Item::Module(index1), &Item::Module(index2)) | (&Item::MethodType(index1), &Item::MethodType(index2)) => index1 == index2, _ => false, } } } impl Eq for Item {} #[derive(Eq, PartialEq, Hash, Debug, Clone)] pub enum ReferenceKind { GetField, GetStatic, PutField, PutStatic, InvokeVirtual, InvokeStatic, InvokeSpecial, NewInvokeSpecial, 
InvokeInterface, } #[derive(Default)] pub struct Pool { length: u16, by_index: Vec<Option<Rc<Item>>>, by_entry: HashMap<Rc<Item>, u16>, } impl Pool { pub fn new() -> Self { Pool { length: 1, by_index: Vec::new(), by_entry: HashMap::new(), } } pub fn with_capacity(cap: u16) -> Self { Pool { length: 1, by_index: Vec::with_capacity(cap as usize), by_entry: HashMap::with_capacity(cap as usize), } } #[inline] pub fn len(&self) -> u16 { self.length } #[inline] pub fn is_empty(&self) -> bool { self.len() == 1 } pub fn get(&self, index: u16) -> Result<&Item> { if index != 0 && index <= self.len() { if let Some(ref item) = self.by_index[index as usize - 1] { return Ok(item); } } Err(Error::InvalidCPItem(index)) } pub fn get_utf8(&self, index: u16) -> Result<String> { if let Item::UTF8(ref s) = *self.get(index)? { Ok(s.clone()) } else { Err(Error::InvalidCPItem(index)) } } pub fn get_class_name_opt(&self, index: u16) -> Result<Option<String>> { if let Item::Class(utf_index) = *self.get(index)? { if utf_index == 0 { Ok(None) } else { Ok(Some(self.get_utf8(utf_index)?)) } } else { Err(Error::InvalidCPItem(index)) } } pub fn get_class_name(&self, index: u16) -> Result<String> { if let Item::Class(utf_index) = *self.get(index)? 
{ self.get_utf8(utf_index) } else { Err(Error::InvalidCPItem(index)) } } pub fn push(&mut self, item: Item) -> Result<u16> { if self.len() == u16::max_value() { return Err(Error::CPTooLarge); } let double = item.is_double(); let length = &mut self.length; let rc_item = Rc::new(item); let rc_item1 = Rc::clone(&rc_item); let by_index = &mut self.by_index; Ok(*self.by_entry.entry(rc_item).or_insert_with(move || { by_index.push(Some(Rc::clone(&rc_item1))); let prev_length = *length; if double { by_index.push(None); *length += 2; } else { *length += 1; } prev_length })) } pub fn iter(&self) -> PoolIter { PoolIter { iter: self.by_index.iter(), index: 0, } } } pub struct PoolIter<'a> { iter: Iter<'a, Option<Rc<Item>>>, index: u16, } impl<'a> Iterator for PoolIter<'a> { type Item = (u16, &'a Item); fn next(&mut self) -> Option<Self::Item> { self.index += 1; if let Some(rc_item) = self.iter.next() { if let Some(ref item) = *rc_item { Some((self.index + 1, item)) } else { self.next() } } else { None } } } #[cfg(test)] mod tests { use super::*; #[test] fn constpool() { let mut pool = Pool::new(); assert_eq!(pool.push(Item::Integer(123)).unwrap(), 1); assert_eq!(pool.push(Item::Long(32767)).unwrap(), 2); assert_eq!(pool.push(Item::Long(65535)).unwrap(), 4); assert_eq!(pool.push(Item::Float(3.8)).unwrap(), 6); assert_eq!(pool.push(Item::Integer(123)).unwrap(), 1); assert_eq!(pool.get(1).unwrap(), &Item::Integer(123)); assert_eq!(pool.get(2).unwrap(), &Item::Long(32767)); assert_eq!(pool.get(4).unwrap(), &Item::Long(65535)); assert_eq!(pool.get(6).unwrap(), &Item::Float(3.8)); } }
use std::hash::{Hash, Hasher}; use std::collections::HashMap; use std::cmp::{Eq, PartialEq}; use std::rc::Rc; use std::slice::Iter; use result::*; #[derive(Debug, Clone)] pub enum Item { UTF8(String), Integer(i32), Float(f32), Long(i64), Double(f64), Class(u16), String(u16), FieldRef { class: u16, name_and_type: u16, }, MethodRef { class: u16, name_and_type: u16, }, InterfaceMethodRef { class: u16, name_and_type: u16, }, NameAndType { name: u16, desc: u16, }, MethodHandle { kind: ReferenceKind, index: u16, }, MethodType(u16), InvokeDynamic { bootstrap_method_attribute: u16, name_and_type: u16, }, Module(u16), Package(u16), } impl Item { fn is_double(&self) -> bool { match *self { Item::Long(_) | Item::Double(_) => true, _ => false, } } } impl Hash for Item { fn hash<H: Hasher>(&self, state: &mut H) { match *self { Item::UTF8(ref s) => { state.write_u8(1); s.hash(state); } Item::Integer(i) => { state.write_u8(3); i.hash(state); } Item::Float(f) => { state.write_u8(4); f.to_bits().hash(state); } Item::Long(i) => { state.write_u8(5); i.hash(state); } Item::Double(f) => { state.write_u8(6); f.to_bits().hash(state); } Item::Class(ptr) => { state.write_u8(7); ptr.hash(state); } Item::String(ptr) => { state.write_u8(8); ptr.hash(state); } Item::FieldRef { class, name_and_type, } => { state.write_u8(9); class.hash(state); name_and_type.hash(state); } Item::MethodRef { class, name_and_type, } => { state.write_u8(10); class.hash(state); name_and_type.hash(state); } Item::InterfaceMethodRef { class, name_and_type, } => { state.write_u8(11); class.hash(state); name_and_type.hash(state); } Item::NameAndType { name, desc } => { state.write_u8(12); name.hash(state); desc.hash(state); } Item::MethodHandle { ref kind, index } => { state.write_u8(15); kind.hash(state); index.hash(state); } Item::MethodType(ptr) => { state.write_u8(16); ptr.hash(state); } Item::InvokeDynamic { bootstrap_method_attribute, name_and_type, } => { state.write_u8(18); 
bootstrap_method_attribute.hash(state); name_and_type.hash(state); } Item::Module(ptr) => { state.write_u8(19); ptr.hash(state); } Item::Package(ptr) => { state.write_u8(20); ptr.hash(state); } } } } impl PartialEq for Item { fn eq(&self, other: &Item) -> bool { match (self, other) { (&Item::UTF8(ref str1), &Item::UTF8(ref str2)) => *str1 == *str2, (&Item::Integer(i1), &Item::Integer(i2)) => i1 == i2, (&Item::Float(f1), &Item::Float(f2)) => f1.to_bits() == f2.to_bits(), (&Item::Long(i1), &Item::Long(i2)) => i1 == i2, (&Item::Double(f1), &Item::Double(f2)) => f1.to_bits() == f2.to_bits(), (&Item::Class(i1), &Item::Class(i2)) | (&Item::String(i1), &Item::String(i2)) => { i1 == i2 } ( &Item::FieldRef { class: class1, name_and_type: nat1, }, &Item::FieldRef { class: class2, name_and_type: nat2, }, ) | ( &Item::MethodRef { class: class1, name_and_type: nat1, }, &Item::MethodRef { class: class2, name_and_type: nat2, }, ) | ( &Item::InterfaceMethodRef { class: class1, name_and_type: nat1, }, &Item::InterfaceMethodRef { class: class2, name_and_type: nat2, }, ) => class1 == class2 && nat1 == nat2, ( &Item::NameAndType { name: name1, desc: desc1, }, &Item::NameAndType { name: name2, desc: desc2, }, ) => name1 == name2 && desc1 == desc2, ( &Item::MethodHandle { kind: ref kind1, index: index1, }, &Item::MethodHandle { kind: ref kind2, index: index2, }, ) => kind1 == kind2 && index1 == index2, ( &Item::InvokeDynamic { bootstrap_method_attribute: bma1, name_and_type: nat1, }, &Item::InvokeDynamic { bootstrap_method_attribute: bma2, name_and_type: nat2, }, ) => bma1 == bma2 && nat1 == nat2, (&Item::Package(index1), &Item::Package(index2)) | (&Item::Module(index1), &Item::Module(index2)) | (&Item::MethodType(index1), &Item::MethodType(index2)) => index1 == index2, _ => false, } } } impl Eq for Item {} #[derive(Eq, PartialEq, Hash, Debug, Clone)] pub enum ReferenceKind { GetField, GetStatic, PutField, PutStatic, InvokeVirtual, InvokeStatic, InvokeSpecial, NewInvokeSpecial, 
InvokeInterface, } #[derive(Default)] pub struct Pool { length: u16, by_index: Vec<Option<Rc<Item>>>, by_entry: HashMap<Rc<Item>, u16>, } impl Pool { pub fn new() -> Self { Pool { length: 1, by_index: Vec::new(), by_entry: HashMap::new(), } } pub fn with_capacity(cap: u16) -> Self { Pool { length: 1, by_index: Vec::with_capacity(cap as usize), by_entry: HashMap::with_capacity(cap as usize), } } #[inline] pub fn len(&self) -> u16 { self.length } #[inline] pub fn is_empty(&self) -> bool { self.len() == 1 } pub fn get(&self, index: u16) -> Result<&Item> { if index != 0 && index <= self.len() { if let Some(ref item) = self.by_index[index as usize - 1] { return Ok(item); } } Err(Error::InvalidCPItem(index)) } pub fn get_utf8(&self, index: u16) -> Result<String> { if let Item::UTF8(ref s) = *self.get(index)? { Ok(s.clone()) } else { Err(Error::InvalidCPItem(index)) } } pub fn get_class_name_opt(&self, index: u16) -> Result<Option<String>> { if let Item::Class(utf_index) = *self.get(index)? { if utf_index == 0 { Ok(None) } else { Ok(Some(self.get_utf8(utf_index)?)) } } else { Err(Error::InvalidCPItem(index)) } } pub fn get_class_name(&self, index: u16) -> Result<String> { if let Item::Class(utf_index) = *self.get(index)? { self.get_utf8(utf_index) } else { Err(Error::InvalidCPItem(index)) } }
pub fn iter(&self) -> PoolIter { PoolIter { iter: self.by_index.iter(), index: 0, } } } pub struct PoolIter<'a> { iter: Iter<'a, Option<Rc<Item>>>, index: u16, } impl<'a> Iterator for PoolIter<'a> { type Item = (u16, &'a Item); fn next(&mut self) -> Option<Self::Item> { self.index += 1; if let Some(rc_item) = self.iter.next() { if let Some(ref item) = *rc_item { Some((self.index + 1, item)) } else { self.next() } } else { None } } } #[cfg(test)] mod tests { use super::*; #[test] fn constpool() { let mut pool = Pool::new(); assert_eq!(pool.push(Item::Integer(123)).unwrap(), 1); assert_eq!(pool.push(Item::Long(32767)).unwrap(), 2); assert_eq!(pool.push(Item::Long(65535)).unwrap(), 4); assert_eq!(pool.push(Item::Float(3.8)).unwrap(), 6); assert_eq!(pool.push(Item::Integer(123)).unwrap(), 1); assert_eq!(pool.get(1).unwrap(), &Item::Integer(123)); assert_eq!(pool.get(2).unwrap(), &Item::Long(32767)); assert_eq!(pool.get(4).unwrap(), &Item::Long(65535)); assert_eq!(pool.get(6).unwrap(), &Item::Float(3.8)); } }
pub fn push(&mut self, item: Item) -> Result<u16> { if self.len() == u16::max_value() { return Err(Error::CPTooLarge); } let double = item.is_double(); let length = &mut self.length; let rc_item = Rc::new(item); let rc_item1 = Rc::clone(&rc_item); let by_index = &mut self.by_index; Ok(*self.by_entry.entry(rc_item).or_insert_with(move || { by_index.push(Some(Rc::clone(&rc_item1))); let prev_length = *length; if double { by_index.push(None); *length += 2; } else { *length += 1; } prev_length })) }
function_block-full_function
[ { "content": "/// Parses the class file, which is represented as a byte array.\n\n/// The constant pool and the class is returned, if no error occurred.\n\npub fn parse(input: &[u8]) -> Result<(Pool, Class)> {\n\n // create a new decoder from the byte array\n\n let mut cursor = 0;\n\n let mut decoder = Decoder::new(input, &mut cursor);\n\n\n\n // check if input is a class file\n\n if decoder.read_bytes(4)? != MAGIC {\n\n return Err(Error::NotAClass);\n\n }\n\n\n\n let minor_version = decoder.read_u16()?;\n\n let major_version = decoder.read_u16()?;\n\n\n\n let constant_pool = read_constant_pool(&mut decoder)?;\n\n\n\n let access_flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n\n\n let name = decoder.read_u16()?;\n\n let super_name = decoder.read_u16()?;\n\n\n", "file_path": "src/basic/parser/mod.rs", "rank": 0, "score": 179042.3094660376 }, { "content": "/// Parses the code attribute\n\npub fn parse_code(decoder: &mut Decoder, constant_pool: &Pool) -> Result<Attribute> {\n\n let max_stack = decoder.read_u16()?;\n\n let max_locals = decoder.read_u16()?;\n\n\n\n let code_length = decoder.read_u32()?;\n\n let mut instructions = HashMap::new();\n\n\n\n // Read the instructions\n\n // Using an extra block so we don't have to enable NLL on nightly\n\n // A method doesn't seem necessary to me, too\n\n {\n\n let mut code_decoder = decoder.limit(code_length as usize)?;\n\n\n\n let mut code_location = 0;\n\n loop {\n\n let (end, instruction) = parse_instruction(&mut code_decoder, code_location)?;\n\n instructions.insert(code_location, instruction);\n\n\n\n // we have read all instructions\n\n if end == code_length {\n", "file_path": "src/basic/parser/code.rs", "rank": 1, "score": 164141.6820781917 }, { "content": "/// Parses the `InnerClasses` attribute\n\npub fn parse_inner_classes(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut inner_classes = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n 
let inner_class_info = decoder.read_u16()?;\n\n let outer_class_info = decoder.read_u16()?;\n\n let inner_name = decoder.read_u16()?;\n\n let inner_class_access_flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n inner_classes.push(InnerClass {\n\n inner_class_info,\n\n outer_class_info,\n\n inner_name,\n\n inner_class_access_flags,\n\n })\n\n }\n\n Ok(Attribute::InnerClasses(inner_classes))\n\n}\n\n\n", "file_path": "src/basic/parser/class.rs", "rank": 2, "score": 162060.29498724663 }, { "content": "/// Parses the `Module` attribute.\n\npub fn parse_module(decoder: &mut Decoder) -> Result<Attribute> {\n\n let name = decoder.read_u16()?;\n\n let flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n let version = decoder.read_u16()?;\n\n\n\n // read requires\n\n let requires_count = decoder.read_u16()?;\n\n let mut requires = Vec::with_capacity(requires_count as usize);\n\n for _ in 0..requires_count {\n\n let index = decoder.read_u16()?;\n\n let flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n let version = decoder.read_u16()?;\n\n requires.push(Requirement {\n\n index,\n\n flags,\n\n version,\n\n });\n\n }\n\n\n\n // read exports\n", "file_path": "src/basic/parser/class.rs", "rank": 3, "score": 156543.6573238613 }, { "content": "/// Parses the `BootstrapMethods` attribute\n\npub fn parse_bootstrap_methods(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut bootstrap_methods = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let method_ref = decoder.read_u16()?;\n\n let mut arguments = Vec::with_capacity(count as usize);\n\n let count = decoder.read_u16()?;\n\n for _ in 0..count {\n\n arguments.push(decoder.read_u16()?);\n\n }\n\n bootstrap_methods.push(BootstrapMethod {\n\n method_ref,\n\n arguments,\n\n })\n\n }\n\n Ok(Attribute::BootstrapMethods(bootstrap_methods))\n\n}\n\n\n", "file_path": "src/basic/parser/class.rs", "rank": 4, "score": 153119.78597093903 }, { 
"content": "/// Parses the `ModulePackages` attribute.\n\npub fn parse_module_packages(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut packages = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n packages.push(decoder.read_u16()?);\n\n }\n\n Ok(Attribute::ModulePackages(packages))\n\n}\n\n\n", "file_path": "src/basic/parser/class.rs", "rank": 5, "score": 153119.78597093903 }, { "content": "/// Parses the `EnclosingMethod` attribute\n\npub fn parse_enclosing_method(decoder: &mut Decoder) -> Result<Attribute> {\n\n Ok(Attribute::EnclosingMethod {\n\n class_index: decoder.read_u16()?,\n\n method_index: decoder.read_u16()?,\n\n })\n\n}\n\n\n", "file_path": "src/basic/parser/class.rs", "rank": 6, "score": 153119.78597093903 }, { "content": "/// Reads the entire constant pool\n\nfn read_constant_pool(decoder: &mut Decoder) -> Result<Pool> {\n\n let size = decoder.read_u16()?;\n\n let mut pool = Pool::new();\n\n\n\n let mut index = 1;\n\n while index < size {\n\n let tag = decoder.read_u8()?;\n\n\n\n // match a tag and read the additional information\n\n let item = match tag {\n\n 1 => {\n\n let length = decoder.read_u16()?;\n\n Item::UTF8(decoder.read_str(length as usize)?)\n\n }\n\n 3 => Item::Integer(decoder.read_i32()?),\n\n 4 => Item::Float(decoder.read_f32()?),\n\n 5 => Item::Long(decoder.read_i64()?),\n\n 6 => Item::Double(decoder.read_f64()?),\n\n 7 => Item::Class(decoder.read_u16()?),\n\n 8 => Item::String(decoder.read_u16()?),\n", "file_path": "src/basic/parser/mod.rs", "rank": 7, "score": 149437.8390096186 }, { "content": "/// Parses the `Exceptions` attribute.\n\npub fn parse_exceptions(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut exceptions = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n exceptions.push(decoder.read_u16()?);\n\n }\n\n Ok(Attribute::Exceptions(exceptions))\n\n}\n\n\n", "file_path": "src/basic/parser/method.rs", 
"rank": 8, "score": 135661.66375552086 }, { "content": "/// Parses the `MethodParameters` attribute.\n\npub fn parse_method_parameters(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut params = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let name = decoder.read_u16()?;\n\n let access_flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n params.push(MethodParameter { name, access_flags });\n\n }\n\n Ok(Attribute::MethodParameters(params))\n\n}\n", "file_path": "src/basic/parser/method.rs", "rank": 9, "score": 132930.77113239194 }, { "content": "/// Parses all methods and their attributes\n\nfn parse_methods(decoder: &mut Decoder, constant_pool: &Pool) -> Result<Vec<Method>> {\n\n let count = decoder.read_u16()?;\n\n let mut fields = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let access_flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n let name = decoder.read_u16()?;\n\n let desc = decoder.read_u16()?;\n\n let attributes = parse_attributes(decoder, constant_pool)?;\n\n\n\n fields.push(Method {\n\n access_flags,\n\n name,\n\n desc,\n\n attributes,\n\n })\n\n }\n\n\n\n Ok(fields)\n\n}\n\n\n", "file_path": "src/basic/parser/mod.rs", "rank": 10, "score": 132397.95474646077 }, { "content": "/// Parses all fields and their attributes\n\nfn parse_fields(decoder: &mut Decoder, constant_pool: &Pool) -> Result<Vec<Field>> {\n\n let count = decoder.read_u16()?;\n\n let mut fields = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let access_flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n let name = decoder.read_u16()?;\n\n let desc = decoder.read_u16()?;\n\n let attributes = parse_attributes(decoder, constant_pool)?;\n\n\n\n fields.push(Field {\n\n access_flags,\n\n name,\n\n desc,\n\n attributes,\n\n })\n\n }\n\n\n\n Ok(fields)\n\n}\n\n\n", "file_path": "src/basic/parser/mod.rs", "rank": 11, "score": 132397.95474646077 }, { 
"content": "/// Parses all attributes\n\nfn parse_attributes(decoder: &mut Decoder, constant_pool: &Pool) -> Result<Vec<Attribute>> {\n\n let count = decoder.read_u16()?;\n\n let mut attributes = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let name_index = decoder.read_u16()?;\n\n let name = constant_pool.get_utf8(name_index)?;\n\n let length = decoder.read_u32()?;\n\n\n\n // limit attribute length\n\n let mut attr_decoder = decoder.limit(length as usize)?;\n\n\n\n let attribute = match name.as_ref() {\n\n \"AnnotationDefault\" => {\n\n Attribute::AnnotationDefault(parse_element_value(&mut attr_decoder)?)\n\n }\n\n \"BootstrapMethods\" => parse_bootstrap_methods(&mut attr_decoder)?,\n\n \"Code\" => parse_code(&mut attr_decoder, constant_pool)?,\n\n \"ConstantValue\" => {\n\n let index = attr_decoder.read_u16()?;\n\n Attribute::ConstantValue(index)\n", "file_path": "src/basic/parser/mod.rs", "rank": 12, "score": 132397.95474646077 }, { "content": "/// Parses the `LocalVariableTable` attribute.\n\npub fn parse_local_variable_table(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut table = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let start = decoder.read_u16()?;\n\n let length = decoder.read_u16()?;\n\n let name = decoder.read_u16()?;\n\n let descriptor = decoder.read_u16()?;\n\n let index = decoder.read_u16()?;\n\n table.push(LocalVariable {\n\n start,\n\n length,\n\n name,\n\n descriptor,\n\n index,\n\n });\n\n }\n\n Ok(Attribute::LocalVariableTable(table))\n\n}\n\n\n", "file_path": "src/basic/parser/method.rs", "rank": 13, "score": 130375.30867668359 }, { "content": "/// Parses the `StackMapTable` attribute.\n\npub fn parse_stack_map_table(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut table = Vec::with_capacity(count as usize);\n\n\n\n for _ in 0..count {\n\n let frame_type = decoder.read_u8()?;\n\n let frame = match frame_type 
{\n\n 0...63 => StackMapFrame::Same {\n\n offset_delta: u16::from(frame_type),\n\n },\n\n 64...127 => StackMapFrame::Same1 {\n\n offset_delta: u16::from(frame_type) - 64,\n\n stack: parse_verification_type(decoder)?,\n\n },\n\n 247 => StackMapFrame::Same1 {\n\n offset_delta: decoder.read_u16()?,\n\n stack: parse_verification_type(decoder)?,\n\n },\n\n 248...250 => StackMapFrame::Chop {\n\n offset_delta: decoder.read_u16()?,\n", "file_path": "src/basic/parser/method.rs", "rank": 14, "score": 130375.30867668359 }, { "content": "/// Reads an element value.\n\npub fn parse_element_value(decoder: &mut Decoder) -> Result<ElementValue> {\n\n let tag = decoder.read_u8()?;\n\n\n\n match tag {\n\n b'B' => Ok(ElementValue::Byte(decoder.read_u16()?)),\n\n b'S' => Ok(ElementValue::Short(decoder.read_u16()?)),\n\n b'C' => Ok(ElementValue::Char(decoder.read_u16()?)),\n\n b'I' => Ok(ElementValue::Int(decoder.read_u16()?)),\n\n b'J' => Ok(ElementValue::Long(decoder.read_u16()?)),\n\n b'F' => Ok(ElementValue::Float(decoder.read_u16()?)),\n\n b'D' => Ok(ElementValue::Double(decoder.read_u16()?)),\n\n b'Z' => Ok(ElementValue::Boolean(decoder.read_u16()?)),\n\n b's' => Ok(ElementValue::String(decoder.read_u16()?)),\n\n b'c' => Ok(ElementValue::Class(decoder.read_u16()?)),\n\n b'e' => {\n\n let type_name = decoder.read_u16()?;\n\n let const_name = decoder.read_u16()?;\n\n Ok(ElementValue::Enum {\n\n type_name,\n\n const_name,\n", "file_path": "src/basic/parser/annotation.rs", "rank": 15, "score": 130375.30867668359 }, { "content": "/// Parses the `LineNumberTable` attribute.\n\npub fn parse_line_number_table(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut table = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let start = decoder.read_u16()?;\n\n let line_number = decoder.read_u16()?;\n\n table.push(LineNumber { start, line_number })\n\n }\n\n Ok(Attribute::LineNumberTable(table))\n\n}\n\n\n", "file_path": 
"src/basic/parser/method.rs", "rank": 16, "score": 130375.30867668359 }, { "content": "/// Reads the next few annotations.\n\npub fn parse_annotations(decoder: &mut Decoder) -> Result<Vec<Annotation>> {\n\n let count = decoder.read_u16()?;\n\n let mut annotations = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n annotations.push(parse_annotation(decoder)?);\n\n }\n\n Ok(annotations)\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 17, "score": 129933.40091730512 }, { "content": "/// Parses the `LocalVariableTypeTable` attribute.\n\npub fn parse_local_variable_type_table(decoder: &mut Decoder) -> Result<Attribute> {\n\n let count = decoder.read_u16()?;\n\n let mut table = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n let start = decoder.read_u16()?;\n\n let length = decoder.read_u16()?;\n\n let name = decoder.read_u16()?;\n\n let signature = decoder.read_u16()?;\n\n let index = decoder.read_u16()?;\n\n table.push(LocalVariableType {\n\n start,\n\n length,\n\n name,\n\n signature,\n\n index,\n\n });\n\n }\n\n Ok(Attribute::LocalVariableTypeTable(table))\n\n}\n\n\n", "file_path": "src/basic/parser/method.rs", "rank": 18, "score": 127978.8982257484 }, { "content": "/// Reads the next few type annotations.\n\npub fn parse_type_annotations(decoder: &mut Decoder) -> Result<Vec<TypeAnnotation>> {\n\n let count = decoder.read_u16()?;\n\n let mut annotations = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n annotations.push(parse_type_annotation(decoder)?);\n\n }\n\n Ok(annotations)\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 19, "score": 124981.52801066157 }, { "content": "/// Reads the next few parameter annotations.\n\npub fn parse_parameter_annotations(decoder: &mut Decoder) -> Result<Vec<Vec<Annotation>>> {\n\n let count = decoder.read_u8()?;\n\n let mut annotations = Vec::with_capacity(count as usize);\n\n for _ in 0..count {\n\n 
annotations.push(parse_annotations(decoder)?);\n\n }\n\n Ok(annotations)\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 20, "score": 122302.93276675086 }, { "content": "/// Reads a single annotation.\n\nfn parse_annotation(decoder: &mut Decoder) -> Result<Annotation> {\n\n let type_index = decoder.read_u16()?;\n\n let count = decoder.read_u16()?;\n\n let mut element_value_pairs = Vec::with_capacity(count as usize);\n\n\n\n for _ in 0..count {\n\n let name_index = decoder.read_u16()?;\n\n element_value_pairs.push((name_index, parse_element_value(decoder)?));\n\n }\n\n\n\n Ok(Annotation {\n\n type_index,\n\n element_value_pairs,\n\n })\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 21, "score": 105091.45397522062 }, { "content": "/// Parses the target type of a type annotation\n\nfn parse_target_type(decoder: &mut Decoder) -> Result<TargetType> {\n\n use self::TargetType::*;\n\n\n\n Ok(match decoder.read_u8()? {\n\n 0x00 => TypeParameterClass(decoder.read_u8()?),\n\n 0x01 => TypeParameterMethod(decoder.read_u8()?),\n\n 0x10 => SuperType(decoder.read_u16()?),\n\n 0x11 => TypeParameterBoundClass {\n\n type_parameter: decoder.read_u8()?,\n\n bound_index: decoder.read_u8()?,\n\n },\n\n 0x12 => TypeParameterBoundMethod {\n\n type_parameter: decoder.read_u8()?,\n\n bound_index: decoder.read_u8()?,\n\n },\n\n 0x13 => EmptyField,\n\n 0x14 => EmptyReturn,\n\n 0x15 => EmptyReceiver,\n\n 0x16 => FormalParameter(decoder.read_u8()?),\n\n 0x17 => Throws(decoder.read_u16()?),\n", "file_path": "src/basic/parser/annotation.rs", "rank": 22, "score": 100863.25318274314 }, { "content": "/// Parses a verification type.\n\nfn parse_verification_type(decoder: &mut Decoder) -> Result<VerificationType> {\n\n use self::VerificationType::*;\n\n\n\n let tag = decoder.read_u8()?;\n\n match tag {\n\n 0 => Ok(Top),\n\n 1 => Ok(Integer),\n\n 2 => Ok(Float),\n\n 3 => Ok(Double),\n\n 4 => Ok(Long),\n\n 5 => Ok(Null),\n\n 6 => Ok(UninitializedThis),\n\n 7 => 
Ok(Object(decoder.read_u16()?)),\n\n 8 => Ok(Uninitialized(decoder.read_u16()?)),\n\n\n\n _ => Err(Error::InvalidVerificationType(tag)),\n\n }\n\n}\n\n\n", "file_path": "src/basic/parser/method.rs", "rank": 23, "score": 100863.25318274314 }, { "content": "/// Parses a type annotation\n\nfn parse_type_annotation(decoder: &mut Decoder) -> Result<TypeAnnotation> {\n\n let target_type = parse_target_type(decoder)?;\n\n let target_path = parse_type_path(decoder)?;\n\n let annotation = parse_annotation(decoder)?;\n\n Ok(TypeAnnotation {\n\n target_type,\n\n target_path,\n\n annotation,\n\n })\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 24, "score": 100863.25318274314 }, { "content": "/// Parses the local variables of a local variable target type\n\nfn parse_local_variable(decoder: &mut Decoder) -> Result<Vec<LocalVariableTarget>> {\n\n let length = decoder.read_u8()?;\n\n let mut table = Vec::with_capacity(length as usize);\n\n\n\n for _ in 0..length {\n\n let start = decoder.read_u16()?;\n\n let length = decoder.read_u16()?;\n\n let index = decoder.read_u16()?;\n\n table.push(LocalVariableTarget {\n\n start,\n\n length,\n\n index,\n\n })\n\n }\n\n\n\n Ok(table)\n\n}\n\n\n", "file_path": "src/basic/parser/annotation.rs", "rank": 25, "score": 94698.75000734832 }, { "content": "/// Parses the type path of a type annotation\n\nfn parse_type_path(decoder: &mut Decoder) -> Result<Vec<TypePathElement>> {\n\n let length = decoder.read_u8()?;\n\n let mut type_path = Vec::with_capacity(length as usize);\n\n\n\n for _ in 0..length {\n\n let path_kind = match decoder.read_u8()? 
{\n\n 0x00 => TypePathKind::ArrayType,\n\n 0x01 => TypePathKind::NestedType,\n\n 0x02 => TypePathKind::WildcardType,\n\n 0x03 => TypePathKind::Type,\n\n\n\n _ => return Err(Error::InvalidTypePath),\n\n };\n\n\n\n let argument_index = decoder.read_u8()?;\n\n type_path.push(TypePathElement {\n\n path_kind,\n\n argument_index,\n\n })\n\n }\n\n\n\n Ok(type_path)\n\n}\n", "file_path": "src/basic/parser/annotation.rs", "rank": 26, "score": 94698.75000734832 }, { "content": "/// Parses a single instruction and then returns its end\n\nfn parse_instruction(decoder: &mut Decoder, at: u32) -> Result<(u32, Instruction)> {\n\n use self::Instruction::*;\n\n\n\n let prev_cursor = decoder.cursor();\n\n let op_code = decoder.read_u8()?;\n\n\n\n let insn = match op_code {\n\n 0x00 => NOP,\n\n 0x01 => AConstNull,\n\n 0x02 => IConstM1,\n\n 0x03 => IConst0,\n\n 0x04 => IConst1,\n\n 0x05 => IConst2,\n\n 0x06 => IConst3,\n\n 0x07 => IConst4,\n\n 0x08 => IConst5,\n\n 0x09 => LConst0,\n\n 0x0A => LConst1,\n\n 0x0B => FConst0,\n\n 0x0C => FConst1,\n", "file_path": "src/basic/parser/code.rs", "rank": 27, "score": 94264.52493478214 }, { "content": "fn main() {\n\n // get the file path\n\n let file_path = env::args().nth(1).expect(\"no file path provided\");\n\n\n\n // first read the class file\n\n let mut file = File::open(file_path).expect(\"could not open file\");\n\n let mut buf = Vec::new();\n\n file.read_to_end(&mut buf).expect(\"could not read file\");\n\n\n\n // then parse it\n\n let (pool, class) = jbcrs::basic::parse(buf.as_ref()).expect(\"could not parse class file\");\n\n\n\n // now print it to stdout\n\n // the constant pool will be printed differently\n\n // to help the user get a constant pool item by index\n\n\n\n println!(\"Constant Pool:\");\n\n for (index, item) in pool.iter() {\n\n println!(\" #{}: {:?}\", index, item);\n\n }\n\n\n\n // Class derives Debug,\n\n // the output might not be the best (compared to javap or krakatau-disassemble)\n\n // but it should be 
enough.\n\n println!(\"{:#?}\", class);\n\n}\n", "file_path": "examples/basic_read_class.rs", "rank": 28, "score": 74393.73439893874 }, { "content": "use std::{io, result};\n\n\n\n#[derive(Debug, YadeError)]\n\npub enum Error {\n\n /// Just an I/O error\n\n IO(#[cause] io::Error),\n\n\n\n /// A modified utf-8 string could not be read\n\n InvalidUTF8,\n\n\n\n /// Decoder has come to the end of the file or the limit was exceeded\n\n LimitExceeded,\n\n\n\n /// Not a class file, the header does not equal 0xCAFEBABE\n\n NotAClass,\n\n\n\n /// Invalid constant pool entry\n\n InvalidCPItem(u16),\n\n\n\n /// The constant pool cannot be larger than `u16::max_value()`\n", "file_path": "src/result.rs", "rank": 29, "score": 31190.020854324932 }, { "content": " CPTooLarge,\n\n\n\n /// Not a valid descriptor\n\n InvalidDescriptor { desc: String, at: usize },\n\n\n\n /// Invalid instruction, (e.g. unknown op code)\n\n InvalidInstruction { op_code: u8, at: u32 },\n\n\n\n /// Reserved (invalid) stack map frame\n\n ReservedStackMapFrame(u8),\n\n\n\n /// Invalid verification type in stack map table\n\n InvalidVerificationType(u8),\n\n\n\n /// Invalid element value of annotation, where the u8 is the tag\n\n InvalidElementValue(u8),\n\n\n\n /// Invalid target type of annotation\n\n InvalidTargetType,\n\n\n\n /// Invalid type path element kind of annotation\n\n InvalidTypePath,\n\n}\n\n\n\npub type Result<T> = result::Result<T, Error>;\n", "file_path": "src/result.rs", "rank": 30, "score": 31183.5280688553 }, { "content": " let mut provides = Vec::with_capacity(provides_count as usize);\n\n for _ in 0..provides_count {\n\n let index = decoder.read_u16()?;\n\n\n\n let with_count = decoder.read_u16()?;\n\n let mut with = Vec::with_capacity(with_count as usize);\n\n for _ in 0..with_count {\n\n with.push(decoder.read_u16()?);\n\n }\n\n\n\n provides.push(Provider { index, with });\n\n }\n\n\n\n Ok(Attribute::Module {\n\n name,\n\n flags,\n\n version,\n\n requires,\n\n exports,\n\n 
opens,\n\n uses,\n\n provides,\n\n })\n\n}\n", "file_path": "src/basic/parser/class.rs", "rank": 31, "score": 28801.023889366217 }, { "content": " let flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n\n\n let to_count = decoder.read_u16()?;\n\n let mut to = Vec::with_capacity(to_count as usize);\n\n for _ in 0..to_count {\n\n to.push(decoder.read_u16()?);\n\n }\n\n\n\n opens.push(Opening { index, flags, to });\n\n }\n\n\n\n // read uses\n\n let uses_count = decoder.read_u16()?;\n\n let mut uses = Vec::with_capacity(uses_count as usize);\n\n for _ in 0..uses_count {\n\n uses.push(decoder.read_u16()?);\n\n }\n\n\n\n // read provides\n\n let provides_count = decoder.read_u16()?;\n", "file_path": "src/basic/parser/class.rs", "rank": 32, "score": 28799.818921160277 }, { "content": " let exports_count = decoder.read_u16()?;\n\n let mut exports = Vec::with_capacity(exports_count as usize);\n\n for _ in 0..exports_count {\n\n let index = decoder.read_u16()?;\n\n let flags = AccessFlags::from_bits_truncate(decoder.read_u16()?);\n\n\n\n let to_count = decoder.read_u16()?;\n\n let mut to = Vec::with_capacity(to_count as usize);\n\n for _ in 0..to_count {\n\n to.push(decoder.read_u16()?);\n\n }\n\n\n\n exports.push(Export { index, flags, to });\n\n }\n\n\n\n // read opens\n\n let opens_count = decoder.read_u16()?;\n\n let mut opens = Vec::with_capacity(opens_count as usize);\n\n for _ in 0..opens_count {\n\n let index = decoder.read_u16()?;\n", "file_path": "src/basic/parser/class.rs", "rank": 33, "score": 28798.312989196675 }, { "content": "use super::*;\n\nuse self::decode::Decoder;\n\n\n\n/// Parses the `BootstrapMethods` attribute\n", "file_path": "src/basic/parser/class.rs", "rank": 34, "score": 28790.51111724718 }, { "content": "//! 
Reads and prints the content of a class file to STDOUT.\n\nextern crate jbcrs;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::env;\n\n\n", "file_path": "examples/basic_read_class.rs", "rank": 35, "score": 28790.4616127854 }, { "content": " /// The index to the constant pool\n\n /// which must be an `Item::Double(_)`.\n\n Double(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Integer(_)`.\n\n /// Yes, it really needs a constant pool entry for this.\n\n Boolean(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::UTF8(_)`.\n\n String(u16),\n\n /// An enum constant.\n\n Enum {\n\n /// The index to the constant pool,\n\n /// which must be an `Item::UTF8(_)`.\n\n /// It results in the internal form of the binary name\n\n /// of the type of this enum constant.\n\n type_name: u16,\n\n /// The index to the constant pool,\n\n /// which must be an `Item::UTF8(_)`.\n\n /// It results in the simple name\n", "file_path": "src/basic/tree.rs", "rank": 49, "score": 20.137694072852188 }, { "content": " /// Length of the Code.\n\n pub length: u16,\n\n /// The index to an `Item::UTF8(_)` representing a valid unqalified name.\n\n pub name: u16,\n\n /// The index to an `Item::UTF8(_)` representing a field signature.\n\n pub signature: u16,\n\n /// The index in the local variable array of the current frame.\n\n /// double and long do occupy two spaces.\n\n pub index: u16,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MethodParameter {\n\n pub name: u16,\n\n pub access_flags: AccessFlags,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Requirement {\n\n pub index: u16,\n", "file_path": "src/basic/tree.rs", "rank": 50, "score": 20.051734804821844 }, { "content": "#[derive(Debug)]\n\npub struct LocalVariable {\n\n /// Start of the Code.\n\n pub start: u16,\n\n /// Length of the Code.\n\n pub length: u16,\n\n /// The index to an `Item::UTF8(_)` representing a valid unqalified name.\n\n pub name: u16,\n\n /// The index to an 
`Item::UTF8(_)` representing a field/type descriptor.\n\n pub descriptor: u16,\n\n /// The index in the local variable array of the current frame.\n\n /// double and long do occupy two spaces.\n\n pub index: u16,\n\n}\n\n\n\n/// An entry of the `LocalVariableTypeTable`\n\n#[derive(Debug)]\n\npub struct LocalVariableType {\n\n /// Start of the Code.\n\n pub start: u16,\n", "file_path": "src/basic/tree.rs", "rank": 52, "score": 18.51118310253915 }, { "content": " /// on the type of a new expression.\n\n TypeArgumentConstructor { offset: u16, type_argument: u8 },\n\n /// Indicates that an annotation is present\n\n /// on the type of a ::new expression.\n\n TypeArgumentNewRef { offset: u16, type_argument: u8 },\n\n /// Indicates that an annotation is present\n\n /// on the type of a ::name expression.\n\n TypeArgumentRef { offset: u16, type_argument: u8 },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TypePathElement {\n\n pub path_kind: TypePathKind,\n\n pub argument_index: u8,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum TypePathKind {\n\n /// Annotation is deeper in an array kind\n\n ArrayType,\n", "file_path": "src/basic/tree.rs", "rank": 53, "score": 18.459599671685528 }, { "content": " let name_and_type = decoder.read_u16()?;\n\n\n\n Item::InterfaceMethodRef {\n\n class,\n\n name_and_type,\n\n }\n\n }\n\n 12 => {\n\n let name = decoder.read_u16()?;\n\n let desc = decoder.read_u16()?;\n\n\n\n Item::NameAndType { name, desc }\n\n }\n\n 15 => {\n\n let kind = match decoder.read_u8()? 
{\n\n 1 => ReferenceKind::GetField,\n\n 2 => ReferenceKind::GetStatic,\n\n 3 => ReferenceKind::PutField,\n\n 4 => ReferenceKind::PutStatic,\n\n 5 => ReferenceKind::InvokeVirtual,\n", "file_path": "src/basic/parser/mod.rs", "rank": 54, "score": 18.2057498346311 }, { "content": "#[derive(Debug)]\n\npub enum ElementValue {\n\n /// The index to the constant pool\n\n /// which must be an `Item::Integer(_)`.\n\n Byte(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Integer(_)`.\n\n Short(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Integer(_)`.\n\n Char(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Integer(_)`.\n\n Int(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Long(_)`.\n\n Long(u16),\n\n /// The index to the constant pool\n\n /// which must be an `Item::Float(_)`.\n\n Float(u16),\n", "file_path": "src/basic/tree.rs", "rank": 59, "score": 17.319400216867457 }, { "content": "//! The `decode` crate is used for decoding simple data,\n\n//! 
like integers, floats and Strings.\n\n\n\nuse result::*;\n\nuse byteorder::{BigEndian, ByteOrder};\n\nuse std::char;\n\n\n\npub struct Decoder<'a> {\n\n bytes: &'a [u8],\n\n cursor: &'a mut usize,\n\n limit: usize,\n\n}\n\n\n\nimpl<'a> Decoder<'a> {\n\n /// Creates a new decoder,\n\n /// the cursor has to be a mutable pointer to support limits without copying\n\n pub fn new(bytes: &'a [u8], cursor: &'a mut usize) -> Decoder<'a> {\n\n Decoder {\n\n bytes,\n\n cursor,\n", "file_path": "src/basic/parser/decode.rs", "rank": 61, "score": 16.194249499275095 }, { "content": " 6 => ReferenceKind::InvokeStatic,\n\n 7 => ReferenceKind::InvokeSpecial,\n\n 8 => ReferenceKind::NewInvokeSpecial,\n\n 9 => ReferenceKind::InvokeInterface,\n\n\n\n _ => return Err(Error::InvalidCPItem(index)),\n\n };\n\n let index = decoder.read_u16()?;\n\n\n\n Item::MethodHandle { kind, index }\n\n }\n\n 16 => Item::MethodType(decoder.read_u16()?),\n\n 18 => {\n\n let bootstrap_method_attribute = decoder.read_u16()?;\n\n let name_and_type = decoder.read_u16()?;\n\n\n\n Item::InvokeDynamic {\n\n bootstrap_method_attribute,\n\n name_and_type,\n\n }\n", "file_path": "src/basic/parser/mod.rs", "rank": 62, "score": 15.96264671176884 }, { "content": " /// of this enum constant.\n\n const_name: u16,\n\n },\n\n /// A class literal.\n\n /// The index to the constant pool\n\n /// which must be an `Item::UTF8(_)`\n\n /// representing a return descriptor.\n\n Class(u16),\n\n /// Another annotation.\n\n Annotation(Box<Annotation>),\n\n /// Multiple `ElementValue`s\n\n Array(Vec<ElementValue>),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TypeAnnotation {\n\n pub target_type: TargetType,\n\n pub target_path: Vec<TypePathElement>,\n\n pub annotation: Annotation,\n\n}\n", "file_path": "src/basic/tree.rs", "rank": 64, "score": 15.640142463447537 }, { "content": "//! 
The tree package provides the basic structure of a basic class file\n\n\n\nuse std::collections::{BTreeMap, HashMap};\n\n\n\n/// A java class file.\n\n#[derive(Debug)]\n\npub struct Class {\n\n pub minor_version: u16,\n\n pub major_version: u16,\n\n\n\n pub access_flags: AccessFlags,\n\n pub name: u16,\n\n pub super_name: u16,\n\n pub interfaces: Vec<u16>,\n\n\n\n pub fields: Vec<Field>,\n\n pub methods: Vec<Method>,\n\n\n\n pub attributes: Vec<Attribute>,\n\n}\n", "file_path": "src/basic/tree.rs", "rank": 65, "score": 15.56058539904071 }, { "content": " Double,\n\n Long,\n\n Null,\n\n UninitializedThis,\n\n Object(u16),\n\n Uninitialized(u16),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Annotation {\n\n /// Must be an index to the constant pool with an `Item::UTF8(_)`\n\n /// representing a field descriptor\n\n pub type_index: u16,\n\n /// The value every single pair holds.\n\n /// The first part is an index to the constant pool,\n\n /// which must be an `Item::UTF8(_)`.\n\n /// The second one is the value itself.\n\n pub element_value_pairs: Vec<(u16, ElementValue)>,\n\n}\n\n\n", "file_path": "src/basic/tree.rs", "rank": 67, "score": 15.473154325062993 }, { "content": "\n\n/// A field.\n\n#[derive(Debug)]\n\npub struct Field {\n\n pub access_flags: AccessFlags,\n\n pub name: u16,\n\n pub desc: u16,\n\n pub attributes: Vec<Attribute>,\n\n}\n\n\n\n/// A method.\n\n#[derive(Debug)]\n\npub struct Method {\n\n pub access_flags: AccessFlags,\n\n pub name: u16,\n\n pub desc: u16,\n\n pub attributes: Vec<Attribute>,\n\n}\n\n\n\n/// An Attribute.\n", "file_path": "src/basic/tree.rs", "rank": 68, "score": 15.294251395962988 }, { "content": " Long,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InnerClass {\n\n pub inner_class_info: u16,\n\n pub outer_class_info: u16,\n\n pub inner_name: u16,\n\n pub inner_class_access_flags: AccessFlags,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum StackMapFrame {\n\n Same {\n\n offset_delta: u16,\n\n },\n\n Same1 {\n\n offset_delta: u16,\n\n 
stack: VerificationType,\n\n },\n", "file_path": "src/basic/tree.rs", "rank": 71, "score": 14.912186829447286 }, { "content": " pub fn read_f32(&mut self) -> Result<f32> {\n\n Ok(f32::from_bits(self.read_u32()?))\n\n }\n\n\n\n pub fn read_f64(&mut self) -> Result<f64> {\n\n Ok(f64::from_bits(self.read_u64()?))\n\n }\n\n\n\n /// Decodes a modified UTF-8 string.\n\n /// Length is the amount of bytes the String was encoded in.\n\n /// The length used here may differ from the count of all chars.\n\n pub fn read_str(&mut self, length: usize) -> Result<String> {\n\n let mut out = String::with_capacity(length);\n\n\n\n let mut i = length;\n\n while i > 0 {\n\n // read first byte\n\n let r1 = u32::from(self.read_u8()?);\n\n let ch = if r1 != 0 && r1 < 0x80 {\n\n // single byte\n", "file_path": "src/basic/parser/decode.rs", "rank": 72, "score": 14.603748288063573 }, { "content": " // There is no need to have a shorter name than this\n\n // usize.max(usize) is used to prevent panics\n\n let mut name = String::with_capacity((desc.len() - dimensions as usize).max(2) - 2);\n\n\n\n // now read the name of the reference\n\n loop {\n\n let ch = match chars.next() {\n\n Some(ch) => ch,\n\n None => err!(),\n\n };\n\n i += 1;\n\n if ch == ';' {\n\n // a class name cannot be empty, nor can any chars follow the descriptor\n\n if name.is_empty() || chars.count() != 0 {\n\n err!();\n\n }\n\n\n\n return Ok(TypeDescriptor::new(dimensions, Type::Reference(name)));\n\n } else {\n\n name.push(ch);\n", "file_path": "src/types.rs", "rank": 73, "score": 14.225118399424886 }, { "content": "/// a single type (parameter or return type) of a method,\n\n/// or an element value of an annotation.\n\n/// In the JVM Specification `FieldDescriptor` is used as a name.\n\n/// Maybe using that one would be better, but I am too lazy to refactor now.\n\n#[derive(Eq, PartialEq, Debug)]\n\npub struct TypeDescriptor {\n\n /// The dimensions of the type\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use 
jbcrs::TypeDescriptor;\n\n /// let desc: TypeDescriptor = \"[[I\".parse().unwrap();\n\n /// assert_eq!(desc.dimensions, 2);\n\n /// ```\n\n pub dimensions: u8,\n\n\n\n /// The base type\n\n ///\n\n /// # Examples\n", "file_path": "src/types.rs", "rank": 74, "score": 14.21996107277063 }, { "content": " // Read interfaces\n\n let interface_count = decoder.read_u16()?;\n\n let mut interfaces = Vec::with_capacity(interface_count as usize);\n\n for _ in 0..interface_count {\n\n interfaces.push(decoder.read_u16()?);\n\n }\n\n\n\n let fields = parse_fields(&mut decoder, &constant_pool)?;\n\n let methods = parse_methods(&mut decoder, &constant_pool)?;\n\n let attributes = parse_attributes(&mut decoder, &constant_pool)?;\n\n\n\n let class = Class {\n\n minor_version,\n\n major_version,\n\n\n\n access_flags,\n\n name,\n\n super_name,\n\n\n\n interfaces,\n\n\n\n fields,\n\n methods,\n\n attributes,\n\n };\n\n\n\n Ok((constant_pool, class))\n\n}\n\n\n", "file_path": "src/basic/parser/mod.rs", "rank": 75, "score": 14.163892220398177 }, { "content": " let index = attr_decoder.read_u16()?;\n\n Attribute::SourceFile(index)\n\n }\n\n \"Signature\" => {\n\n let index = attr_decoder.read_u16()?;\n\n Attribute::Signature(index)\n\n }\n\n \"StackMapTable\" => parse_stack_map_table(&mut attr_decoder)?,\n\n \"Synthetic\" => Attribute::Synthetic,\n\n \"SourceDebugExtension\" => {\n\n Attribute::SourceDebugExtension(attr_decoder.read_str(length as usize)?)\n\n }\n\n\n\n _ => {\n\n let bytes = attr_decoder.read_bytes(length as usize)?;\n\n Attribute::Unknown(name_index, bytes.to_vec())\n\n }\n\n };\n\n attributes.push(attribute);\n\n\n\n // go on\n\n attr_decoder.remove_limit()?;\n\n }\n\n\n\n Ok(attributes)\n\n}\n", "file_path": "src/basic/parser/mod.rs", "rank": 76, "score": 14.136575053854656 }, { "content": " pub flags: AccessFlags,\n\n pub version: u16,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Export {\n\n pub index: u16,\n\n pub flags: AccessFlags,\n\n pub to: 
Vec<u16>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Opening {\n\n pub index: u16,\n\n pub flags: AccessFlags,\n\n pub to: Vec<u16>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Provider {\n\n pub index: u16,\n\n pub with: Vec<u16>,\n\n}\n", "file_path": "src/basic/tree.rs", "rank": 77, "score": 13.955651161204093 }, { "content": " 9 => {\n\n let class = decoder.read_u16()?;\n\n let name_and_type = decoder.read_u16()?;\n\n\n\n Item::FieldRef {\n\n class,\n\n name_and_type,\n\n }\n\n }\n\n 10 => {\n\n let class = decoder.read_u16()?;\n\n let name_and_type = decoder.read_u16()?;\n\n\n\n Item::MethodRef {\n\n class,\n\n name_and_type,\n\n }\n\n }\n\n 11 => {\n\n let class = decoder.read_u16()?;\n", "file_path": "src/basic/parser/mod.rs", "rank": 78, "score": 13.325265904773266 }, { "content": " }\n\n 19 => Item::Module(decoder.read_u16()?),\n\n 20 => Item::Package(decoder.read_u16()?),\n\n\n\n _ => return Err(Error::InvalidCPItem(index)),\n\n };\n\n\n\n pool.push(item)?;\n\n index += 1;\n\n }\n\n\n\n Ok(pool)\n\n}\n\n\n", "file_path": "src/basic/parser/mod.rs", "rank": 79, "score": 13.308633202021806 }, { "content": " pub end: u16,\n\n pub handler: u16,\n\n pub catch_type: u16,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BootstrapMethod {\n\n pub method_ref: u16,\n\n pub arguments: Vec<u16>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LineNumber {\n\n pub start: u16,\n\n pub line_number: u16,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Instruction {\n\n /// No operation\n", "file_path": "src/basic/tree.rs", "rank": 80, "score": 13.064852975851162 }, { "content": " Err(Error::LimitExceeded)\n\n }\n\n }\n\n\n\n /// Skips a certain amount of bytes and returns an error if it exceeded the limit\n\n pub fn skip(&mut self, to: usize) -> Result<()> {\n\n let end = *self.cursor + to;\n\n self.check(end)?;\n\n *self.cursor = end;\n\n Ok(())\n\n }\n\n\n\n /// Returns the current cursor\n\n pub fn cursor(&self) -> usize {\n\n *self.cursor\n\n }\n\n\n\n /// Reads a specific amount of 
bytes.\n\n /// If not enough bytes are available, an EOF error is returned.\n\n pub fn read_bytes(&mut self, count: usize) -> Result<&'a [u8]> {\n", "file_path": "src/basic/parser/decode.rs", "rank": 81, "score": 12.811525875884685 }, { "content": " // now read the name of the reference\n\n for ch in &mut chars {\n\n i += 1;\n\n if ch == ';' {\n\n if name.is_empty() {\n\n err!();\n\n }\n\n\n\n let parsed_desc = TypeDescriptor::new(dimensions, Type::Reference(name));\n\n if state == 0 {\n\n params.push(parsed_desc);\n\n continue 'type_loop;\n\n } else {\n\n ret = Some(parsed_desc);\n\n break 'type_loop;\n\n }\n\n } else {\n\n name.push(ch);\n\n }\n\n }\n", "file_path": "src/types.rs", "rank": 82, "score": 12.672038481462748 }, { "content": "#[derive(Debug)]\n\npub enum Attribute {\n\n AnnotationDefault(ElementValue),\n\n BootstrapMethods(Vec<BootstrapMethod>),\n\n Code {\n\n max_stack: u16,\n\n max_locals: u16,\n\n instructions: HashMap<u32, Instruction>,\n\n exceptions: Vec<Exception>,\n\n attributes: Vec<Attribute>,\n\n },\n\n ConstantValue(u16),\n\n Deprecated,\n\n EnclosingMethod {\n\n class_index: u16,\n\n method_index: u16,\n\n },\n\n Exceptions(Vec<u16>),\n\n InnerClasses(Vec<InnerClass>),\n\n LineNumberTable(Vec<LineNumber>),\n", "file_path": "src/basic/tree.rs", "rank": 83, "score": 12.431974751041333 }, { "content": "# Basic\n\nWe want to parse a class from a byte array\n\nand print its version, access flags and name.\n\nOf course you could use std::fs::File or a zip library,\n\nbut showing this is not the purpose of this tutorial.\n\n\n\n```rust\n\nuse jbcrs::basic;\n\n\n\n// You got the bytes from any possible source.\n\nlet bytes: &[u8] = [0xCA, 0xFE, 0xBA, 0xBE];\n\n\n\n// After parsing the class file,\n\n// you will get the constant pool\n\n// and the class itself.\n\n// You don't have to annotate the types here.\n\nlet (constant_pool, class): (basic::Pool, basic::Class) = basic::parse(bytes)\n\n .expect(\"could not parse class file\");\n\n\n\n// 
Print its major and minor version:\n\nprintln!(\"version: {}.{}\", class.major_version, class.minor_version);\n\n\n\n// Access Flags can be printed human readable\n\nprintln!(\"access: {}\", class.access_flags);\n\n\n\n// Printing the name requires us to use the constant pool.\n\nprintln!(\"name: {}\", constant_pool.get_class_name(class.name).expect(\"could not get class name\"));\n\n```\n\n\n\n---\n\n# Resources\n\n[Java Virtual Machine Specification (Java SE 9)](https://docs.oracle.com/javase/specs/jvms/se9/jvms9.pdf)\n", "file_path": "README.md", "rank": 84, "score": 12.429521314364383 }, { "content": " /// Annotation is deeper in a nested type\n\n NestedType,\n\n /// Annotation is on the bound of a wildcard type argument of a parameterized type\n\n WildcardType,\n\n /// Annotation is on a type argument of a parameterized type\n\n Type,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LocalVariableTarget {\n\n /// Start of the Code.\n\n pub start: u16,\n\n /// Length of the Code.\n\n pub length: u16,\n\n /// The index in the local variable array of the current frame.\n\n /// double and long do occupy two spaces.\n\n pub index: u16,\n\n}\n\n\n\n/// An entry of the `LocalVariableTable`\n", "file_path": "src/basic/tree.rs", "rank": 85, "score": 12.388528258236615 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n // dimensions\n\n f.write_str(&\"[\".repeat(self.dimensions as usize))?;\n\n\n\n // base type\n\n match self.base_type {\n\n Type::Boolean => f.write_char('Z'),\n\n Type::Byte => f.write_char('B'),\n\n Type::Short => f.write_char('S'),\n\n Type::Int => f.write_char('I'),\n\n Type::Long => f.write_char('J'),\n\n Type::Float => f.write_char('F'),\n\n Type::Double => f.write_char('D'),\n\n Type::Char => f.write_char('C'),\n\n Type::Reference(ref name) => write!(f, \"L{};\", name),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Eq, PartialEq, Debug)]\n", "file_path": "src/types.rs", "rank": 86, "score": 12.212385478778874 }, { "content": "use 
result::*;\n\n\n\nuse std::str::FromStr;\n\nuse std::fmt::{self, Write};\n\n\n\n/// All types present in a type descriptor.\n\n#[derive(Eq, PartialEq, Debug)]\n\npub enum Type {\n\n Boolean,\n\n Byte,\n\n Short,\n\n Int,\n\n Long,\n\n Float,\n\n Double,\n\n Char,\n\n Reference(String),\n\n}\n\n\n\n/// A `TypeDescriptor` is either a field descriptor,\n", "file_path": "src/types.rs", "rank": 87, "score": 11.995544244860742 }, { "content": " let end = *self.cursor + count;\n\n self.check(end)?;\n\n\n\n let bytes = &self.bytes[*self.cursor..end];\n\n *self.cursor = end;\n\n Ok(bytes)\n\n }\n\n\n\n pub fn read_u8(&mut self) -> Result<u8> {\n\n Ok(self.read_bytes(1)?[0])\n\n }\n\n\n\n pub fn read_u16(&mut self) -> Result<u16> {\n\n Ok(BigEndian::read_u16(self.read_bytes(2)?))\n\n }\n\n\n\n pub fn read_u32(&mut self) -> Result<u32> {\n\n Ok(BigEndian::read_u32(self.read_bytes(4)?))\n\n }\n\n\n", "file_path": "src/basic/parser/decode.rs", "rank": 88, "score": 11.966396374334945 }, { "content": " ///\n\n /// ```\n\n /// use jbcrs::{Type, TypeDescriptor};\n\n ///\n\n /// let short_desc: TypeDescriptor = \"S\".parse().unwrap();\n\n /// assert_eq!(short_desc.base_type, Type::Short);\n\n ///\n\n /// let string_desc: TypeDescriptor = \"[Ljava/lang/String;\".parse().unwrap();\n\n /// assert_eq!(\n\n /// string_desc.base_type,\n\n /// Type::Reference(\"java/lang/String\".to_owned())\n\n /// );\n\n /// ```\n\n pub base_type: Type,\n\n}\n\n\n\nimpl TypeDescriptor {\n\n pub fn new(dimensions: u8, base_type: Type) -> TypeDescriptor {\n\n TypeDescriptor {\n\n dimensions,\n", "file_path": "src/types.rs", "rank": 89, "score": 11.832504346606958 }, { "content": " pub fn read_u64(&mut self) -> Result<u64> {\n\n Ok(BigEndian::read_u64(self.read_bytes(8)?))\n\n }\n\n\n\n pub fn read_i8(&mut self) -> Result<i8> {\n\n Ok(self.read_u8()? as i8)\n\n }\n\n\n\n pub fn read_i16(&mut self) -> Result<i16> {\n\n Ok(self.read_u16()? 
as i16)\n\n }\n\n\n\n pub fn read_i32(&mut self) -> Result<i32> {\n\n Ok(self.read_u32()? as i32)\n\n }\n\n\n\n pub fn read_i64(&mut self) -> Result<i64> {\n\n Ok(self.read_u64()? as i64)\n\n }\n\n\n", "file_path": "src/basic/parser/decode.rs", "rank": 91, "score": 11.728539973602494 }, { "content": " 'F' => Type::Float,\n\n 'D' => Type::Double,\n\n 'C' => Type::Char,\n\n 'L' => break, // read the name of the reference\n\n _ => err!(), // unknown type tag\n\n },\n\n );\n\n\n\n if state == 0 {\n\n params.push(parsed_desc);\n\n continue 'type_loop;\n\n } else {\n\n ret = Some(parsed_desc);\n\n break 'type_loop;\n\n }\n\n }\n\n }\n\n\n\n let mut name = String::new();\n\n\n", "file_path": "src/types.rs", "rank": 92, "score": 11.689389308246552 }, { "content": " ///\n\n /// let desc: MethodDescriptor = \"()I\".parse().unwrap();\n\n /// assert_eq!(Type::Int, desc.return_type.unwrap().base_type);\n\n /// ```\n\n pub return_type: Option<TypeDescriptor>,\n\n}\n\n\n\nimpl MethodDescriptor {\n\n pub fn new(\n\n params: Vec<TypeDescriptor>,\n\n return_type: Option<TypeDescriptor>,\n\n ) -> MethodDescriptor {\n\n MethodDescriptor {\n\n params,\n\n return_type,\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for MethodDescriptor {\n", "file_path": "src/types.rs", "rank": 94, "score": 11.375210833241097 }, { "content": " /// ],\n\n /// return_type: None,\n\n /// });\n\n /// ```\n\n fn from_str(desc: &str) -> Result<MethodDescriptor> {\n\n let mut chars = desc.chars();\n\n let mut i = 0;\n\n\n\n // avoid code duplication\n\n macro_rules! 
err {\n\n () => {{\n\n return Err(Error::InvalidDescriptor {\n\n desc: desc.to_owned(),\n\n at: i,\n\n });\n\n }}\n\n }\n\n\n\n match chars.next() {\n\n Some('(') => {}\n", "file_path": "src/types.rs", "rank": 95, "score": 11.00175881949521 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TypeDescriptor {\n\n /// Formats this descriptor\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use jbcrs::{Type, TypeDescriptor};\n\n ///\n\n /// let mut desc: TypeDescriptor = \"[[Ljava/lang/String;\".parse().unwrap();\n\n /// desc.base_type = Type::Float;\n\n /// assert_eq!(\"[[F\", desc.to_string());\n\n ///\n\n /// desc.base_type = Type::Reference(\"java/lang/Float\".to_owned());\n\n /// assert_eq!(\"[[Ljava/lang/Float;\", desc.to_string());\n\n /// ```\n", "file_path": "src/types.rs", "rank": 96, "score": 10.975058416987633 }, { "content": "mod decode;\n\nmod class;\n\nmod method;\n\nmod code;\n\nmod annotation;\n\n\n\npub use super::constpool::*;\n\npub use super::tree::*;\n\npub use result::*;\n\n\n\nuse self::class::*;\n\nuse self::method::*;\n\nuse self::code::*;\n\nuse self::annotation::*;\n\nuse self::decode::Decoder;\n\n\n\n/// The first 4 bytes of every java class file\n\nconst MAGIC: &[u8] = &[0xCA, 0xFE, 0xBA, 0xBE];\n\n\n\n/// Parses the class file, which is represented as a byte array.\n\n/// The constant pool and the class is returned, if no error occurred.\n", "file_path": "src/basic/parser/mod.rs", "rank": 97, "score": 10.83213902798632 }, { "content": " /// let mut desc: MethodDescriptor = \"(Ljava/lang/String;)I\".parse().unwrap();\n\n /// desc.return_type.as_mut().unwrap().base_type = Type::Long;\n\n /// assert_eq!(\"(Ljava/lang/String;)J\", desc.to_string());\n\n ///\n\n /// desc.params[0].base_type = Type::Reference(\"java/lang/Double\".to_owned());\n\n /// assert_eq!(\"(Ljava/lang/Double;)J\", desc.to_string());\n\n /// ```\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_char('(')?;\n\n for param in 
&self.params {\n\n param.fmt(f)?;\n\n }\n\n f.write_char(')')?;\n\n if let Some(ref ret) = self.return_type {\n\n ret.fmt(f)\n\n } else {\n\n f.write_char('V')\n\n }\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 98, "score": 10.783382243268042 }, { "content": "//! The basic module provides basic read and write capabilities.\n\n\n\nmod constpool;\n\nmod parser;\n\nmod tree;\n\n\n\npub use self::constpool::*;\n\npub use self::parser::*;\n\npub use self::tree::*;\n", "file_path": "src/basic/mod.rs", "rank": 99, "score": 10.560113686291391 } ]
Rust
tests/builder.rs
denzp/rust-ptx-builder
4b5bb6845674417d04f3cba48ba0cda6f5c4772a
use std::env; use std::env::current_dir; use std::fs::{remove_dir_all, File}; use std::io::prelude::*; use std::path::PathBuf; use antidote::Mutex; use lazy_static::*; use ptx_builder::error::*; use ptx_builder::prelude::*; lazy_static! { static ref ENV_MUTEX: Mutex<()> = Mutex::new(()); } #[test] fn should_provide_output_path() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { assert!(output.get_assembly_path().starts_with( env::temp_dir() .join("ptx-builder-0.5") .join("sample_ptx_crate"), )); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_write_assembly() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_application_crate() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/app-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_mixed_crate_lib() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = 
Builder::new("tests/fixtures/mixed-crate").unwrap(); match builder .set_crate_type(CrateType::Library) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); println!("{}", output.get_assembly_path().display()); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_mixed_crate_bin() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/mixed-crate").unwrap(); match builder .set_crate_type(CrateType::Binary) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); println!("{}", output.get_assembly_path().display()); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_handle_rebuild_without_changes() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = { Builder::new("tests/fixtures/app-crate") .unwrap() .disable_colors() }; builder.build().unwrap(); match builder.build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_write_assembly_in_debug_mode() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = 
Builder::new("tests/fixtures/sample-crate").unwrap(); match builder .set_profile(Profile::Debug) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("debug")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_report_about_build_failure() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/faulty-crate") .unwrap() .disable_colors(); let output = builder.build(); let crate_absoulte_path = current_dir() .unwrap() .join("tests") .join("fixtures") .join("faulty-crate"); let lib_path = PathBuf::from("src").join("lib.rs"); let crate_absoulte_path_str = crate_absoulte_path.display().to_string(); match output.unwrap_err().kind() { BuildErrorKind::BuildFailed(diagnostics) => { assert_eq!( diagnostics .into_iter() .filter(|item| !item.contains("Blocking waiting") && !item.contains("Compiling core") && !item.contains("Compiling compiler_builtins") && !item.contains("Finished release [optimized] target(s)")) .collect::<Vec<_>>(), &[ format!( " Compiling faulty-ptx_crate v0.1.0 ({})", crate_absoulte_path_str ), String::from("error[E0425]: cannot find function `external_fn` in this scope"), format!(" --> {}:6:20", lib_path.display()), String::from(" |"), String::from("6 | *y.offset(0) = external_fn(*x.offset(0)) * a;"), String::from(" | ^^^^^^^^^^^ not found in this scope"), String::from(""), String::from("error: aborting due to previous error"), String::from(""), String::from( "For more information about this error, try `rustc --explain E0425`.", ), String::from("error: could not compile `faulty-ptx_crate`."), String::from(""), ] ); } _ => unreachable!("it should fail with proper error"), } } #[test] fn 
should_provide_crate_source_files() { let _lock = ENV_MUTEX.lock(); let crate_path = { current_dir() .unwrap() .join("tests") .join("fixtures") .join("sample-crate") }; let builder = Builder::new(&crate_path.display().to_string()).unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut sources = output.dependencies().unwrap(); let mut expectations = vec![ crate_path.join("src").join("lib.rs"), crate_path.join("src").join("mod1.rs"), crate_path.join("src").join("mod2.rs"), crate_path.join("Cargo.toml"), crate_path.join("Cargo.lock"), ]; sources.sort(); expectations.sort(); assert_eq!(sources, expectations); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_provide_application_crate_source_files() { let _lock = ENV_MUTEX.lock(); let crate_path = { current_dir() .unwrap() .join("tests") .join("fixtures") .join("app-crate") }; let builder = Builder::new(&crate_path.display().to_string()).unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut sources = output.dependencies().unwrap(); let mut expectations = vec![ crate_path.join("src").join("main.rs"), crate_path.join("src").join("mod1.rs"), crate_path.join("src").join("mod2.rs"), crate_path.join("Cargo.toml"), crate_path.join("Cargo.lock"), ]; sources.sort(); expectations.sort(); assert_eq!(sources, expectations); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_not_get_built_from_rls() { let _lock = ENV_MUTEX.lock(); env::set_var("CARGO", "some/path/to/rls"); assert_eq!(Builder::is_build_needed(), false); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::NotNeeded => {} BuildStatus::Success(_) => unreachable!(), } env::set_var("CARGO", ""); } #[test] fn should_not_get_built_recursively() { let _lock = ENV_MUTEX.lock(); env::set_var("PTX_CRATE_BUILDING", "1"); assert_eq!(Builder::is_build_needed(), false); let 
builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::NotNeeded => {} BuildStatus::Success(_) => unreachable!(), } env::set_var("PTX_CRATE_BUILDING", ""); } fn cleanup_temp_location() { let crate_names = &[ "faulty_ptx_crate", "sample_app_ptx_crate", "sample_ptx_crate", "mixed_crate", ]; for name in crate_names { remove_dir_all(env::temp_dir().join("ptx-builder-0.5").join(name)).unwrap_or_default(); } }
use std::env; use std::env::current_dir; use std::fs::{remove_dir_all, File}; use std::io::prelude::*; use std::path::PathBuf; use antidote::Mutex; use lazy_static::*; use ptx_builder::error::*; use ptx_builder::prelude::*; lazy_static! { static ref ENV_MUTEX: Mutex<()> = Mutex::new(()); } #[test] fn should_provide_output_path() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { assert!(output.get_assembly_path().starts_with( env::temp_dir() .join("ptx-builder-0.5") .join("sample_ptx_crate"), )); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_write_assembly() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_application_crate() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/app-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_mixed_crate_lib() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = 
Builder::new("tests/fixtures/mixed-crate").unwrap(); match builder .set_crate_type(CrateType::Library) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); println!("{}", output.get_assembly_path().display()); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_build_mixed_crate_bin() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/mixed-crate").unwrap(); match builder .set_crate_type(CrateType::Binary) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); println!("{}", output.get_assembly_path().display()); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_handle_rebuild_without_changes() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = { Builder::new("tests/fixtures/app-crate") .unwrap() .disable_colors() }; builder.build().unwrap(); match builder.build().unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("release")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_write_assembly_in_debug_mode() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = 
Builder::new("tests/fixtures/sample-crate").unwrap(); match builder .set_profile(Profile::Debug) .disable_colors() .build() .unwrap() { BuildStatus::Success(output) => { let mut assembly_contents = String::new(); File::open(output.get_assembly_path()) .unwrap() .read_to_string(&mut assembly_contents) .unwrap(); assert!(output .get_assembly_path() .to_string_lossy() .contains("debug")); assert!(assembly_contents.contains(".visible .entry the_kernel(")); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_report_about_build_failure() { cleanup_temp_location(); let _lock = ENV_MUTEX.lock(); let builder = Builder::new("tests/fixtures/faulty-crate") .unwrap() .disable_colors(); let output = builder.build(); let crate_absoulte_path = current_dir() .unwrap() .join("tests") .join("fixtures") .join("faulty-crate"); let lib_path = PathBuf::from("src").join("lib.rs"); let crate_absoulte_path_str = crate_absoulte_path.display().to_string(); match output.unwrap_err().kind() { BuildErrorKind::BuildFailed(diagnostics) => { assert_eq!( diagnostics .into_iter() .filter(|item| !item.contains("Blocking waiting") && !item.contains("Compiling core") && !item.contains("Compiling compiler_builtins") && !item.contains("Finished release [optimized] target(s)")) .collect::<Vec<_>>(), &[ format!( " Compiling faulty-ptx_crate v0.1.0 ({})", crate_absoulte_path_str ), String::from("error[E0425]: cannot find function `external_fn` in this scope"), format!(" --> {}:6:20", lib_path.display()), String::from(" |"), String::from("6 | *y.offset(0) = external_fn(*x.offset(0)) * a;"), String::from(" | ^^^^^^^^^^^ not found in this scope"), String::from(""), String::from("error: aborting due to previous error"), String::from(""), String::from( "For more information about this error, try `rustc --explain E0425`.", ), String::from("error: could not compile `faulty-ptx_crate`."), String::from(""), ] ); } _ => unreachable!("it should fail with proper error"), } } #[test] fn 
should_provide_crate_source_files() { let _lock = ENV_MUTEX.lock(); let crate_path = { current_dir() .unwrap() .join("tests") .join("fixtures") .join("sample-crate") }; let builder = Builder::new(&crate_path.display().to_string()).unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut sources = output.dependencies().unwrap(); let mut expectations = vec![ crate_path.join("src").join("lib.rs"), crate_path.join("src").join("mod1.rs"), crate_path.join("src").join("mod2.rs"), crate_path.join("Cargo.toml"), crate_path.join("Cargo.lock"), ]; sources.sort(); expectations.sort(); assert_eq!(sources, expectations); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_provide_application_crate_source_files() { let _lock = ENV_MUTEX.lock(); let crate_path = { current_dir() .unwrap() .join("tests") .join("fixtures") .join("app-crate") }; let builder = Builder::new(&crate_path.display().to_string()).unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::Success(output) => { let mut sources = output.dependencies().unwrap(); let mut expectations = vec![ crate_path.join("src").join("main.rs"), crate_path.join("src").join("mod1.rs"), crate_path.join("src").join("mod2.rs"), crate_path.join("Cargo.toml"), crate_path.join("Cargo.lock"), ]; sources.sort(); expectations.sort(); assert_eq!(sources, expectations); } BuildStatus::NotNeeded => unreachable!(), } } #[test] fn should_not_get_built_from_rls() { let _lock = ENV_MUTEX.lock(); env::set_var("CARGO", "some/path/to/rls"); assert_eq!(Builder::is_build_needed(), false); let builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::NotNeeded => {} BuildStatus::Success(_) => unreachable!(), } env::set_var("CARGO", ""); } #[test] fn should_not_get_built_recursively() { let _lock = ENV_MUTEX.lock(); env::set_var("PTX_CRATE_BUILDING", "1"); assert_eq!(Builder::is_build_needed(), false); let 
builder = Builder::new("tests/fixtures/sample-crate").unwrap(); match builder.disable_colors().build().unwrap() { BuildStatus::NotNeeded => {} BuildStatus::Success(_) => unreachable!(), } env::set_var("PTX_CRATE_BUILDING", ""); } f
n cleanup_temp_location() { let crate_names = &[ "faulty_ptx_crate", "sample_app_ptx_crate", "sample_ptx_crate", "mixed_crate", ]; for name in crate_names { remove_dir_all(env::temp_dir().join("ptx-builder-0.5").join(name)).unwrap_or_default(); } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn should_provide_output_path() {\n\n let source_crate = Crate::analyse(\"tests/fixtures/sample-crate\").unwrap();\n\n\n\n assert!(source_crate.get_output_path().unwrap().starts_with(\n\n env::temp_dir()\n\n .join(\"ptx-builder-0.5\")\n\n .join(\"sample_ptx_crate\")\n\n ));\n\n}\n", "file_path": "src/source.rs", "rank": 7, "score": 88560.58013425847 }, { "content": "#[test]\n\nfn should_find_crate_names() {\n\n let source = Crate::analyse(\"tests/fixtures/sample-crate\").unwrap();\n\n\n\n assert_eq!(source.get_output_file_prefix(), \"sample_ptx_crate\");\n\n\n\n assert_eq!(\n\n source.get_deps_file_prefix(None).unwrap(),\n\n \"libsample_ptx_crate\"\n\n );\n\n\n\n assert_eq!(\n\n source\n\n .get_deps_file_prefix(Some(CrateType::Library))\n\n .unwrap(),\n\n \"libsample_ptx_crate\"\n\n );\n\n\n\n match source\n\n .get_deps_file_prefix(Some(CrateType::Binary))\n\n .unwrap_err()\n\n .kind()\n\n {\n\n BuildErrorKind::InvalidCrateType(kind) => {\n\n assert_eq!(kind, \"Binary\");\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 8, "score": 72813.88397726342 }, { "content": "#[test]\n\nfn should_find_mixed_crate_names() {\n\n let source = Crate::analyse(\"tests/fixtures/mixed-crate\").unwrap();\n\n\n\n assert_eq!(source.get_output_file_prefix(), \"mixed_crate\");\n\n\n\n assert_eq!(\n\n source\n\n .get_deps_file_prefix(Some(CrateType::Binary))\n\n .unwrap(),\n\n \"mixed-crate\"\n\n );\n\n\n\n assert_eq!(\n\n source\n\n .get_deps_file_prefix(Some(CrateType::Library))\n\n .unwrap(),\n\n \"libmixed_crate\"\n\n );\n\n\n\n match source.get_deps_file_prefix(None).unwrap_err().kind() {\n\n BuildErrorKind::MissingCrateType => {}\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 10, "score": 69362.58134102446 }, { "content": "#[test]\n\nfn should_find_app_crate_names() {\n\n let source = 
Crate::analyse(\"tests/fixtures/app-crate\").unwrap();\n\n\n\n assert_eq!(source.get_output_file_prefix(), \"sample_app_ptx_crate\");\n\n\n\n assert_eq!(\n\n source.get_deps_file_prefix(None).unwrap(),\n\n \"sample-app-ptx_crate\"\n\n );\n\n\n\n assert_eq!(\n\n source\n\n .get_deps_file_prefix(Some(CrateType::Binary))\n\n .unwrap(),\n\n \"sample-app-ptx_crate\"\n\n );\n\n\n\n match source\n\n .get_deps_file_prefix(Some(CrateType::Library))\n\n .unwrap_err()\n\n .kind()\n\n {\n\n BuildErrorKind::InvalidCrateType(kind) => {\n\n assert_eq!(kind, \"Library\");\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 11, "score": 69362.58134102447 }, { "content": "#[panic_handler]\n\nfn dummy_panic_handler(_info: &::core::panic::PanicInfo) -> ! {\n\n loop {}\n\n}\n", "file_path": "tests/fixtures/mixed-crate/src/main.rs", "rank": 17, "score": 55914.44370040058 }, { "content": "#[test]\n\nfn should_report_in_cargo_style() {\n\n let original_error: Result<()> = Err(Error::from(BuildErrorKind::CommandFailed {\n\n command: String::from(\"some_name\"),\n\n code: 0,\n\n stderr: String::from(\"some\\nmultiline\\noutput\"),\n\n }));\n\n\n\n let chained_error = original_error\n\n .with_context(|_| BuildErrorKind::InternalError(String::from(\"internal error\")));\n\n\n\n let chained_error = chained_error.with_context(|_| {\n\n BuildErrorKind::BuildFailed(vec![\n\n String::from(\"error[E0425]: cannot find function `external_fn` in this scope\"),\n\n String::from(\" --> src/lib.rs:6:20\"),\n\n String::from(\" |\"),\n\n String::from(\"6 | *y.offset(0) = external_fn(*x.offset(0)) * a;\"),\n\n String::from(\" | ^^^^^^^^^^^ not found in this scope\"),\n\n ])\n\n });\n\n\n", "file_path": "tests/reporter.rs", "rank": 18, "score": 49169.153204145055 }, { "content": "#[test]\n\nfn should_check_validity_of_crate_path() {\n\n let result = Crate::analyse(\"tests/builder.rs\");\n\n\n\n match result.unwrap_err().kind() {\n\n 
BuildErrorKind::InvalidCratePath(path) => {\n\n assert!(path.ends_with(\"tests/builder.rs\"));\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 19, "score": 48410.65125632893 }, { "content": "#[test]\n\nfn should_check_existence_of_crate_path() {\n\n let result = Crate::analyse(\"tests/fixtures/non-existing-crate\");\n\n\n\n match result.unwrap_err().kind() {\n\n BuildErrorKind::InvalidCratePath(path) => {\n\n assert!(path.ends_with(\"tests/fixtures/non-existing-crate\"));\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 20, "score": 48410.65125632893 }, { "content": "#[cfg(not(target_os = \"cuda\"))]\n\nfn main() {\n\n println!(\"Hello, world!\");\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"ptx-kernel\" fn the_kernel(x: *const f64, y: *mut f64, a: f64) {\n\n *y.offset(0) = *x.offset(0) * a;\n\n}\n\n\n", "file_path": "tests/fixtures/mixed-crate/src/main.rs", "rank": 30, "score": 45000.54626476405 }, { "content": "enum BuildCommand {\n\n Realtime(String),\n\n Cached(String),\n\n}\n\n\n\nimpl std::ops::Deref for BuildCommand {\n\n type Target = str;\n\n\n\n fn deref(&self) -> &str {\n\n match self {\n\n BuildCommand::Realtime(line) => &line,\n\n BuildCommand::Cached(line) => &line,\n\n }\n\n }\n\n}\n", "file_path": "src/builder.rs", "rank": 31, "score": 44377.618714175296 }, { "content": "}\n\n\n\nimpl fmt::Display for BuildErrorKind {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n use BuildErrorKind::*;\n\n\n\n match self {\n\n CommandNotFound { command, hint } => write!(\n\n formatter,\n\n \"Command not found in PATH: '{}'. 
{}.\",\n\n command.bold(),\n\n hint.underline()\n\n ),\n\n\n\n CommandFailed {\n\n command,\n\n code,\n\n stderr,\n\n } => write!(\n\n formatter,\n", "file_path": "src/error.rs", "rank": 32, "score": 26313.526519946598 }, { "content": " InvalidCratePath(PathBuf),\n\n BuildFailed(Vec<String>),\n\n InvalidCrateType(String),\n\n MissingCrateType,\n\n InternalError(String),\n\n OtherError,\n\n}\n\n\n\nimpl Fail for Error {\n\n fn name(&self) -> Option<&str> {\n\n self.inner.name()\n\n }\n\n\n\n fn cause(&self) -> Option<&dyn Fail> {\n\n self.inner.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.inner.backtrace()\n\n }\n", "file_path": "src/error.rs", "rank": 33, "score": 26307.95923105645 }, { "content": "use std::fmt;\n\nuse std::path::PathBuf;\n\n\n\nuse colored::*;\n\nuse failure::{Backtrace, Context, Fail};\n\nuse semver::{Version, VersionReq};\n\n\n\n#[macro_export]\n\nmacro_rules! bail {\n\n ($err:expr) => {\n\n return Err($err.into());\n\n };\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Context<BuildErrorKind>,\n\n}\n", "file_path": "src/error.rs", "rank": 34, "score": 26307.79689522571 }, { "content": "\n\n#[derive(Debug, PartialEq, Fail, Clone)]\n\npub enum BuildErrorKind {\n\n CommandNotFound {\n\n command: String,\n\n hint: String,\n\n },\n\n\n\n CommandFailed {\n\n command: String,\n\n code: i32,\n\n stderr: String,\n\n },\n\n CommandVersionNotFulfilled {\n\n command: String,\n\n current: Version,\n\n required: VersionReq,\n\n hint: String,\n\n },\n\n\n", "file_path": "src/error.rs", "rank": 35, "score": 26306.731593727083 }, { "content": "}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(&self.inner, formatter)\n\n }\n\n}\n\n\n\nimpl Error {\n\n pub fn kind(&self) -> BuildErrorKind {\n\n self.inner.get_context().clone()\n\n }\n\n}\n\n\n\nimpl From<BuildErrorKind> for Error {\n\n 
fn from(kind: BuildErrorKind) -> Error {\n\n Error {\n\n inner: Context::new(kind),\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 36, "score": 26306.427462725784 }, { "content": "}\n\n\n\nimpl From<Context<BuildErrorKind>> for Error {\n\n fn from(inner: Context<BuildErrorKind>) -> Error {\n\n Error { inner }\n\n }\n\n}\n\n\n\nimpl From<Context<String>> for Error {\n\n fn from(inner: Context<String>) -> Error {\n\n Error {\n\n inner: inner.map(BuildErrorKind::InternalError),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> From<Context<&'a str>> for Error {\n\n fn from(inner: Context<&'a str>) -> Error {\n\n Self::from(inner.map(String::from))\n\n }\n", "file_path": "src/error.rs", "rank": 37, "score": 26304.804142766832 }, { "content": " InvalidCratePath(path) => write!(\n\n formatter,\n\n \"{}: {}\",\n\n \"Invalid device crate path\".bold(),\n\n path.display()\n\n ),\n\n\n\n BuildFailed(lines) => write!(\n\n formatter,\n\n \"{}\\n{}\",\n\n \"Unable to build a PTX crate!\".bold(),\n\n lines.join(\"\\n\")\n\n ),\n\n\n\n InvalidCrateType(crate_type) => write!(\n\n formatter,\n\n \"{}: the crate cannot be build as '{}'\",\n\n \"Impossible CrateType\".bold(),\n\n crate_type\n\n ),\n", "file_path": "src/error.rs", "rank": 38, "score": 26303.152713916355 }, { "content": " \"Command failed: '{}' with code '{}' and output:\\n{}\",\n\n command.bold(),\n\n code,\n\n stderr.trim(),\n\n ),\n\n\n\n CommandVersionNotFulfilled {\n\n command,\n\n current,\n\n required,\n\n hint,\n\n } => write!(\n\n formatter,\n\n \"Command version is not fulfilled: '{}' is currently '{}' but '{}' is required. 
{}.\",\n\n command.bold(),\n\n current.to_string().underline(),\n\n required.to_string().underline(),\n\n hint.underline(),\n\n ),\n\n\n", "file_path": "src/error.rs", "rank": 39, "score": 26302.587281694432 }, { "content": "\n\n MissingCrateType => write!(\n\n formatter,\n\n \"{}: it's mandatory for mixed-type crates\",\n\n \"Missing CrateType\".bold()\n\n ),\n\n\n\n InternalError(message) => write!(formatter, \"{}: {}\", \"Internal error\".bold(), message),\n\n OtherError => write!(formatter, \"Other error\"),\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 40, "score": 26301.86199167721 }, { "content": "#[derive(Hash, Clone, Debug)]\n\n/// Information about CUDA crate.\n\npub struct Crate {\n\n name: String,\n\n path: PathBuf,\n\n output_file_prefix: String,\n\n deps_file_prefix: FilePrefix,\n\n}\n\n\n\nimpl Crate {\n\n /// Try to locate a crate at the `path` and collect needed information.\n\n pub fn analyse<P: AsRef<Path>>(path: P) -> Result<Self> {\n\n let path = {\n\n env::current_dir()\n\n .context(BuildErrorKind::OtherError)?\n\n .join(&path)\n\n };\n\n\n\n match fs::metadata(path.join(\"Cargo.toml\")) {\n\n Ok(metadata) => {\n", "file_path": "src/source.rs", "rank": 41, "score": 25935.414253016577 }, { "content": " }\n\n\n\n /// Returns temporary crate build location.\n\n pub fn get_output_path(&self) -> Result<PathBuf> {\n\n let mut path = env::temp_dir().join(\"ptx-builder-0.5\");\n\n\n\n path.push(&self.output_file_prefix);\n\n path.push(format!(\"{:x}\", self.get_hash()));\n\n\n\n fs::create_dir_all(&path).context(BuildErrorKind::OtherError)?;\n\n Ok(path)\n\n }\n\n\n\n fn get_hash(&self) -> u64 {\n\n let mut hasher = DefaultHasher::new();\n\n self.hash(&mut hasher);\n\n\n\n hasher.finish()\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/source.rs", "rank": 42, "score": 25934.944437351496 }, { "content": "\n\n toml::from_str(&contents).context(BuildErrorKind::OtherError)?\n\n };\n\n\n\n let cargo_toml_name = match 
cargo_toml[\"package\"][\"name\"].as_str() {\n\n Some(name) => name,\n\n None => {\n\n bail!(BuildErrorKind::InternalError(String::from(\n\n \"Cannot get crate name\"\n\n )));\n\n }\n\n };\n\n\n\n let is_library = path.join(\"src\").join(\"lib.rs\").exists();\n\n let is_binary = path.join(\"src\").join(\"main.rs\").exists();\n\n\n\n let output_file_prefix = cargo_toml_name.replace(\"-\", \"_\");\n\n\n\n let deps_file_prefix = match (is_binary, is_library) {\n\n (false, true) => FilePrefix::Library(format!(\"lib{}\", output_file_prefix)),\n", "file_path": "src/source.rs", "rank": 43, "score": 25932.694017309455 }, { "content": " if metadata.is_dir() {\n\n bail!(BuildErrorKind::InvalidCratePath(path.clone()));\n\n }\n\n }\n\n\n\n Err(_) => {\n\n bail!(BuildErrorKind::InvalidCratePath(path.clone()));\n\n }\n\n }\n\n\n\n let cargo_toml: toml::Value = {\n\n let mut reader = BufReader::new(\n\n fs::File::open(path.join(\"Cargo.toml\")).context(BuildErrorKind::OtherError)?,\n\n );\n\n\n\n let mut contents = String::new();\n\n\n\n reader\n\n .read_to_string(&mut contents)\n\n .context(BuildErrorKind::OtherError)?;\n", "file_path": "src/source.rs", "rank": 44, "score": 25931.2474625911 }, { "content": " (true, false) => FilePrefix::Binary(cargo_toml_name.to_string()),\n\n\n\n (true, true) => FilePrefix::Mixed {\n\n lib: format!(\"lib{}\", output_file_prefix),\n\n bin: cargo_toml_name.to_string(),\n\n },\n\n\n\n (false, false) => {\n\n bail!(BuildErrorKind::InternalError(\n\n \"Unable to find neither `lib.rs` nor `main.rs`\".into()\n\n ));\n\n }\n\n };\n\n\n\n Ok(Crate {\n\n name: cargo_toml_name.to_string(),\n\n path,\n\n output_file_prefix,\n\n deps_file_prefix,\n\n })\n", "file_path": "src/source.rs", "rank": 45, "score": 25930.250202319934 }, { "content": " }\n\n\n\n /// Returns PTX assmbly filename prefix.\n\n pub fn get_output_file_prefix(&self) -> &str {\n\n &self.output_file_prefix\n\n }\n\n\n\n /// Returns deps file filename prefix.\n\n pub fn 
get_deps_file_prefix(&self, crate_type: Option<CrateType>) -> Result<String> {\n\n match (&self.deps_file_prefix, crate_type) {\n\n (FilePrefix::Library(prefix), Some(CrateType::Library)) => Ok(prefix.clone()),\n\n (FilePrefix::Library(prefix), None) => Ok(prefix.clone()),\n\n\n\n (FilePrefix::Binary(prefix), Some(CrateType::Binary)) => Ok(prefix.clone()),\n\n (FilePrefix::Binary(prefix), None) => Ok(prefix.clone()),\n\n\n\n (FilePrefix::Mixed { bin, .. }, Some(CrateType::Binary)) => Ok(bin.clone()),\n\n (FilePrefix::Mixed { lib, .. }, Some(CrateType::Library)) => Ok(lib.clone()),\n\n (FilePrefix::Mixed { .. }, None) => {\n\n bail!(BuildErrorKind::MissingCrateType);\n", "file_path": "src/source.rs", "rank": 46, "score": 25929.27601555404 }, { "content": "use std::collections::hash_map::DefaultHasher;\n\nuse std::env;\n\nuse std::fs;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::io::{BufReader, Read};\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse failure::ResultExt;\n\nuse toml;\n\n\n\nuse crate::builder::CrateType;\n\nuse crate::error::*;\n\n\n\n#[derive(Hash, Clone, Debug)]\n\npub enum FilePrefix {\n\n Library(String),\n\n Binary(String),\n\n Mixed { lib: String, bin: String },\n\n}\n\n\n", "file_path": "src/source.rs", "rank": 47, "score": 25929.22062855298 }, { "content": " }\n\n\n\n (FilePrefix::Library(_), Some(CrateType::Binary)) => {\n\n bail!(BuildErrorKind::InvalidCrateType(\"Binary\".into()));\n\n }\n\n\n\n (FilePrefix::Binary(_), Some(CrateType::Library)) => {\n\n bail!(BuildErrorKind::InvalidCrateType(\"Library\".into()));\n\n }\n\n }\n\n }\n\n\n\n /// Returns crate name.\n\n pub fn get_name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n /// Returns crate root path.\n\n pub fn get_path(&self) -> &Path {\n\n &self.path.as_path()\n", "file_path": "src/source.rs", "rank": 48, "score": 25928.140533255082 }, { "content": "\n\n fn get_required_version(&self) -> Option<VersionReq> {\n\n None\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_not_provide_output() {\n\n 
let output = ExecutableRunner::new(NonExistingCommand).run();\n\n\n\n match output.unwrap_err().kind() {\n\n BuildErrorKind::CommandNotFound { command, hint } => {\n\n assert_eq!(command, String::from(\"almost-unique-name\"));\n\n assert_eq!(hint, String::from(\"Some useful hint\"));\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n }\n\n}\n", "file_path": "tests/executable.rs", "rank": 49, "score": 24117.12901960239 }, { "content": " Some(VersionReq::parse(\"> 100.0.0\").unwrap())\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_not_provide_output() {\n\n let output = ExecutableRunner::new(UnrealisticCommand).run();\n\n\n\n match output.unwrap_err().kind() {\n\n BuildErrorKind::CommandVersionNotFulfilled {\n\n command,\n\n required,\n\n hint,\n\n ..\n\n } => {\n\n assert_eq!(command, String::from(\"cargo\"));\n\n assert_eq!(required, VersionReq::parse(\"> 100.0.0\").unwrap());\n\n assert_eq!(hint, String::from(\"Some useful hint about version\"));\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n\n }\n\n}\n", "file_path": "tests/executable.rs", "rank": 50, "score": 24115.924589418675 }, { "content": " fn should_check_exit_code() {\n\n let output = ExecutableRunner::new(Cargo)\n\n .with_args(&[\"rustc\", \"-q\", \"--unknown-flag\"])\n\n .with_cwd(\"tests/fixtures/sample-crate\")\n\n .run();\n\n\n\n match output.unwrap_err().kind() {\n\n BuildErrorKind::CommandFailed {\n\n command,\n\n code,\n\n stderr,\n\n } => {\n\n assert_eq!(command, String::from(\"cargo\"));\n\n assert_eq!(code, 1);\n\n\n\n assert!(stderr.contains(\"argument '--unknown-flag'\"));\n\n }\n\n\n\n _ => unreachable!(\"it should fail with proper error\"),\n\n }\n", "file_path": "tests/executable.rs", "rank": 51, "score": 24115.454333902333 }, { "content": " let mut reporter = ErrorLogPrinter::print(chained_error.unwrap_err().into());\n\n\n\n assert_eq!(\n\n reporter.disable_colors().to_string(),\n\n \"[PTX] Unable to build a PTX crate!\n\n[PTX] 
error[E0425]: cannot find function `external_fn` in this scope\n\n[PTX] --> src/lib.rs:6:20\n\n[PTX] |\n\n[PTX] 6 | *y.offset(0) = external_fn(*x.offset(0)) * a;\n\n[PTX] | ^^^^^^^^^^^ not found in this scope\n\n[PTX]\n\n[PTX] caused by:\n\n[PTX] Internal error: internal error\n\n[PTX]\n\n[PTX] caused by:\n\n[PTX] Command failed: 'some_name' with code '0' and output:\n\n[PTX] some\n\n[PTX] multiline\n\n[PTX] output\"\n\n );\n\n}\n", "file_path": "tests/reporter.rs", "rank": 52, "score": 24114.933778385304 }, { "content": "use semver::VersionReq;\n\n\n\nuse ptx_builder::error::*;\n\nuse ptx_builder::executable::{Cargo, Executable, ExecutableRunner};\n\n\n\nmod cargo {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_provide_output() {\n\n let output = ExecutableRunner::new(Cargo)\n\n .with_args(&[\"rustc\", \"-q\", \"--\", \"--print\", \"crate-name\"])\n\n .with_cwd(\"tests/fixtures/sample-crate\")\n\n .run();\n\n\n\n assert_eq!(output.is_ok(), true);\n\n assert_eq!(output.unwrap().stdout, String::from(\"sample_ptx_crate\\n\"));\n\n }\n\n\n\n #[test]\n", "file_path": "tests/executable.rs", "rank": 53, "score": 24112.648849797533 }, { "content": "use failure::ResultExt;\n\n\n\nuse ptx_builder::error::*;\n\nuse ptx_builder::reporter::ErrorLogPrinter;\n\n\n\n#[test]\n", "file_path": "tests/reporter.rs", "rank": 54, "score": 24109.205523789708 }, { "content": " }\n\n}\n\n\n\nmod non_existing_command {\n\n use super::*;\n\n\n\n struct NonExistingCommand;\n\n\n\n impl Executable for NonExistingCommand {\n\n fn get_name(&self) -> String {\n\n String::from(\"almost-unique-name\")\n\n }\n\n\n\n fn get_verification_hint(&self) -> String {\n\n String::from(\"Some useful hint\")\n\n }\n\n\n\n fn get_version_hint(&self) -> String {\n\n String::from(\"Some useful hint about version\")\n\n }\n", "file_path": "tests/executable.rs", "rank": 55, "score": 24100.13874847598 }, { "content": "\n\nmod unrealistic_version_requirement {\n\n use super::*;\n\n\n\n struct 
UnrealisticCommand;\n\n\n\n impl Executable for UnrealisticCommand {\n\n fn get_name(&self) -> String {\n\n String::from(\"cargo\")\n\n }\n\n\n\n fn get_verification_hint(&self) -> String {\n\n String::from(\"Some useful hint\")\n\n }\n\n\n\n fn get_version_hint(&self) -> String {\n\n String::from(\"Some useful hint about version\")\n\n }\n\n\n\n fn get_required_version(&self) -> Option<VersionReq> {\n", "file_path": "tests/executable.rs", "rank": 56, "score": 24100.098748317363 }, { "content": " /// Can also be the same crate, for single-source mode:\n\n /// ``` no_run\n\n /// use ptx_builder::prelude::*;\n\n /// # use ptx_builder::error::Result;\n\n ///\n\n /// # fn main() -> Result<()> {\n\n /// match Builder::new(\".\")?.build()? {\n\n /// BuildStatus::Success(output) => {\n\n /// // do something with the output...\n\n /// }\n\n ///\n\n /// BuildStatus::NotNeeded => {\n\n /// // ...\n\n /// }\n\n /// }\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> {\n\n Ok(Builder {\n", "file_path": "src/builder.rs", "rank": 57, "score": 23879.016575465135 }, { "content": " /// Can be used from `build.rs` script to provide Rust with the path\n\n /// via environment variable:\n\n /// ```no_run\n\n /// use ptx_builder::prelude::*;\n\n /// # use ptx_builder::error::Result;\n\n ///\n\n /// # fn main() -> Result<()> {\n\n /// if let BuildStatus::Success(output) = Builder::new(\".\")?.build()? 
{\n\n /// println!(\n\n /// \"cargo:rustc-env=KERNEL_PTX_PATH={}\",\n\n /// output.get_assembly_path().display()\n\n /// );\n\n /// }\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_assembly_path(&self) -> PathBuf {\n\n self.output_path\n\n .join(TARGET_NAME)\n\n .join(self.builder.profile.to_string())\n", "file_path": "src/builder.rs", "rank": 58, "score": 23877.95108251058 }, { "content": " .output_path\n\n .join(TARGET_NAME)\n\n .join(self.builder.profile.to_string())\n\n .join(format!(\n\n \"{}.d\",\n\n self.builder\n\n .source_crate\n\n .get_deps_file_prefix(self.builder.crate_type)?\n\n ));\n\n\n\n let mut crate_deps_reader =\n\n BufReader::new(File::open(crate_deps_path).context(BuildErrorKind::OtherError)?);\n\n\n\n let mut crate_deps_contents = String::new();\n\n\n\n crate_deps_reader\n\n .read_to_string(&mut crate_deps_contents)\n\n .context(BuildErrorKind::OtherError)?;\n\n\n\n Ok(crate_deps_contents)\n", "file_path": "src/builder.rs", "rank": 59, "score": 23877.72697316266 }, { "content": " .join(\"deps\")\n\n .join(format!(\n\n \"{}{}.ptx\",\n\n self.builder.source_crate.get_output_file_prefix(),\n\n self.file_suffix,\n\n ))\n\n }\n\n\n\n /// Returns a list of crate dependencies.\n\n ///\n\n /// # Usage\n\n /// Can be used from `build.rs` script to notify Cargo the dependencies,\n\n /// so it can automatically rebuild on changes:\n\n /// ```no_run\n\n /// use ptx_builder::prelude::*;\n\n /// # use ptx_builder::error::Result;\n\n ///\n\n /// # fn main() -> Result<()> {\n\n /// if let BuildStatus::Success(output) = Builder::new(\".\")?.build()? {\n\n /// for path in output.dependencies()? {\n", "file_path": "src/builder.rs", "rank": 60, "score": 23876.87069673542 }, { "content": " .with_env(\"CARGO_TARGET_DIR\", output_path.clone());\n\n\n\n let cargo_output = cargo.run().map_err(|error| match error.kind() {\n\n BuildErrorKind::CommandFailed { stderr, .. 
} => {\n\n let lines = stderr\n\n .trim_matches('\\n')\n\n .split('\\n')\n\n .filter(Self::output_is_not_verbose)\n\n .map(String::from)\n\n .collect();\n\n\n\n Error::from(BuildErrorKind::BuildFailed(lines))\n\n }\n\n\n\n _ => error,\n\n })?;\n\n\n\n Ok(BuildStatus::Success(\n\n self.prepare_output(output_path, &cargo_output.stderr)?,\n\n ))\n", "file_path": "src/builder.rs", "rank": 61, "score": 23876.65206056859 }, { "content": "use std::env;\n\nuse std::fmt;\n\nuse std::fs::{read_to_string, write, File};\n\nuse std::io::{BufReader, Read};\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse failure::ResultExt;\n\nuse lazy_static::*;\n\nuse regex::Regex;\n\n\n\nuse crate::error::*;\n\nuse crate::executable::{Cargo, ExecutableRunner, Linker};\n\nuse crate::source::Crate;\n\n\n\nconst LAST_BUILD_CMD: &str = \".last-build-command\";\n\nconst TARGET_NAME: &str = \"nvptx64-nvidia-cuda\";\n\n\n\n/// Core of the crate - PTX assembly build controller.\n\n#[derive(Debug)]\n\npub struct Builder {\n", "file_path": "src/builder.rs", "rank": 62, "score": 23876.463543719627 }, { "content": " }\n\n\n\n fn prepare_output(&self, output_path: PathBuf, cargo_stderr: &str) -> Result<BuildOutput> {\n\n lazy_static! 
{\n\n static ref SUFFIX_REGEX: Regex =\n\n Regex::new(r\"-C extra-filename=([\\S]+)\").expect(\"Unable to parse regex...\");\n\n }\n\n\n\n let crate_name = self.source_crate.get_output_file_prefix();\n\n\n\n // We need the build command to get real output filename.\n\n let build_command = {\n\n cargo_stderr\n\n .trim_matches('\\n')\n\n .split('\\n')\n\n .find(|line| {\n\n line.contains(&format!(\"--crate-name {}\", crate_name))\n\n && line.contains(\"--crate-type cdylib\")\n\n })\n\n .map(|line| BuildCommand::Realtime(line.to_string()))\n", "file_path": "src/builder.rs", "rank": 63, "score": 23875.725941976343 }, { "content": " source_crate: Crate,\n\n\n\n profile: Profile,\n\n colors: bool,\n\n crate_type: Option<CrateType>,\n\n}\n\n\n\n/// Successful build output.\n\n#[derive(Debug)]\n\npub struct BuildOutput<'a> {\n\n builder: &'a Builder,\n\n output_path: PathBuf,\n\n file_suffix: String,\n\n}\n\n\n\n/// Non-failed build status.\n\n#[derive(Debug)]\n\npub enum BuildStatus<'a> {\n\n /// The CUDA crate building was performed without errors.\n\n Success(BuildOutput<'a>),\n", "file_path": "src/builder.rs", "rank": 64, "score": 23874.80898083371 }, { "content": " .or_else(|| Self::load_cached_build_command(&output_path))\n\n .ok_or_else(|| {\n\n Error::from(BuildErrorKind::InternalError(String::from(\n\n \"Unable to find build command of the device crate\",\n\n )))\n\n })?\n\n };\n\n\n\n if let BuildCommand::Realtime(ref command) = build_command {\n\n Self::store_cached_build_command(&output_path, &command)?;\n\n }\n\n\n\n let file_suffix = match SUFFIX_REGEX.captures(&build_command) {\n\n Some(caps) => caps[1].to_string(),\n\n\n\n None => {\n\n bail!(BuildErrorKind::InternalError(String::from(\n\n \"Unable to find `extra-filename` rustc flag\",\n\n )));\n\n }\n", "file_path": "src/builder.rs", "rank": 65, "score": 23873.764737470694 }, { "content": " /// the package by passing e.g. 
`--lib` or `--bin NAME` to specify a single target\n\n /// ```\n\n pub fn set_crate_type(mut self, crate_type: CrateType) -> Self {\n\n self.crate_type = Some(crate_type);\n\n self\n\n }\n\n\n\n /// Performs an actual build: runs `cargo` with proper flags and environment.\n\n pub fn build(&self) -> Result<BuildStatus> {\n\n if !Self::is_build_needed() {\n\n return Ok(BuildStatus::NotNeeded);\n\n }\n\n\n\n // Verify `ptx-linker` version.\n\n ExecutableRunner::new(Linker).with_args(vec![\"-V\"]).run()?;\n\n\n\n let mut cargo = ExecutableRunner::new(Cargo);\n\n let mut args = Vec::new();\n\n\n\n args.push(\"rustc\");\n", "file_path": "src/builder.rs", "rank": 66, "score": 23873.198823931703 }, { "content": "\n\n /// The CUDA crate building is not needed. Can happend in several cases:\n\n /// - `build.rs` script was called by **RLS**,\n\n /// - `build.rs` was called **recursively** (e.g. `build.rs` call for device crate in single-source setup)\n\n NotNeeded,\n\n}\n\n\n\n/// Debug / Release profile.\n\n///\n\n/// # Usage\n\n/// ``` no_run\n\n/// use ptx_builder::prelude::*;\n\n/// # use ptx_builder::error::Result;\n\n///\n\n/// # fn main() -> Result<()> {\n\n/// Builder::new(\".\")?\n\n/// .set_profile(Profile::Debug)\n\n/// .build()?;\n\n/// # Ok(())\n\n/// # }\n", "file_path": "src/builder.rs", "rank": 67, "score": 23872.54856932976 }, { "content": " fn store_cached_build_command(output_path: &Path, command: &str) -> Result<()> {\n\n write(output_path.join(LAST_BUILD_CMD), command.as_bytes())\n\n .context(BuildErrorKind::OtherError)?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'a> BuildOutput<'a> {\n\n fn new(builder: &'a Builder, output_path: PathBuf, file_suffix: String) -> Self {\n\n BuildOutput {\n\n builder,\n\n output_path,\n\n file_suffix,\n\n }\n\n }\n\n\n\n /// Returns path to PTX assembly file.\n\n ///\n\n /// # Usage\n", "file_path": "src/builder.rs", "rank": 68, "score": 23872.435856842774 }, { "content": " /// println!(\"cargo:rerun-if-changed={}\", 
path.display());\n\n /// }\n\n /// }\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn dependencies(&self) -> Result<Vec<PathBuf>> {\n\n let mut deps_contents = {\n\n self.get_deps_file_contents()\n\n .context(\"Unable to get crate deps\")?\n\n };\n\n\n\n if deps_contents.is_empty() {\n\n bail!(BuildErrorKind::InternalError(String::from(\n\n \"Empty deps file\",\n\n )));\n\n }\n\n\n\n deps_contents = deps_contents\n\n .chars()\n", "file_path": "src/builder.rs", "rank": 69, "score": 23872.387192882838 }, { "content": "/// ``` no_run\n\n/// use ptx_builder::prelude::*;\n\n/// # use ptx_builder::error::Result;\n\n///\n\n/// # fn main() -> Result<()> {\n\n/// Builder::new(\".\")?\n\n/// .set_crate_type(CrateType::Library)\n\n/// .build()?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n#[derive(Clone, Copy, Debug)]\n\npub enum CrateType {\n\n Library,\n\n Binary,\n\n}\n\n\n\nimpl Builder {\n\n /// Construct a builder for device crate at `path`.\n\n ///\n", "file_path": "src/builder.rs", "rank": 70, "score": 23870.74640353425 }, { "content": " }\n\n\n\n /// Disable colors for internal calls to `cargo`.\n\n pub fn disable_colors(mut self) -> Self {\n\n self.colors = false;\n\n self\n\n }\n\n\n\n /// Set build profile.\n\n pub fn set_profile(mut self, profile: Profile) -> Self {\n\n self.profile = profile;\n\n self\n\n }\n\n\n\n /// Set crate type that needs to be built.\n\n ///\n\n /// Mandatory for mixed crates - that have both `lib.rs` and `main.rs`,\n\n /// otherwise Cargo won't know which to build:\n\n /// ```text\n\n /// error: extra arguments to `rustc` can only be passed to one target, consider filtering\n", "file_path": "src/builder.rs", "rank": 71, "score": 23870.58985591522 }, { "content": "/// ```\n\n#[derive(PartialEq, Clone, Debug)]\n\npub enum Profile {\n\n /// Equivalent for `cargo-build` **without** `--release` flag.\n\n Debug,\n\n\n\n /// Equivalent for `cargo-build` **with** `--release` flag.\n\n Release,\n\n}\n\n\n\n/// Build specified crate 
type.\n\n///\n\n/// Mandatory for mixed crates - that have both `lib.rs` and `main.rs`,\n\n/// otherwise Cargo won't know which to build:\n\n/// ```text\n\n/// error: extra arguments to `rustc` can only be passed to one target, consider filtering\n\n/// the package by passing e.g. `--lib` or `--bin NAME` to specify a single target\n\n/// ```\n\n///\n\n/// # Usage\n", "file_path": "src/builder.rs", "rank": 72, "score": 23869.43180593219 }, { "content": " .skip(3) // workaround for Windows paths starts wuth \"[A-Z]:\\\"\n\n .skip_while(|c| *c != ':')\n\n .skip(1)\n\n .collect::<String>();\n\n\n\n let cargo_deps = vec![\n\n self.builder.source_crate.get_path().join(\"Cargo.toml\"),\n\n self.builder.source_crate.get_path().join(\"Cargo.lock\"),\n\n ];\n\n\n\n Ok(deps_contents\n\n .trim()\n\n .split(' ')\n\n .map(|item| PathBuf::from(item.trim()))\n\n .chain(cargo_deps.into_iter())\n\n .collect())\n\n }\n\n\n\n fn get_deps_file_contents(&self) -> Result<String> {\n\n let crate_deps_path = self\n", "file_path": "src/builder.rs", "rank": 73, "score": 23869.164033877194 }, { "content": " source_crate: Crate::analyse(path).context(\"Unable to analyse source crate\")?,\n\n\n\n profile: Profile::Release, // TODO: choose automatically, e.g.: `env::var(\"PROFILE\").unwrap_or(\"release\".to_string())`\n\n colors: true,\n\n crate_type: None,\n\n })\n\n }\n\n\n\n /// Returns bool indicating whether the actual build is needed.\n\n ///\n\n /// Behavior is consistent with\n\n /// [`BuildStatus::NotNeeded`](enum.BuildStatus.html#variant.NotNeeded).\n\n pub fn is_build_needed() -> bool {\n\n let cargo_env = env::var(\"CARGO\");\n\n let recursive_env = env::var(\"PTX_CRATE_BUILDING\");\n\n\n\n let is_rls_build = cargo_env.is_ok() && cargo_env.unwrap().ends_with(\"rls\");\n\n let is_recursive_build = recursive_env.is_ok() && recursive_env.unwrap() == \"1\";\n\n\n\n !is_rls_build && !is_recursive_build\n", "file_path": "src/builder.rs", "rank": 74, "score": 23868.88877452528 }, { 
"content": " };\n\n\n\n Ok(BuildOutput::new(self, output_path, file_suffix))\n\n }\n\n\n\n fn output_is_not_verbose(line: &&str) -> bool {\n\n !line.starts_with(\"+ \")\n\n && !line.contains(\"Running\")\n\n && !line.contains(\"Fresh\")\n\n && !line.starts_with(\"Caused by:\")\n\n && !line.starts_with(\" process didn\\'t exit successfully: \")\n\n }\n\n\n\n fn load_cached_build_command(output_path: &Path) -> Option<BuildCommand> {\n\n match read_to_string(output_path.join(LAST_BUILD_CMD)) {\n\n Ok(contents) => Some(BuildCommand::Cached(contents)),\n\n Err(_) => None,\n\n }\n\n }\n\n\n", "file_path": "src/builder.rs", "rank": 75, "score": 23868.828511290583 }, { "content": "\n\n _ => {}\n\n }\n\n\n\n args.push(\"-v\");\n\n args.push(\"--\");\n\n args.push(\"--crate-type\");\n\n args.push(\"cdylib\");\n\n args.push(\"-Zcrate-attr=no_main\");\n\n\n\n let output_path = {\n\n self.source_crate\n\n .get_output_path()\n\n .context(\"Unable to create output path\")?\n\n };\n\n\n\n cargo\n\n .with_args(&args)\n\n .with_cwd(self.source_crate.get_path())\n\n .with_env(\"PTX_CRATE_BUILDING\", \"1\")\n", "file_path": "src/builder.rs", "rank": 76, "score": 23868.546485379804 }, { "content": "\n\n if self.profile == Profile::Release {\n\n args.push(\"--release\");\n\n }\n\n\n\n args.push(\"--color\");\n\n args.push(if self.colors { \"always\" } else { \"never\" });\n\n\n\n args.push(\"--target\");\n\n args.push(TARGET_NAME);\n\n\n\n match self.crate_type {\n\n Some(CrateType::Binary) => {\n\n args.push(\"--bin\");\n\n args.push(self.source_crate.get_name());\n\n }\n\n\n\n Some(CrateType::Library) => {\n\n args.push(\"--lib\");\n\n }\n", "file_path": "src/builder.rs", "rank": 77, "score": 23867.582139460264 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Profile {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Profile::Debug => write!(f, \"debug\"),\n\n Profile::Release => write!(f, \"release\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/builder.rs", "rank": 78, "score": 23866.26039221468 }, { "content": "#![feature(abi_ptx, core_intrinsics)]\n\n#![no_std]\n\n\n\n#[no_mangle]\n\npub unsafe extern \"ptx-kernel\" fn the_kernel(x: *const f64, y: *mut f64, a: f64) {\n\n *y.offset(0) = external_fn(*x.offset(0)) * a;\n\n}\n\n\n\n#[panic_handler]\n\nunsafe fn breakpoint_panic_handler(_: &::core::panic::PanicInfo) -> ! {\n\n core::intrinsics::breakpoint();\n\n core::hint::unreachable_unchecked();\n\n}\n", "file_path": "tests/fixtures/faulty-crate/src/lib.rs", "rank": 79, "score": 18984.85911369927 }, { "content": "#![feature(abi_ptx, core_intrinsics)]\n\n#![no_std]\n\n\n\nmod mod1;\n\nmod mod2;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"ptx-kernel\" fn the_kernel(x: *const f64, y: *mut f64, a: f64) {\n\n *y.offset(0) = *x.offset(0) * a;\n\n}\n\n\n\n#[panic_handler]\n\nunsafe fn breakpoint_panic_handler(_: &::core::panic::PanicInfo) -> ! {\n\n core::intrinsics::breakpoint();\n\n core::hint::unreachable_unchecked();\n\n}\n", "file_path": "tests/fixtures/app-crate/src/main.rs", "rank": 80, "score": 18982.800152162403 }, { "content": "#![feature(abi_ptx, core_intrinsics)]\n\n#![no_std]\n\n\n\nmod mod1;\n\nmod mod2;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"ptx-kernel\" fn the_kernel(x: *const f64, y: *mut f64, a: f64) {\n\n *y.offset(0) = *x.offset(0) * a;\n\n}\n\n\n\n#[panic_handler]\n\nunsafe fn breakpoint_panic_handler(_: &::core::panic::PanicInfo) -> ! {\n\n core::intrinsics::breakpoint();\n\n core::hint::unreachable_unchecked();\n\n}\n", "file_path": "tests/fixtures/mixed-crate/src/lib.rs", "rank": 81, "score": 18982.800152162403 }, { "content": "#![feature(abi_ptx, core_intrinsics)]\n\n#![no_std]\n\n\n\nmod mod1;\n\nmod mod2;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"ptx-kernel\" fn the_kernel(x: *const f64, y: *mut f64, a: f64) {\n\n *y.offset(0) = *x.offset(0) * a;\n\n}\n\n\n\n#[panic_handler]\n\nunsafe fn breakpoint_panic_handler(_: &::core::panic::PanicInfo) -> ! 
{\n\n core::intrinsics::breakpoint();\n\n core::hint::unreachable_unchecked();\n\n}\n", "file_path": "tests/fixtures/sample-crate/src/lib.rs", "rank": 82, "score": 18982.800152162403 }, { "content": "#![feature(abi_ptx)]\n\n#![no_std]\n\n\n\nmod mod1;\n\nmod mod2;\n\n\n\n#[cfg(not(target_os = \"cuda\"))]\n", "file_path": "tests/fixtures/mixed-crate/src/main.rs", "rank": 83, "score": 18978.511957061834 }, { "content": "", "file_path": "tests/fixtures/mixed-crate/src/mod1.rs", "rank": 84, "score": 18975.739213730016 }, { "content": "", "file_path": "tests/fixtures/app-crate/src/mod1.rs", "rank": 85, "score": 18975.739213730016 }, { "content": "", "file_path": "tests/fixtures/sample-crate/src/mod2.rs", "rank": 86, "score": 18975.739213730016 }, { "content": "", "file_path": "tests/fixtures/mixed-crate/src/mod2.rs", "rank": 87, "score": 18975.739213730016 }, { "content": "", "file_path": "tests/fixtures/app-crate/src/mod2.rs", "rank": 88, "score": 18975.739213730016 }, { "content": "", "file_path": "tests/fixtures/sample-crate/src/mod1.rs", "rank": 89, "score": 18975.739213730016 }, { "content": "## Features\n\n1. Obviously, device crates building.\n\n2. Announcing device crates sources to cargo, so it can automatically rebuild after changes.\n\n3. Reporting about missing tools, for example:\n\n```\n\n[PTX] Unable to get target details\n\n[PTX]\n\n[PTX] caused by:\n\n[PTX] Command not found in PATH: 'rust-ptx-linker'. 
You can install it with: 'cargo install ptx-linker'.\n\n```\n\n\n\n## Prerequirements\n\nThe library depends on a fresh Nightly and [ptx-linker](https://crates.io/crates/ptx-linker).\n\nThe latter can be installed from crates.io:\n\n```\n\ncargo install ptx-linker\n\n```\n\n\n\n## Usage\n\nFirst, you need to specify a build script in host crate's `Cargo.toml` and declare the library as a *build-dependency*:\n\n``` toml\n\n[build-dependencies]\n\nptx-builder = \"0.5\"\n\n```\n\n\n\nThen, typical `build.rs` might look like:\n\n``` rust\n\nuse ptx_builder::error::Result;\n\nuse ptx_builder::prelude::*;\n\n\n\nfn main() -> Result<()> {\n\n let builder = Builder::new(\".\")?;\n\n CargoAdapter::with_env_var(\"KERNEL_PTX_PATH\").build(builder);\n\n}\n\n```\n", "file_path": "README.md", "rank": 90, "score": 13198.30254672747 }, { "content": "# Rust PTX Builder\n\n[![Build Status](https://travis-ci.org/denzp/rust-ptx-builder.svg?branch=master)](https://travis-ci.org/denzp/rust-ptx-builder)\n\n[![Current Version](https://img.shields.io/crates/v/ptx-builder.svg)](https://crates.io/crates/ptx-builder)\n\n[![Docs](https://docs.rs/ptx-builder/badge.svg)](https://docs.rs/ptx-builder)\n\n\n\n## New Release: 0.5 🎉\n\n### Say goodbye to proxy crate approach\n\nThis allows us to use single-source CUDA in **binary**-only crates (ones without `lib.rs`).\n\n\n\n### Development breaking changes\n\nThe crate does not provide a default `panic_handler` anymore.\n\nFrom now on, it either up to a user, or other crates (e.g. coming soon [`ptx-support` crate](https://github.com/denzp/rust-ptx-support)).\n\n\n\nNext workaround should work in common cases,\n\nalthough it doesn't provide any panic details in runtime:\n\n``` rust\n\n#![feature(core_intrinsics)]\n\n\n\n#[panic_handler]\n\nunsafe fn breakpoint_panic_handler(_: &::core::panic::PanicInfo) -> ! 
{\n\n core::intrinsics::breakpoint();\n\n core::hint::unreachable_unchecked();\n\n}\n\n```\n\n\n\n### API Breaking Changes - less boilerplate code\n\n`build.rs` script was never so compact and clear before:\n\n``` rust\n\nuse ptx_builder::error::Result;\n\nuse ptx_builder::prelude::*;\n\n\n\nfn main() -> Result<()> {\n\n let builder = Builder::new(\".\")?;\n\n CargoAdapter::with_env_var(\"KERNEL_PTX_PATH\").build(builder);\n\n}\n\n```\n\n\n\n### Documentation improvements\n\nThis release comes with a significant documentation improvement! [Check it out](https://docs.rs/ptx-builder) :)\n\n\n\n## Purpose\n\nThe library should facilitate CUDA development with Rust.\n\nIt can be used in a [cargo build script](http://doc.crates.io/build-script.html) of a host crate, and take responsibility for building device crates.\n\n\n", "file_path": "README.md", "rank": 91, "score": 13197.504690067062 }, { "content": "use std::fmt;\n\nuse std::process::exit;\n\n\n\nuse colored::*;\n\nuse failure::Fail;\n\n\n\nuse crate::builder::{BuildStatus, Builder};\n\nuse crate::error::*;\n\n\n\n/// Cargo integration adapter.\n\n///\n\n/// Provides PTX assembly path to Rust through specified environment variable name\n\n/// and informs Cargo about device crate dependencies, so it can rebuild on changes.\n\n///\n\n/// # Usage in `build.rs`\n\n/// ```no_run\n\n/// use ptx_builder::error::Result;\n\n/// use ptx_builder::prelude::*;\n\n///\n\n/// fn main() -> Result<()> {\n", "file_path": "src/reporter.rs", "rank": 92, "score": 13.72636323896644 }, { "content": " };\n\n\n\n if !raw_output.status.success() {\n\n bail!(BuildErrorKind::CommandFailed {\n\n command: executable.get_name(),\n\n code: raw_output.status.code().unwrap_or(-1),\n\n stderr: output.stderr,\n\n });\n\n }\n\n\n\n let version_regex = Regex::new(&format!(r\"{}\\s(\\S+)\", executable.get_name()))\n\n .context(BuildErrorKind::OtherError)?;\n\n\n\n match version_regex.captures(&(output.stdout + &output.stderr)) {\n\n Some(captures) => 
Ok(Version::parse(&captures[1]).context(BuildErrorKind::OtherError)?),\n\n\n\n None => Err(Error::from(BuildErrorKind::InternalError(\n\n \"Unable to find executable version\".into(),\n\n ))),\n\n }\n\n}\n", "file_path": "src/executable/runner.rs", "rank": 93, "score": 12.793311821434175 }, { "content": "/// Error handling.\n\n#[macro_use]\n\npub mod error;\n\n\n\n/// External executables that are needed to build CUDA crates.\n\npub mod executable;\n\n\n\n/// Build helpers.\n\npub mod builder;\n\n\n\n/// Build reporting helpers.\n\npub mod reporter;\n\n\n\nmod source;\n\n\n\n/// Convenient re-exports of mostly used types.\n\npub mod prelude {\n\n pub use crate::builder::{BuildStatus, Builder, CrateType, Profile};\n\n pub use crate::reporter::{CargoAdapter, ErrorLogPrinter};\n\n}\n", "file_path": "src/lib.rs", "rank": 94, "score": 12.383557763214174 }, { "content": " }\n\n }\n\n\n\n /// Runs build process and reports artifacts to Cargo.\n\n ///\n\n /// Depends on whether the build was successful or not, will either\n\n /// call `exit(0)` or `exit(1)` and print error log to `stderr`.\n\n #[allow(clippy::needless_pass_by_value)]\n\n pub fn build(&self, builder: Builder) -> ! {\n\n if let Err(error) = self.build_inner(&builder) {\n\n eprintln!(\"{}\", ErrorLogPrinter::print(error));\n\n exit(1);\n\n } else {\n\n exit(0);\n\n }\n\n }\n\n\n\n fn build_inner(&self, builder: &Builder) -> Result<()> {\n\n match builder.build()? 
{\n\n BuildStatus::Success(output) => {\n", "file_path": "src/reporter.rs", "rank": 95, "score": 12.345223265061197 }, { "content": " self.command.output().with_context(|_| {\n\n BuildErrorKind::InternalError(format!(\n\n \"Unable to execute command '{}'\",\n\n self.executable.get_name()\n\n ))\n\n })?\n\n };\n\n\n\n let output = Output {\n\n stdout: String::from_utf8(raw_output.stdout).context(BuildErrorKind::OtherError)?,\n\n stderr: String::from_utf8(raw_output.stderr).context(BuildErrorKind::OtherError)?,\n\n };\n\n\n\n if raw_output.status.success() {\n\n Ok(output)\n\n } else {\n\n Err(Error::from(BuildErrorKind::CommandFailed {\n\n command: self.executable.get_name(),\n\n code: raw_output.status.code().unwrap_or(-1),\n\n stderr: output.stderr,\n", "file_path": "src/executable/runner.rs", "rank": 96, "score": 11.979772127316256 }, { "content": "}\n\n\n\n/// Nice error log printer.\n\n///\n\n/// ```no_run\n\n/// use std::process::exit;\n\n/// use ptx_builder::prelude::*;\n\n/// # use ptx_builder::error::Result;\n\n///\n\n/// fn main() {\n\n/// if let Err(error) = build() {\n\n/// eprintln!(\"{}\", ErrorLogPrinter::print(error));\n\n/// exit(1);\n\n/// }\n\n/// }\n\n/// # fn build() -> Result<()> {\n\n/// # use ptx_builder::error::*;\n\n/// # Err(BuildErrorKind::InternalError(\"any...\".into()).into())\n\n/// # }\n\npub struct ErrorLogPrinter {\n", "file_path": "src/reporter.rs", "rank": 97, "score": 11.97893745593342 }, { "content": " }\n\n}\n\n\n\npub(crate) fn parse_executable_version<E: Executable>(executable: &E) -> Result<Version> {\n\n let mut command = Command::new(executable.get_name());\n\n\n\n command.args(&[\"-V\"]);\n\n\n\n let raw_output = {\n\n command\n\n .output()\n\n .with_context(|_| BuildErrorKind::CommandNotFound {\n\n command: executable.get_name(),\n\n hint: executable.get_verification_hint(),\n\n })?\n\n };\n\n\n\n let output = Output {\n\n stdout: String::from_utf8(raw_output.stdout).context(BuildErrorKind::OtherError)?,\n\n stderr: 
String::from_utf8(raw_output.stderr).context(BuildErrorKind::OtherError)?,\n", "file_path": "src/executable/runner.rs", "rank": 98, "score": 11.124282132076306 }, { "content": " let dependencies = output.dependencies()?;\n\n\n\n println!(\n\n \"cargo:rustc-env={}={}\",\n\n self.env_name,\n\n output.get_assembly_path().display()\n\n );\n\n\n\n for path in dependencies {\n\n println!(\"cargo:rerun-if-changed={}\", path.display());\n\n }\n\n }\n\n\n\n BuildStatus::NotNeeded => {\n\n println!(\"cargo:rustc-env={}=/dev/null\", self.env_name);\n\n }\n\n };\n\n\n\n Ok(())\n\n }\n", "file_path": "src/reporter.rs", "rank": 99, "score": 10.39368932902371 } ]
Rust
src/collector.rs
playXE/test
50fae1922ef116908d09cdf81b301c153e980416
use super::{allocation::ImmixSpace, block::ImmixBlock, constants::*, CollectionType}; use crate::{large_object_space::LargeObjectSpace, object::*, util::*}; use alloc::collections::VecDeque; use core::ptr::NonNull; use vec_map::VecMap; pub struct ImmixCollector; pub struct Visitor<'a> { immix_space: &'a mut ImmixSpace, queue: &'a mut VecDeque<*mut RawGc>, defrag: bool, next_live_mark: bool, } impl<'a> Tracer for Visitor<'a> { fn trace(&mut self, reference: &mut NonNull<RawGc>) { unsafe { let mut child = &mut *reference.as_ptr(); if child.is_forwarded() { debug!("Child {:p} is forwarded to 0x{:x}", child, child.vtable()); *reference = NonNull::new_unchecked(child.vtable() as *mut _); } else if (&*child).get_mark() != self.next_live_mark { if self.defrag && self.immix_space.filter_fast(Address::from_ptr(child)) { if let Some(new_child) = self.immix_space.maybe_evacuate(child) { *reference = NonNull::new_unchecked(new_child.to_mut_ptr()); debug!("Evacuated child {:p} to {}", child, new_child); child = &mut *new_child.to_mut_ptr::<RawGc>(); } } debug!("Push child {:p} into object queue", child); self.queue.push_back(child); } } } } impl ImmixCollector { pub fn collect( collection_type: &CollectionType, roots: &[*mut RawGc], precise_roots: &[*mut *mut RawGc], immix_space: &mut ImmixSpace, next_live_mark: bool, ) -> usize { let mut object_queue: VecDeque<*mut RawGc> = roots.iter().copied().collect(); for root in precise_roots.iter() { unsafe { let root = &mut **root; let mut raw = &mut **root; if immix_space.filter_fast(Address::from_ptr(raw)) { if raw.is_forwarded() { raw = &mut *(raw.vtable() as *mut RawGc); } else if *collection_type == CollectionType::ImmixEvacCollection { if let Some(new_object) = immix_space.maybe_evacuate(raw) { *root = new_object.to_mut_ptr::<RawGc>(); raw.set_forwarded(new_object.to_usize()); raw = &mut *new_object.to_mut_ptr::<RawGc>(); } } } object_queue.push_back(raw); } } let mut visited = 0; while let Some(object) = object_queue.pop_front() 
{ unsafe { let object_addr = Address::from_ptr(object); if !(&mut *object).mark(next_live_mark) { if immix_space.filter_fast(object_addr) { let block = ImmixBlock::get_block_ptr(object_addr); immix_space.set_gc_object(object_addr); (&mut *block).line_object_mark(object_addr); } debug!("Object {:p} was unmarked: visit their children", object); visited += (&*object).object_size(); let visitor_fn = (&*object).rtti().visit_references; { let mut visitor = core::mem::transmute::<_, Visitor<'static>>(Visitor { immix_space, next_live_mark, queue: &mut object_queue, defrag: *collection_type == CollectionType::ImmixEvacCollection, }); visitor_fn( object as *mut u8, TracerPtr { tracer: core::mem::transmute(&mut visitor as &mut dyn Tracer), }, ); } } } } debug!("Completed collection with {} bytes visited", visited); visited } } use alloc::vec::Vec; pub struct Collector { all_blocks: Vec<*mut ImmixBlock>, mark_histogram: VecMap<usize>, } impl Default for Collector { fn default() -> Self { Self::new() } } impl Collector { pub fn new() -> Self { Self { all_blocks: Vec::new(), mark_histogram: VecMap::with_capacity(NUM_LINES_PER_BLOCK), } } pub fn extend_all_blocks(&mut self, blocks: Vec<*mut ImmixBlock>) { self.all_blocks.extend(blocks); } pub fn prepare_collection( &mut self, evacuation: bool, _cycle_collect: bool, available_blocks: usize, evac_headroom: usize, total_blocks: usize, emergency: bool, ) -> CollectionType { if emergency && USE_EVACUATION { for block in &mut self.all_blocks { unsafe { (**block).evacuation_candidate = true; } } return CollectionType::ImmixEvacCollection; } let mut perform_evac = evacuation; let evac_threshhold = (total_blocks as f64 * EVAC_TRIGGER_THRESHHOLD) as usize; let available_evac_blocks = available_blocks + evac_headroom; debug!( "total blocks={},evac threshold={}, available evac blocks={}", total_blocks, evac_threshhold, available_evac_blocks ); if evacuation || available_evac_blocks < evac_threshhold { let hole_threshhold = 
self.establish_hole_threshhold(evac_headroom); debug!("evac threshold={}", hole_threshhold); perform_evac = USE_EVACUATION && hole_threshhold > 0; if perform_evac { for block in &mut self.all_blocks { unsafe { (**block).evacuation_candidate = (**block).hole_count as usize >= hole_threshhold; } } } } match (false, perform_evac, true) { (true, false, true) => CollectionType::ImmixCollection, (true, true, true) => CollectionType::ImmixEvacCollection, (false, false, _) => CollectionType::ImmixCollection, (false, true, _) => CollectionType::ImmixEvacCollection, _ => CollectionType::ImmixCollection, } } pub fn collect( &mut self, collection_type: &CollectionType, roots: &[*mut RawGc], precise_roots: &[*mut *mut RawGc], immix_space: &mut ImmixSpace, large_object_space: &mut LargeObjectSpace, next_live_mark: bool, ) -> usize { for block in &mut self.all_blocks { unsafe { immix_space .bitmap .clear_range((*block) as usize, (*block) as usize + BLOCK_SIZE); (**block).line_map.clear_all(); } } let visited = ImmixCollector::collect( collection_type, roots, precise_roots, immix_space, next_live_mark, ); self.mark_histogram.clear(); let (recyclable_blocks, free_blocks) = self.sweep_all_blocks(); immix_space.set_recyclable_blocks(recyclable_blocks); let evac_headroom = if USE_EVACUATION { EVAC_HEADROOM - immix_space.evac_headroom() } else { 0 }; immix_space.extend_evac_headroom(free_blocks.iter().take(evac_headroom).copied()); immix_space.return_blocks(free_blocks.iter().skip(evac_headroom).copied()); large_object_space.sweep(); visited } fn sweep_all_blocks(&mut self) -> (Vec<*mut ImmixBlock>, Vec<*mut ImmixBlock>) { let mut unavailable_blocks = Vec::new(); let mut recyclable_blocks = Vec::new(); let mut free_blocks = Vec::new(); for block in self.all_blocks.drain(..) 
{ if unsafe { (*block).is_empty() } { unsafe { (*block).reset(); } debug!("Push block {:p} into free_blocks", block); free_blocks.push(block); } else { unsafe { (*block).count_holes(); } let (holes, marked_lines) = unsafe { (*block).count_holes_and_marked_lines() }; if self.mark_histogram.contains_key(holes) { if let Some(val) = self.mark_histogram.get_mut(holes) { *val += marked_lines; } } else { self.mark_histogram.insert(holes, marked_lines); } debug!( "Found {} holes and {} marked lines in block {:p}", holes, marked_lines, block ); match holes { 0 => { debug!("Push block {:p} into unavailable_blocks", block); unavailable_blocks.push(block); } _ => { debug!("Push block {:p} into recyclable_blocks", block); recyclable_blocks.push(block); } } } } self.all_blocks = unavailable_blocks; (recyclable_blocks, free_blocks) } fn establish_hole_threshhold(&self, evac_headroom: usize) -> usize { let mut available_histogram: VecMap<usize> = VecMap::with_capacity(NUM_LINES_PER_BLOCK); for &block in &self.all_blocks { let (holes, free_lines) = unsafe { (*block).count_holes_and_available_lines() }; if available_histogram.contains_key(holes) { if let Some(val) = available_histogram.get_mut(holes) { *val += free_lines; } } else { available_histogram.insert(holes, free_lines); } } let mut required_lines = 0; let mut available_lines = evac_headroom * (NUM_LINES_PER_BLOCK - 1); for threshold in 0..NUM_LINES_PER_BLOCK { required_lines += *self.mark_histogram.get(threshold).unwrap_or(&0); available_lines = available_lines.saturating_sub(*available_histogram.get(threshold).unwrap_or(&0)); if available_lines <= required_lines { return threshold; } } NUM_LINES_PER_BLOCK } }
use super::{allocation::ImmixSpace, block::ImmixBlock, constants::*, CollectionType}; use crate::{large_object_space::LargeObjectSpace, object::*, util::*}; use alloc::collections::VecDeque; use core::ptr::NonNull; use vec_map::VecMap; pub struct ImmixCollector; pub struct Visitor<'a> { immix_space: &'a mut ImmixSpace, queue: &'a mut VecDeque<*mut RawGc>, defrag: bool, next_live_mark: bool, } impl<'a> Tracer for Visitor<'a> { fn trace(&mut self, reference: &mut NonNull<RawGc>) { unsafe { let mut child = &mut *reference.as_ptr(); if child.is_forwarded() { debug!("Child {:p} is forwarded to 0x{:x}", child, child.vtable()); *reference = NonNull::new_unchecked(child.vtable() as *mut _); } else if (&*child).get_mark() != self.next_live_mark { if self.defrag && self.immix_space.filter_fast(Address::from_ptr(child)) { if let Some(new_child) = self.immix_space.maybe_evacuate(child) { *reference = NonNull::new_unchecked(new_child.to_mut_ptr()); debug!("Evacuated child {:p} to {}", child, new_child); child = &mut *new_child.to_mut_ptr::<RawGc>(); } } debug!("Push child {:p} into object queue", child); self.queue.push_back(child); } } } } impl ImmixCollector { pub fn collect( collection_type: &CollectionType, roots: &[*mut RawGc], precise_roots: &[*mut *mut RawGc], immix_space: &mut ImmixSpace, next_live_mark: bool, ) -> usize { let mut object_queue: VecDeque<*mut RawGc> = roots.iter().copied().collect(); for root in precise_roots.iter() { unsafe { let root = &mut **root; let mut raw = &mut **root; if immix_space.filter_fast(Address::from_ptr(raw)) { if raw.is_forwarded() { raw = &mut *(raw.vtable() as *mut RawGc); } else if *collection_type == CollectionType::ImmixEvacCollection { if let Some(new_object) = immix_space.maybe_evacuate(raw) { *root = new_object.to_mut_ptr::<RawGc>(); raw.set_forwarded(new_object.to_usize()); raw = &mut *new_object.to_mut_ptr::<RawGc>(); } } } object_queue.push_back(raw); } } let mut visited = 0; while let Some(object) = object_queue.pop_front() 
{ unsafe { let object_addr = Address::from_ptr(object); if !(&mut *object).mark(next_live_mark) { if immix_space.filter_fast(object_addr) { let block = ImmixBlock::get_block_ptr(object_addr); immix_space.set_gc_object(object_addr); (&mut *block).line_object_mark(object_addr); } debug!("Object {:p} was unmarked: visit their children", object); visited += (&*object).object_size(); let visitor_fn = (&*object).rtti().visit_references; { let mut visitor = core::mem::transmute::<_, Visitor<'static>>(Visitor { immix_space, next_live_mark, queue: &mut object_queue, defrag: *collection_type == CollectionType::ImmixEvacCollection, }); visitor_fn( object as *mut u8, TracerPtr { tracer: core::mem::transmute(&mut visitor as &mut dyn Tracer), }, ); } } } } debug!("Completed collection with {} bytes visited", visited); visited } } use alloc::vec::Vec; pub struct Collector { all_blocks: Vec<*mut ImmixBlock>, mark_histogram: VecMap<usize>, } impl Default for Collector { fn default() -> Self { Self::new() } } impl Collector { pub fn new() -> Self { Self { all_blocks: Vec::new(), mark_histogram: VecMap::with_capacity(NUM_LINES_PER_BLOCK), } } pub fn extend_all_blocks(&mut self, blocks: Vec<*mut ImmixBlock>) { self.all_blocks.extend(blocks); } pub fn prepare_collection( &mut self, evacuation: bool, _cycle_collect: bool, available_blocks: usize, evac_headroom: usize, total_blocks: usize, emergency: bool, ) -> CollectionType { if emergency && USE_EVACUATION { for block in &mut self.all_blocks { unsafe { (**block).evacuation_candidate = true; } } return CollectionType::ImmixEvacCollection; } let mut perform_evac = evacuation; let evac_threshhold = (total_blocks as f64 * EVAC_TRIGGER_THRESHHOLD) as usize; let available_evac_blocks = available_blocks + evac_headroom; debug!( "total blocks={},evac threshold={}, available evac blocks={}", total_blocks, evac_threshhold, available_evac_blocks ); if evacuation || available_evac_blocks < evac_threshhold { let hole_threshhold = 
self.establish_hole_threshhold(evac_headroom); debug!("evac threshold={}", hole_threshhold); perform_evac = USE_EVACUATION && hole_threshhold > 0; if perform_evac { for block in &mut self.all_blocks { unsafe { (**block).evacuation_candidate = (**block).hole_count as usize >= hole_threshhold; } } } } match (false, perform_evac, true) { (true, false, true) => CollectionType::ImmixCollection, (true, true, true) => CollectionType::ImmixEvacCollection, (false, false, _) => CollectionType::ImmixCollection, (false, true, _) => CollectionType::ImmixEvacCollection, _ => CollectionType::ImmixCollection, } }
fn sweep_all_blocks(&mut self) -> (Vec<*mut ImmixBlock>, Vec<*mut ImmixBlock>) { let mut unavailable_blocks = Vec::new(); let mut recyclable_blocks = Vec::new(); let mut free_blocks = Vec::new(); for block in self.all_blocks.drain(..) { if unsafe { (*block).is_empty() } { unsafe { (*block).reset(); } debug!("Push block {:p} into free_blocks", block); free_blocks.push(block); } else { unsafe { (*block).count_holes(); } let (holes, marked_lines) = unsafe { (*block).count_holes_and_marked_lines() }; if self.mark_histogram.contains_key(holes) { if let Some(val) = self.mark_histogram.get_mut(holes) { *val += marked_lines; } } else { self.mark_histogram.insert(holes, marked_lines); } debug!( "Found {} holes and {} marked lines in block {:p}", holes, marked_lines, block ); match holes { 0 => { debug!("Push block {:p} into unavailable_blocks", block); unavailable_blocks.push(block); } _ => { debug!("Push block {:p} into recyclable_blocks", block); recyclable_blocks.push(block); } } } } self.all_blocks = unavailable_blocks; (recyclable_blocks, free_blocks) } fn establish_hole_threshhold(&self, evac_headroom: usize) -> usize { let mut available_histogram: VecMap<usize> = VecMap::with_capacity(NUM_LINES_PER_BLOCK); for &block in &self.all_blocks { let (holes, free_lines) = unsafe { (*block).count_holes_and_available_lines() }; if available_histogram.contains_key(holes) { if let Some(val) = available_histogram.get_mut(holes) { *val += free_lines; } } else { available_histogram.insert(holes, free_lines); } } let mut required_lines = 0; let mut available_lines = evac_headroom * (NUM_LINES_PER_BLOCK - 1); for threshold in 0..NUM_LINES_PER_BLOCK { required_lines += *self.mark_histogram.get(threshold).unwrap_or(&0); available_lines = available_lines.saturating_sub(*available_histogram.get(threshold).unwrap_or(&0)); if available_lines <= required_lines { return threshold; } } NUM_LINES_PER_BLOCK } }
pub fn collect( &mut self, collection_type: &CollectionType, roots: &[*mut RawGc], precise_roots: &[*mut *mut RawGc], immix_space: &mut ImmixSpace, large_object_space: &mut LargeObjectSpace, next_live_mark: bool, ) -> usize { for block in &mut self.all_blocks { unsafe { immix_space .bitmap .clear_range((*block) as usize, (*block) as usize + BLOCK_SIZE); (**block).line_map.clear_all(); } } let visited = ImmixCollector::collect( collection_type, roots, precise_roots, immix_space, next_live_mark, ); self.mark_histogram.clear(); let (recyclable_blocks, free_blocks) = self.sweep_all_blocks(); immix_space.set_recyclable_blocks(recyclable_blocks); let evac_headroom = if USE_EVACUATION { EVAC_HEADROOM - immix_space.evac_headroom() } else { 0 }; immix_space.extend_evac_headroom(free_blocks.iter().take(evac_headroom).copied()); immix_space.return_blocks(free_blocks.iter().skip(evac_headroom).copied()); large_object_space.sweep(); visited }
function_block-full_function
[ { "content": "/// Check if `mem` is aligned for precise allocation\n\npub fn is_aligned_for_precise_allocation(mem: *mut u8) -> bool {\n\n let allocable_ptr = mem as usize;\n\n (allocable_ptr & (PreciseAllocation::ALIGNMENT - 1)) == 0\n\n}\n\n/// This space contains objects which are larger than the size limits of other spaces.\n\n/// Each object gets its own malloc'd region of memory.\n\n/// Large objects are never moved by the garbage collector.\n\npub struct LargeObjectSpace {\n\n allocations: alloc::vec::Vec<*mut PreciseAllocation>,\n\n pub(crate) current_live_mark: bool,\n\n}\n\n\n\nimpl Default for LargeObjectSpace {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl LargeObjectSpace {\n\n pub fn new() -> Self {\n", "file_path": "src/large_object_space.rs", "rank": 0, "score": 206199.19199585437 }, { "content": "/// Returns true if the pointer has the given bit set to 1.\n\npub fn bit_is_set(pointer: u64, bit: usize) -> bool {\n\n let shifted = 1 << bit;\n\n\n\n (pointer as u64 & shifted) == shifted\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 1, "score": 157443.5650276111 }, { "content": "pub fn addr_in_safepoint(addr: usize) -> bool {\n\n unsafe {\n\n let safepoint_addr = SAFEPOINT_PAGE as usize;\n\n\n\n addr == safepoint_addr\n\n }\n\n}\n", "file_path": "src/safepoint.rs", "rank": 2, "score": 150709.2169251254 }, { "content": "pub fn object_ty_of<T: HeapObject>(_: *const T) -> usize {\n\n &T::RTTI as *const GCRTTI as usize\n\n}\n\n\n", "file_path": "src/object.rs", "rank": 3, "score": 143531.5923760146 }, { "content": "pub fn safepoint_start_gc() -> bool {\n\n //assert!(get_tls_state().gc_state == GC_STATE_WAITING);\n\n unsafe {\n\n let lock = SAFEPOINT_LOCK.lock();\n\n\n\n if GC_RUNNING.compare_exchange_weak(false, true, Ordering::SeqCst, Ordering::Relaxed)\n\n != Ok(false)\n\n {\n\n // if other thread started GC first we suspend current thread and allow other thread to run GC cycle.\n\n drop(lock);\n\n safepoint_wait_gc();\n\n return 
false;\n\n }\n\n\n\n enable_safepoint(&*THREADS.threads.lock());\n\n drop(lock);\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/safepoint.rs", "rank": 4, "score": 139862.66909054999 }, { "content": "/// rounds the given value `val` up to the nearest multiple\n\n/// of `align`.\n\npub fn align_usize(value: usize, align: usize) -> usize {\n\n if align == 0 {\n\n return value;\n\n }\n\n\n\n ((value + align - 1) / align) * align\n\n}\n\n\n\n/// A garbage collected pointer to a value.\n\n///\n\n/// This is the equivalent of a garbage collected smart-pointer.\n\n///\n\n/// The smart pointer is simply a guarantee to the garbage collector\n\n/// that this points to a garbage collected object with the correct header,\n\n/// and not some arbitrary bits that you've decided to heap allocate.\n\n///\n\n/// NOTE: GC is smart enough to find out that for example reference like this `&*my_gc`\n\n/// on stack points into some object by aligning down to 16 bytes so you do not have to worry about it.\n\npub struct Gc<T: HeapObject + ?Sized> {\n\n pub ptr: NonNull<RawGc>,\n", "file_path": "src/object.rs", "rank": 5, "score": 138832.86804275966 }, { "content": "/// Visits garbage collected objects\n\n///\n\n/// This should only be used by a [HeapImpl]\n\npub trait Tracer {\n\n /// Traces a reference to a specified value.\n\n fn trace(&mut self, reference: &mut NonNull<RawGc>);\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
make_rtti_for {\n\n ($t: ty) => {\n\n GCRTTI {\n\n visit_references: {\n\n extern \"C\" fn visit(data: *mut u8, trace: TracerPtr) {\n\n unsafe {\n\n (*data.add(8).cast::<$t>()).visit_references(&mut *core::mem::transmute::<\n\n [usize; 2],\n\n *mut dyn Tracer,\n\n >(\n\n trace.tracer\n\n ));\n\n }\n\n }\n", "file_path": "src/object.rs", "rank": 6, "score": 138597.26165428504 }, { "content": "/// Returns the given pointer without any tags set.\n\npub fn untagged<T>(pointer: u64) -> *mut T {\n\n (pointer as u64 & UNTAG_MASK as u64) as _\n\n}\n\n\n\n/// Structure wrapping a raw, tagged pointer.\n\n#[derive(Debug)]\n\n#[repr(C)]\n\npub struct TaggedPointer<T> {\n\n pub raw: u64,\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> TaggedPointer<T> {\n\n /// Returns a new TaggedPointer without setting any bits.\n\n pub fn new(raw: *mut T) -> TaggedPointer<T> {\n\n TaggedPointer {\n\n raw: raw as u64,\n\n _marker: PhantomData,\n\n }\n\n }\n", "file_path": "src/util.rs", "rank": 7, "score": 137659.8791616846 }, { "content": "pub fn object_ty_of_type<T: HeapObject + Sized>() -> usize {\n\n let result = object_ty_of(core::ptr::null::<T>());\n\n debug_assert_ne!(result, 0);\n\n result\n\n}\n\nimpl RawGc {\n\n /*pub fn as_dyn(&self) -> &'static mut dyn HeapObject {\n\n unsafe {\n\n core::mem::transmute(core::raw::TraitObject {\n\n vtable: self.vtable() as *mut (),\n\n data: self.data() as *mut (),\n\n })\n\n }\n\n }\n\n */\n\n pub fn rtti(&self) -> &GCRTTI {\n\n unsafe { &*(self.vtable() as *mut GCRTTI) }\n\n }\n\n pub fn object_size(&self) -> usize {\n\n align_usize(\n", "file_path": "src/object.rs", "rank": 8, "score": 133791.86570892154 }, { "content": "pub fn formatted_size(size: usize) -> FormattedSize {\n\n FormattedSize { size }\n\n}\n\n#[macro_export]\n\nmacro_rules! 
get_sp {\n\n () => {{\n\n #[inline(always)]\n\n #[allow(dead_code)]\n\n fn generic_get_sp() -> usize {\n\n let val = 0usize;\n\n let p = &val as *const usize;\n\n p as usize\n\n }\n\n\n\n let sp: usize;\n\n #[allow(unused_unsafe)]\n\n unsafe {\n\n\n\n #[cfg(target_arch=\"x86_64\")]\n\n {\n", "file_path": "src/util.rs", "rank": 9, "score": 128089.63444920667 }, { "content": "/// Returns the pointer with the given bit set.\n\npub fn with_bit(pointer: u64, bit: usize) -> u64 {\n\n (pointer as u64 | 1 << bit as u64) as _\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 10, "score": 123124.03583198355 }, { "content": "pub fn install_default_signal_handlers() {\n\n unsafe {\n\n allocate_segv_handler();\n\n }\n\n}\n", "file_path": "src/signals/unix.rs", "rank": 11, "score": 119875.87188605139 }, { "content": "pub fn install_default_signal_handlers() {\n\n unsafe {\n\n if libc::signal(libc::SIGSEGV, crt_sig_handler as _) == libc::SIG_ERR as usize {\n\n panic!(\"can't set signal handler\");\n\n }\n\n SetUnhandledExceptionFilter(Some(exception_handler));\n\n }\n\n}\n", "file_path": "src/signals/windows.rs", "rank": 12, "score": 119875.87188605139 }, { "content": "pub fn without_bit(pointer: u64, bit: usize) -> u64 {\n\n pointer & !(1 << bit)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 13, "score": 119272.94508304403 }, { "content": "pub fn safepoint_end_gc(threads: &[*mut TLSState]) {\n\n unsafe {\n\n let l = SAFEPOINT_LOCK.lock();\n\n\n\n //disable_safepoint(1);\n\n disable_safepoint(2, threads);\n\n GC_RUNNING.store(false, Ordering::Release);\n\n drop(l);\n\n }\n\n}\n\n\n", "file_path": "src/safepoint.rs", "rank": 14, "score": 118930.53709993784 }, { "content": "pub fn safepoint_wait_gc() {\n\n while GC_RUNNING.load(Ordering::Relaxed) || GC_RUNNING.load(Ordering::Acquire) {\n\n core::hint::spin_loop();\n\n }\n\n}\n\n\n", "file_path": "src/safepoint.rs", "rank": 15, "score": 99400.85403340062 }, { "content": "pub fn safepoint_wait_for_the_world(\n\n) -> 
parking_lot::MutexGuard<'static, alloc::vec::Vec<*mut TLSState>> {\n\n let threads = &*THREADS;\n\n let ctls = immix_get_tls_state() as *mut _;\n\n //panic!();\n\n let lock = threads.threads.lock();\n\n\n\n for th in lock.iter() {\n\n if *th == ctls {\n\n continue;\n\n }\n\n\n\n let ptls = unsafe { &mut **th };\n\n\n\n while ptls.atomic_gc_state().load(Ordering::Relaxed) == 0\n\n || ptls.atomic_gc_state().load(Ordering::Acquire) == 0\n\n {\n\n core::hint::spin_loop();\n\n }\n\n /*unsafe {\n\n ptls.stack_end = ptls.safepoint.read() as *mut u8;\n\n }*/\n\n }\n\n lock\n\n}\n\n\n", "file_path": "src/safepoint.rs", "rank": 16, "score": 99400.85403340062 }, { "content": "/// Indicates that a type can be traced and safely allocated by a garbage collector.\n\n///\n\n///\n\n/// ## Safety\n\n/// See the documentation of the `visit_references` method for more info.\n\n/// Essentially, this object must faithfully trace anything that\n\n/// could contain garbage collected pointers or other `HeapObject` items.\n\n///\n\n/// Custom destructors must never reference garbage collected pointers.\n\n/// The garbage collector may have already freed the other objects\n\n/// before calling this type's drop function.\n\n///\n\n/// Unlike java finalizers, this allows us to deallocate objects normally\n\n/// and avoids a second pass over the objects\n\n/// to check for resurrected objects.\n\npub trait HeapObject {\n\n const RTTI: GCRTTI;\n\n /// Visit each field in this type.\n\n ///\n\n ///\n\n /// Users should never invoke this method.\n\n /// Only the collector itself is premitted to call this method,\n\n /// and **it is undefined behavior for the user to invoke this**.\n\n ///\n\n ///\n\n /// Structures should trace each of their fields,\n\n /// and collections should trace each of their elements.\n\n ///\n\n /// ### Safety\n\n /// Some types (like `Gc`) need special actions taken when they're traced,\n\n /// but those are easily handled: just invoke `visit_references` on `Gc`,\n\n /// 
and it will be properly passed to `tracer`.\n\n ///\n\n /// ## Always Permitted\n\n /// - Reading your own memory (includes iteration)\n", "file_path": "src/object.rs", "rank": 17, "score": 90442.51898159493 }, { "content": "pub fn immix_alloc_safe<T: HeapObject>(value: T) -> Gc<T> {\n\n unsafe {\n\n let ptr = immix_alloc(value.heap_size(), object_ty_of_type::<T>() as *mut _);\n\n let ptr = ptr as *mut RawGc;\n\n (*ptr).data().cast::<T>().write(value);\n\n Gc {\n\n marker: Default::default(),\n\n ptr: NonNull::new_unchecked(ptr),\n\n }\n\n }\n\n}\n\n\n\n/// Trigger garbage collection. If `move_objects` is true might potentially move unpinned objects.\n\n///\n\n/// \n\n/// NOTE: If libimmixcons was built with `threaded` feature this function inside might wait for other\n\n/// threads to reach yieldpoints or give up to other thread that started collection.\n\n#[no_mangle]\n\n#[inline]\n\npub extern \"C\" fn immix_collect(move_objects: bool) {\n", "file_path": "src/lib.rs", "rank": 18, "score": 87112.21089053457 }, { "content": "fn criterion_bench(c: &mut Criterion) {\n\n let mut sp = 0;\n\n immix_init(\n\n &mut sp,\n\n 2 * 1024 * 1024 * 1024,\n\n 0,\n\n immix_noop_callback,\n\n 0 as *mut _,\n\n );\n\n immix_register_thread(&mut sp as *mut usize);\n\n\n\n c.bench_function(\"libimmixcons (30% threshold)\", |b| b.iter(|| gcbench()));\n\n}\n\ncriterion_group!(benches, criterion_bench);\n\ncriterion_main!(benches);\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 19, "score": 83914.55460025222 }, { "content": "fn gcbench(space: &mut Heap) {\n\n /*simple_logger::SimpleLogger::new()\n\n .with_level(log::LevelFilter::Debug)\n\n .init();*/\n\n\n\n /*println!(\n\n \" Live storage will peak at {}.\\n\",\n\n formatted_size(\n\n (2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth)\n\n + (size_of::<Array>() as i32)) as usize\n\n )\n\n );*/\n\n\n\n /* println!(\n\n \" Stretching memory with a binary tree or depth {}\",\n\n kStretchTreeDepth\n\n 
);*/\n\n let mut long_lived = space.allocate(Node {\n\n left: None,\n\n right: None,\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 20, "score": 83914.55460025222 }, { "content": "fn criterion_bench(c: &mut Criterion) {\n\n let mut heap = Heap::new();\n\n\n\n c.bench_function(\"bdwgc\", |b| b.iter(|| gcbench(&mut heap)));\n\n let mut group = c.benchmark_group(\"threaded\");\n\n group.sample_size(10);\n\n /*group.bench_function(\"bdwgc\", |b| {\n\n b.iter(|| {\n\n let mut threads = Vec::with_capacity(4);\n\n for _ in 0..2 {\n\n threads.push(std::thread::spawn(move || {\n\n let mut heap = heap;\n\n gcbench(&mut heap);\n\n }));\n\n }\n\n\n\n while let Some(th) = threads.pop() {\n\n th.join().unwrap();\n\n }\n\n });\n\n });*/\n\n}\n\ncriterion_group!(benches, criterion_bench);\n\ncriterion_main!(benches);\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 21, "score": 83914.55460025222 }, { "content": "fn gcbench(space: &mut Heap) {\n\n /*simple_logger::SimpleLogger::new()\n\n .with_level(log::LevelFilter::Debug)\n\n .init();*/\n\n\n\n /*println!(\n\n \" Live storage will peak at {}.\\n\",\n\n formatted_size(\n\n (2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth)\n\n + (size_of::<Array>() as i32)) as usize\n\n )\n\n );*/\n\n\n\n /* println!(\n\n \" Stretching memory with a binary tree or depth {}\",\n\n kStretchTreeDepth\n\n );*/\n\n let mut long_lived = space.allocate(Node {\n\n left: None,\n\n right: None,\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 22, "score": 81989.34227541741 }, { "content": "fn criterion_bench(c: &mut Criterion) {\n\n let mut heap = Heap::new();\n\n unsafe {\n\n GC_enable_incremental();\n\n }\n\n\n\n c.bench_function(\"bdwgc incremental\", |b| b.iter(|| gcbench(&mut heap)));\n\n}\n\ncriterion_group!(benches, criterion_bench);\n\ncriterion_main!(benches);\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 23, 
"score": 81989.34227541741 }, { "content": "fn Populate(idepth: i32, thisnode: &mut Gc<Node>, space: &mut Heap) {\n\n if idepth <= 0 {\n\n return;\n\n }\n\n thisnode.left = Some(space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n thisnode.right = Some(space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n Populate(idepth - 1, thisnode.left.as_mut().unwrap(), space);\n\n Populate(idepth - 1, thisnode.right.as_mut().unwrap(), space)\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 24, "score": 77771.6824224894 }, { "content": "fn Populate(idepth: i32, thisnode: &mut Gc<Node>, space: &mut Heap) {\n\n if idepth <= 0 {\n\n return;\n\n }\n\n thisnode.left = Some(space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n thisnode.right = Some(space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n Populate(idepth - 1, thisnode.left.as_mut().unwrap(), space);\n\n Populate(idepth - 1, thisnode.right.as_mut().unwrap(), space)\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 25, "score": 76447.1780143776 }, { "content": "fn Populate(idepth: i32, thisnode: &mut Gc<Node>) {\n\n immix_mutator_yieldpoint();\n\n if idepth <= 0 {\n\n return;\n\n }\n\n thisnode.left = Some(immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n thisnode.right = Some(immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n Populate(idepth - 1, thisnode.left.as_mut().unwrap());\n\n Populate(idepth - 1, thisnode.right.as_mut().unwrap())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 75713.41688485324 }, { "content": "fn run<F>(name: &str, mut configure: F)\n\nwhere\n\n F: FnMut(&mut Command) -> &mut Command,\n\n{\n\n let mut command = Command::new(name);\n\n let configured = configure(&mut command);\n\n if 
!configured.status().is_ok() {\n\n let err = configured.status().unwrap_err();\n\n panic!(\"failed to execute {:?}: {}\", configured, err);\n\n }\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/build.rs", "rank": 27, "score": 73788.20456001844 }, { "content": "#[inline(never)]\n\nfn TimeConstruction(depth: i32, space: &mut Heap) {\n\n let iNumIters = NumIters(depth);\n\n\n\n let start = instant::Instant::now();\n\n for _ in 0..iNumIters {\n\n let mut tempTree = space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n Populate(depth, &mut tempTree, space);\n\n\n\n // destroy tempTree\n\n }\n\n\n\n let start = instant::Instant::now();\n\n for _ in 0..iNumIters {\n\n let tempTree = MakeTree(depth, space);\n\n }\n\n}\n\nconst kStretchTreeDepth: i32 = 18;\n\nconst kLongLivedTreeDepth: i32 = 16;\n\nconst kArraySize: i32 = 500000;\n\nconst kMinTreeDepth: i32 = 4;\n\nconst kMaxTreeDepth: i32 = 16;\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 28, "score": 72659.37038996012 }, { "content": "#[inline(never)]\n\nfn TimeConstruction(depth: i32, space: &mut Heap) {\n\n let iNumIters = NumIters(depth);\n\n\n\n let start = instant::Instant::now();\n\n for _ in 0..iNumIters {\n\n let mut tempTree = space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n Populate(depth, &mut tempTree, space);\n\n\n\n // destroy tempTree\n\n }\n\n\n\n let start = instant::Instant::now();\n\n for _ in 0..iNumIters {\n\n let tempTree = MakeTree(depth, space);\n\n }\n\n}\n\nconst kStretchTreeDepth: i32 = 18;\n\nconst kLongLivedTreeDepth: i32 = 16;\n\nconst kArraySize: i32 = 500000;\n\nconst kMinTreeDepth: i32 = 4;\n\nconst kMaxTreeDepth: i32 = 16;\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 29, "score": 71135.45517734831 }, { "content": "fn Populate(idepth: i32, thisnode: &mut Gc<Node>) {\n\n immix_mutator_yieldpoint();\n\n if idepth <= 0 {\n\n return;\n\n }\n\n thisnode.left = 
Some(immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n thisnode.right = Some(immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n }));\n\n Populate(idepth - 1, thisnode.left.as_mut().unwrap());\n\n Populate(idepth - 1, thisnode.right.as_mut().unwrap())\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 30, "score": 70370.29781477846 }, { "content": "fn MakeTree(idepth: i32, space: &mut Heap) -> Gc<Node> {\n\n if idepth <= 0 {\n\n return space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n } else {\n\n let left = MakeTree(idepth - 1, space);\n\n let right = MakeTree(idepth - 1, space);\n\n let result = space.allocate(Node {\n\n left: Some(left),\n\n right: Some(right),\n\n i: 0,\n\n j: 0,\n\n });\n\n result\n\n }\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 31, "score": 65459.79789590633 }, { "content": "fn MakeTree(idepth: i32, space: &mut Heap) -> Gc<Node> {\n\n if idepth <= 0 {\n\n return space.allocate(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n } else {\n\n let left = MakeTree(idepth - 1, space);\n\n let right = MakeTree(idepth - 1, space);\n\n let result = space.allocate(Node {\n\n left: Some(left),\n\n right: Some(right),\n\n i: 0,\n\n j: 0,\n\n });\n\n result\n\n }\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 32, "score": 64137.50123113688 }, { "content": " uint64_t raw;\n", "file_path": "libimmixcons.h", "rank": 33, "score": 60886.19535094091 }, { "content": " uintptr_t tracer[2];\n", "file_path": "libimmixcons.h", "rank": 34, "score": 60801.109246513355 }, { "content": "struct Array {\n\n value: [f64; kArraySize as usize],\n\n}\n", "file_path": "src/main.rs", "rank": 35, "score": 59579.95047870389 }, { "content": "struct Simple {\n\n x: Gc<i32>,\n\n}\n\n\n\nimpl HeapObject for Simple {\n\n // remove `finalize` if you do not 
have to invoke destructor for object.\n\n const RTTI: GCRTTI = make_rtti_for!(finalize Simple);\n\n fn visit_references(&mut self, tracer: &mut dyn Tracer) {\n\n println!(\"Tracing 'Simple'\");\n\n self.x.visit_references(tracer);\n\n }\n\n}\n\n\n\nimpl Drop for Simple {\n\n fn drop(&mut self) {\n\n println!(\"Drop for 'Simple' invoked after GC\");\n\n }\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 36, "score": 59579.95047870389 }, { "content": "#define EVAC_HEADROOM 5\n\n\n", "file_path": "libimmixcons.h", "rank": 37, "score": 59475.93050746771 }, { "content": "#define EVAC_TRIGGER_THRESHHOLD 0.25\n\n\n", "file_path": "libimmixcons.h", "rank": 38, "score": 58195.89261376191 }, { "content": "#[inline(never)]\n\nfn gcbench() {\n\n /*simple_logger::SimpleLogger::new()\n\n .with_level(log::LevelFilter::Debug)\n\n .init();*/\n\n\n\n /*println!(\n\n \" Live storage will peak at {}.\\n\",\n\n formatted_size(\n\n (2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth)\n\n + (size_of::<Array>() as i32)) as usize\n\n )\n\n );*/\n\n\n\n /* println!(\n\n \" Stretching memory with a binary tree or depth {}\",\n\n kStretchTreeDepth\n\n );*/\n\n let mut long_lived = immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n", "file_path": "src/main.rs", "rank": 39, "score": 56129.55887108501 }, { "content": "#[test]\n\nfn smash() {\n\n init();\n\n\n\n let mut sp = 0;\n\n immix_init(\n\n &mut sp,\n\n 2 * 1024 * 1024 * 1024,\n\n 0,\n\n immix_noop_callback,\n\n 0 as *mut _,\n\n );\n\n //immix_register_thread(&mut sp);\n\n immix_mutator_yieldpoint();\n\n inner_smash();\n\n immix_unregister_thread();\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 40, "score": 56129.55887108501 }, { "content": "#[test]\n\nfn simple() {\n\n init();\n\n let mut sp = 0;\n\n immix_init(\n\n &mut sp,\n\n 2 * 1024 * 1024 * 1024,\n\n 0,\n\n immix_noop_callback,\n\n 0 as *mut _,\n\n );\n\n immix_register_thread(&mut sp);\n\n immix_mutator_yieldpoint();\n\n inner_simple();\n\n 
immix_unregister_thread();\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 41, "score": 56129.55887108501 }, { "content": "fn main() {\n\n immix_init_logger();\n\n let mut sp = 0;\n\n immix_init(\n\n &mut sp,\n\n 2 * 1024 * 1024 * 1024,\n\n 0,\n\n immix_noop_callback,\n\n 0 as *mut _,\n\n );\n\n immix_register_thread(&mut sp as *mut usize);\n\n\n\n gcbench();\n\n\n\n immix_unregister_thread();\n\n println!(\"Done\");\n\n}\n", "file_path": "src/main.rs", "rank": 42, "score": 56129.55887108501 }, { "content": "fn main() {\n\n immix_init_logger();\n\n let mut sp = 0;\n\n immix_init(&mut sp, 0, 0, immix_noop_callback, 0 as *mut _);\n\n immix_register_thread(&mut sp as *mut usize);\n\n {\n\n let p = immix_alloc_safe(42);\n\n let _s = immix_alloc_safe(Simple { x: p });\n\n immix_collect(true);\n\n }\n\n\n\n immix_unregister_thread();\n\n}\n", "file_path": "examples/simple.rs", "rank": 43, "score": 56129.55887108501 }, { "content": "fn init() {\n\n INIT.call_once(|| {\n\n immix_init_logger();\n\n });\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 44, "score": 56129.55887108501 }, { "content": "#define USE_EVACUATION true\n\n\n", "file_path": "libimmixcons.h", "rank": 45, "score": 55454.95083261858 }, { "content": "struct Array {\n\n value: [f64; kArraySize as usize],\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 46, "score": 54892.536044318105 }, { "content": "struct Array {\n\n value: [f64; kArraySize as usize],\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 47, "score": 54892.536044318105 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let mut boehm_src = PathBuf::from(out_dir);\n\n boehm_src.push(BOEHM_DIR);\n\n\n\n run(\"git\", |cmd| {\n\n cmd.arg(\"clone\").arg(BOEHM_REPO).arg(&boehm_src)\n\n });\n\n\n\n run(\"git\", |cmd| {\n\n cmd.arg(\"clone\")\n\n .arg(BOEHM_ATOMICS_REPO)\n\n .current_dir(&boehm_src)\n\n 
});\n\n\n\n env::set_current_dir(&boehm_src).unwrap();\n\n run(\"cmake\", |cmd| cmd.arg(\".\"));\n\n run(\"cmake\", |cmd| {\n", "file_path": "bdwgcvsimmix-bench/build.rs", "rank": 48, "score": 54444.477541845874 }, { "content": "#[inline(never)]\n\nfn inner_smash() {\n\n let mut arr: [Option<Gc<i32>>; 7000] = [None; 7000];\n\n for i in 0..7000 {\n\n arr[i] = Some(immix_alloc_safe(4));\n\n immix_mutator_yieldpoint();\n\n if i % 3000 == 0 {\n\n immix_collect(true);\n\n }\n\n if i % 5678 == 0 {\n\n assert_eq!(*arr[i / 2000].unwrap(), 4);\n\n **arr[i / 2000].as_mut().unwrap() = 42;\n\n }\n\n }\n\n assert!(true);\n\n}\n", "file_path": "src/tests.rs", "rank": 49, "score": 54444.477541845874 }, { "content": "#[inline(never)]\n\nfn inner_simple() {\n\n let p = immix_alloc_safe(42);\n\n assert_eq!(*p, 42);\n\n immix_collect(true);\n\n let x = immix_alloc_safe(3);\n\n println!(\"{:p} {:p}\", &p, &x);\n\n assert_eq!(*x, 3);\n\n assert_eq!(*p, 42);\n\n println!(\"simple done\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 50, "score": 54444.477541845874 }, { "content": "/// Trait for the allocators in the immix space.\n\n///\n\n/// Only use `get_all_blocks()` and `allocate()` from outside.\n\npub trait Allocator {\n\n /// Get all block managed by the allocator, draining any local\n\n /// collections.\n\n fn get_all_blocks(&mut self) -> alloc::vec::Vec<*mut ImmixBlock>;\n\n\n\n /// Get the current block to allocate from.\n\n fn take_current_block(&mut self) -> Option<BlockTuple>;\n\n\n\n /// Set the current block to allocate from.\n\n fn put_current_block(&mut self, block_tuple: BlockTuple);\n\n\n\n /// Get a new block from a block resource.\n\n fn get_new_block(&mut self) -> Option<BlockTuple>;\n\n\n\n /// Callback if no hole of `size` bytes was found in the current block.\n\n fn handle_no_hole(&mut self, size: usize) -> Option<BlockTuple>;\n\n\n\n /// Callback if the given `block` has no holes left.\n\n fn handle_full_block(&mut self, block: *mut ImmixBlock);\n\n\n", 
"file_path": "src/allocation.rs", "rank": 51, "score": 53747.21911323229 }, { "content": "struct Array {\n\n value: [f64; kArraySize as usize],\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 52, "score": 53617.98430391889 }, { "content": "fn main() {\n\n unsafe {\n\n GC_init();\n\n let p = GC_malloc(8);\n\n println!(\"{:x}\", p);\n\n }\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "bdwgcvsimmix-bench/src/main.rs", "rank": 53, "score": 52930.409326351684 }, { "content": "#[inline(never)]\n\nfn gcbench() {\n\n /*simple_logger::SimpleLogger::new()\n\n .with_level(log::LevelFilter::Debug)\n\n .init();*/\n\n\n\n /*println!(\n\n \" Live storage will peak at {}.\\n\",\n\n formatted_size(\n\n (2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth)\n\n + (size_of::<Array>() as i32)) as usize\n\n )\n\n );*/\n\n\n\n /* println!(\n\n \" Stretching memory with a binary tree or depth {}\",\n\n kStretchTreeDepth\n\n );*/\n\n let mut long_lived = immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 54, "score": 51562.578144322455 }, { "content": "#[inline(never)]\n\nfn TimeConstruction(depth: i32) {\n\n let iNumIters = NumIters(depth);\n\n\n\n for _ in 0..iNumIters {\n\n let mut tempTree = immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n Populate(depth, &mut tempTree);\n\n\n\n // destroy tempTree\n\n }\n\n\n\n for _ in 0..iNumIters {\n\n let tempTree = MakeTree(depth);\n\n }\n\n}\n\nconst kStretchTreeDepth: i32 = 18;\n\nconst kLongLivedTreeDepth: i32 = 16;\n\nconst kArraySize: i32 = 500000;\n\nconst kMinTreeDepth: i32 = 4;\n\nconst kMaxTreeDepth: i32 = 16;\n", "file_path": "src/main.rs", "rank": 55, "score": 46268.71261764122 }, { "content": "fn NumIters(i: i32) -> i32 {\n\n 2 * TreeSize(kStretchTreeDepth) / TreeSize(i)\n\n}\n", "file_path": "src/main.rs", "rank": 56, "score": 44393.008465202 }, { 
"content": "fn TreeSize(i: i32) -> i32 {\n\n (1 << (i + 1)) - 1\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 44393.008465202 }, { "content": "#[inline(never)]\n\nfn TimeConstruction(depth: i32) {\n\n let iNumIters = NumIters(depth);\n\n\n\n for _ in 0..iNumIters {\n\n let mut tempTree = immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n Populate(depth, &mut tempTree);\n\n\n\n // destroy tempTree\n\n }\n\n\n\n for _ in 0..iNumIters {\n\n let tempTree = MakeTree(depth);\n\n }\n\n}\n\nconst kStretchTreeDepth: i32 = 18;\n\nconst kLongLivedTreeDepth: i32 = 16;\n\nconst kArraySize: i32 = 500000;\n\nconst kMinTreeDepth: i32 = 4;\n\nconst kMaxTreeDepth: i32 = 16;\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 58, "score": 42857.58609097422 }, { "content": "fn TreeSize(i: i32) -> i32 {\n\n (1 << (i + 1)) - 1\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 59, "score": 40981.88193853501 }, { "content": "fn NumIters(i: i32) -> i32 {\n\n 2 * TreeSize(kStretchTreeDepth) / TreeSize(i)\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 60, "score": 40981.88193853501 }, { "content": "fn TreeSize(i: i32) -> i32 {\n\n (1 << (i + 1)) - 1\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc.rs", "rank": 61, "score": 40981.88193853501 }, { "content": "fn NumIters(i: i32) -> i32 {\n\n 2 * TreeSize(kStretchTreeDepth) / TreeSize(i)\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 62, "score": 40981.88193853501 }, { "content": "fn MakeTree(idepth: i32) -> Gc<Node> {\n\n immix_mutator_yieldpoint();\n\n if idepth <= 0 {\n\n return immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n } else {\n\n let left = MakeTree(idepth - 1);\n\n let right = MakeTree(idepth - 1);\n\n let result = immix_alloc_safe(Node {\n\n left: Some(left),\n\n right: Some(right),\n\n i: 0,\n\n j: 0,\n\n 
});\n\n result\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 63, "score": 40475.16298634038 }, { "content": "fn TreeSize(i: i32) -> i32 {\n\n (1 << (i + 1)) - 1\n\n}\n\n\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 64, "score": 40028.92366325173 }, { "content": "fn NumIters(i: i32) -> i32 {\n\n 2 * TreeSize(kStretchTreeDepth) / TreeSize(i)\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-bdwgc-incremental.rs", "rank": 65, "score": 40028.92366325173 }, { "content": "fn MakeTree(idepth: i32) -> Gc<Node> {\n\n immix_mutator_yieldpoint();\n\n if idepth <= 0 {\n\n return immix_alloc_safe(Node {\n\n left: None,\n\n right: None,\n\n i: 0,\n\n j: 0,\n\n });\n\n } else {\n\n let left = MakeTree(idepth - 1);\n\n let right = MakeTree(idepth - 1);\n\n let result = immix_alloc_safe(Node {\n\n left: Some(left),\n\n right: Some(right),\n\n i: 0,\n\n j: 0,\n\n });\n\n result\n\n }\n\n}\n", "file_path": "bdwgcvsimmix-bench/benches/gcbench-immix.rs", "rank": 66, "score": 37606.49297969065 }, { "content": "pub const BLOCK_SIZE: usize = 32 * 1024;\n\npub const LINE_SIZE: usize = 128;\n\npub const NUM_LINES_PER_BLOCK: usize = BLOCK_SIZE / LINE_SIZE;\n\n// Objects smaller than MEDIUM_OBJECT are allocated with the\n\n/// `NormalAllocator`, otherwise the `OverflowAllocator` is used.\n\npub const MEDIUM_OBJECT: usize = LINE_SIZE;\n\n\n\n/// Objects larger than LARGE_OBJECT are allocated using the `LargeObjectSpace`.\n\npub const LARGE_OBJECT: usize = 8 * 1024;\n\n/// Whether evacuation should be used or not.\n\npub const USE_EVACUATION: bool = true;\n\n\n\n/// The number of blocks stored into the `EvacAllocator` for evacuation.\n\npub const EVAC_HEADROOM: usize = 5;\n\n\n\n/// Ratio when to trigger evacuation collection.\n\npub const EVAC_TRIGGER_THRESHHOLD: f64 = 0.25;\n", "file_path": "src/constants.rs", "rank": 67, "score": 34394.911150976135 }, { "content": " /// Get pointer to block from `object` pointer.\n\n ///\n\n /// # 
Safety\n\n /// Does not do anything unsafe but might return wrong pointer\n\n pub unsafe fn get_block_ptr(object: Address) -> *mut Self {\n\n let off = object.to_usize() % BLOCK_SIZE;\n\n (object.to_mut_ptr::<u8>()).offset(-(off as isize)) as *mut ImmixBlock\n\n }\n\n /*pub fn set_gc_object(&mut self, addr: Address) -> bool {\n\n unsafe {\n\n //let f = addr.to_mut_ptr::<[u64; 2]>().read();\n\n let x = self.object_map.set(addr.to_usize(), self.begin());\n\n //debug_assert!(addr.to_mut_ptr::<[u64; 2]>().read() == f);\n\n x\n\n }\n\n }\n\n pub fn unset_gc_object(&mut self, addr: Address) -> bool {\n\n self.object_map.clear(addr.to_usize(), self.begin())\n\n }*/\n\n pub fn new(at: *mut u8) -> &'static mut Self {\n", "file_path": "src/block.rs", "rank": 83, "score": 33924.70361425043 }, { "content": " unsafe {\n\n //let block = memmap::MmapMut::map_anon(0).unwrap();\n\n let ptr = at as *mut Self;\n\n core::ptr::write_bytes(ptr as *mut u8, 0, BLOCK_SIZE);\n\n debug_assert!(ptr as usize % 32 * 1024 == 0);\n\n ptr.write(Self {\n\n line_map: LineMap::new(),\n\n //object_map: ObjectMap::new(),\n\n allocated: false,\n\n hole_count: 0,\n\n evacuation_candidate: false,\n\n });\n\n //(&mut *ptr).line_map.bitmap_begin = (&*ptr).line_map.bitmap_.as_ptr() as *mut usize;\n\n //(&mut *ptr).object_map.bitmap_begin = (&*ptr).object_map.bitmap_.as_ptr() as *mut usize;\n\n\n\n debug_assert!((*ptr).line_map.is_empty());\n\n //assert!((&*ptr).object_map.is_empty());\n\n &mut *ptr\n\n }\n\n }\n", "file_path": "src/block.rs", "rank": 84, "score": 33919.422424887314 }, { "content": " self.hole_count = 0;\n\n self.evacuation_candidate = false;\n\n }\n\n pub fn line_object_mark(&mut self, object: Address) {\n\n self.modify_line(object, true);\n\n }\n\n\n\n pub fn line_object_unmark(&mut self, object: Address) {\n\n self.modify_line(object, false);\n\n }\n\n pub fn line_is_marked(&self, line: usize) -> bool {\n\n self.line_map\n\n .test(self.begin() + (line * LINE_SIZE), self.begin())\n\n 
}\n\n\n\n pub fn object_to_line_num(object: Address) -> usize {\n\n (object.to_usize() % BLOCK_SIZE) / LINE_SIZE\n\n }\n\n}\n", "file_path": "src/block.rs", "rank": 85, "score": 33918.0793723271 }, { "content": "use crate::constants::*;\n\nuse crate::object::*;\n\nuse crate::util::*;\n\n// LineMap is used for scanning block for holes\n\nspace_bitmap_gen!(LineMap, LINE_SIZE, BLOCK_SIZE as u64);\n\n#[repr(C)]\n\npub struct ImmixBlock {\n\n /// Bitmap for marking lines\n\n pub line_map: LineMap,\n\n /// Bitmap of objects used for conservative marking\n\n ///pub object_map: ObjectMap,\n\n /// Is this block actually allocated\n\n pub allocated: bool,\n\n /// How many holes in this block\n\n pub hole_count: u32,\n\n pub evacuation_candidate: bool,\n\n //pub map: memmap::MmapMut,\n\n}\n\n\n\nimpl ImmixBlock {\n", "file_path": "src/block.rs", "rank": 86, "score": 33917.20248881364 }, { "content": " #[inline]\n\n pub fn is_in_block(&self, p: Address) -> bool {\n\n if self.allocated {\n\n let b = self.begin();\n\n let e = b + BLOCK_SIZE;\n\n b < p.to_usize() && p.to_usize() <= e\n\n } else {\n\n false\n\n }\n\n }\n\n /*#[inline]\n\n pub fn is_gc_object(&self, p: Address) -> bool {\n\n if self.is_in_block(p) {\n\n self.object_map.test(p.to_usize(), self.begin())\n\n } else {\n\n false\n\n }\n\n }*/\n\n pub fn begin(&self) -> usize {\n\n self as *const Self as usize\n", "file_path": "src/block.rs", "rank": 87, "score": 33914.45902788144 }, { "content": " pub fn is_empty(&self) -> bool {\n\n for i in 0..NUM_LINES_PER_BLOCK {\n\n if self\n\n .line_map\n\n .test(self.begin() + (i * LINE_SIZE), self.begin())\n\n {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n /// Update the line counter for the given object.\n\n ///\n\n /// Increment if `increment`, otherwise do a saturating substraction.\n\n #[inline(always)]\n\n fn modify_line(&mut self, object: Address, mark: bool) {\n\n let line_num = Self::object_to_line_num(object);\n\n let b = self.begin();\n\n\n\n let object_ptr = 
object.to_mut_ptr::<RawGc>();\n", "file_path": "src/block.rs", "rank": 88, "score": 33910.948796009165 }, { "content": " /// holes.\n\n pub fn count_holes_and_marked_lines(&self) -> (usize, usize) {\n\n (self.hole_count as usize, {\n\n let mut count = 0;\n\n for line in 0..NUM_LINES_PER_BLOCK {\n\n if self\n\n .line_map\n\n .test(line * LINE_SIZE + self.begin(), self.begin())\n\n {\n\n count += 1;\n\n }\n\n }\n\n count\n\n })\n\n }\n\n\n\n /// Return the number of holes and available lines in this block.\n\n ///\n\n /// An available line is a line with a count of zero.\n\n ///\n", "file_path": "src/block.rs", "rank": 89, "score": 33910.63136782211 }, { "content": " let mut in_hole = false;\n\n let b = self.begin();\n\n for i in 0..NUM_LINES_PER_BLOCK {\n\n match (in_hole, self.line_map.test(b + (LINE_SIZE * i), b)) {\n\n (false, false) => {\n\n holes += 1;\n\n in_hole = true;\n\n }\n\n (_, _) => {\n\n in_hole = false;\n\n }\n\n }\n\n }\n\n self.hole_count = holes as _;\n\n holes\n\n }\n\n pub fn offset(&self, offset: usize) -> Address {\n\n Address::from(self.begin() + offset)\n\n }\n\n\n", "file_path": "src/block.rs", "rank": 90, "score": 33908.18381606808 }, { "content": " \"Found low index {} and high index {} in block {:p}\",\n\n low_index, high_index, self\n\n );\n\n\n\n debug!(\n\n \"Index offsets: ({},{})\",\n\n low_index * LINE_SIZE,\n\n high_index * LINE_SIZE - 1\n\n );\n\n return Some((\n\n align_usize(low_index * LINE_SIZE, 16) as u16,\n\n (high_index * LINE_SIZE - 1) as u16,\n\n ));\n\n }\n\n debug!(\"Found no hole in block {:p}\", self);\n\n\n\n None\n\n }\n\n pub fn count_holes(&mut self) -> usize {\n\n let mut holes: usize = 0;\n", "file_path": "src/block.rs", "rank": 91, "score": 33908.056054946406 }, { "content": " /// _Note_: You must call count_holes() bevorhand to set the number of\n\n /// holes.\n\n pub fn count_holes_and_available_lines(&self) -> (usize, usize) {\n\n (self.hole_count as usize, {\n\n let mut count = 0;\n\n for line in 
0..NUM_LINES_PER_BLOCK {\n\n if !self\n\n .line_map\n\n .test(line * LINE_SIZE + self.begin(), self.begin())\n\n {\n\n count += 1;\n\n }\n\n }\n\n count\n\n })\n\n }\n\n pub fn reset(&mut self) {\n\n self.line_map.clear_all();\n\n // self.object_map.clear_all();\n\n self.allocated = false;\n", "file_path": "src/block.rs", "rank": 92, "score": 33907.05678772185 }, { "content": " }\n\n /// Scan the block for a hole to allocate into.\n\n ///\n\n /// The scan will start at `last_high_offset` bytes into the block and\n\n /// return a tuple of `low_offset`, `high_offset` as the lowest and\n\n /// highest usable offsets for a hole.\n\n ///\n\n /// `None` is returned if no hole was found.\n\n pub fn scan_block(&self, last_high_offset: u16) -> Option<(u16, u16)> {\n\n let last_high_index = last_high_offset as usize / LINE_SIZE;\n\n let mut low_index = NUM_LINES_PER_BLOCK - 1;\n\n debug!(\n\n \"Scanning block {:p} for a hole with last_high_offset {}\",\n\n self, last_high_index\n\n );\n\n for index in (last_high_index + 1)..NUM_LINES_PER_BLOCK {\n\n if !self\n\n .line_map\n\n .test(self.begin() + (index * LINE_SIZE), self.begin())\n\n {\n", "file_path": "src/block.rs", "rank": 93, "score": 33905.53988193826 }, { "content": " unsafe {\n\n let obj = &mut *object_ptr;\n\n\n\n let size = obj.object_size();\n\n\n\n for line in line_num..(line_num + (size / LINE_SIZE) + 1) {\n\n if mark {\n\n self.line_map.set(b + (line * LINE_SIZE), b);\n\n //debug_assert!(self.line_map.test(b + (line * LINE_SIZE), b));\n\n } else {\n\n self.line_map.clear(b + (line * LINE_SIZE), b);\n\n }\n\n }\n\n }\n\n }\n\n /// Return the number of holes and marked lines in this block.\n\n ///\n\n /// A marked line is a line with a count of at least one.\n\n ///\n\n /// _Note_: You must call count_holes() bevorhand to set the number of\n", "file_path": "src/block.rs", "rank": 94, "score": 33903.391595667475 }, { "content": " low_index = index + 1;\n\n break;\n\n }\n\n }\n\n let mut high_index = 
NUM_LINES_PER_BLOCK;\n\n for index in low_index..NUM_LINES_PER_BLOCK {\n\n if self\n\n .line_map\n\n .test(self.begin() + (LINE_SIZE * index), self.begin())\n\n {\n\n high_index = index;\n\n break;\n\n }\n\n }\n\n\n\n if low_index == high_index && high_index != (NUM_LINES_PER_BLOCK - 1) {\n\n debug!(\"Rescan: Found single line hole? in block {:p}\", self);\n\n return self.scan_block((high_index * LINE_SIZE - 1) as u16);\n\n } else if low_index < (NUM_LINES_PER_BLOCK - 1) {\n\n debug!(\n", "file_path": "src/block.rs", "rank": 95, "score": 33902.04283375384 }, { "content": " /// Returns object size on heap. Must be non null when using from c/c++!\n\n pub heap_size: extern \"C\" fn(*mut u8) -> usize,\n\n /// Traces object for references into GC heap. Might be null when using from c/c++.\n\n pub visit_references: extern \"C\" fn(*mut u8, TracerPtr),\n\n /// If set to true object that uses this RTTI will be pushed to `to_finalize` list and might be finalized at some GC cycle.\n\n pub needs_finalization: bool,\n\n /// Object finalizer. 
Invoked when object is dead.\n\n pub finalizer: Option<extern \"C\" fn(*mut u8)>,\n\n}\n\n\n\n#[repr(C)]\n\n/// ConservativeTracer is passed into GC callback so users of this library can also provide some region of memory for conservative scan.\n\npub struct ConservativeTracer {\n\n pub(crate) roots: *mut u8,\n\n}\n\n\n\nimpl ConservativeTracer {\n\n pub fn add(&self, start: *mut *mut u8, end: *mut *mut u8) {\n\n unsafe {\n\n (&mut *(self.roots as *mut alloc::vec::Vec<(usize, usize)>))\n", "file_path": "src/object.rs", "rank": 96, "score": 33765.89087823124 }, { "content": " (self.rtti().heap_size)(self as *const Self as *mut u8) + core::mem::size_of::<Self>(),\n\n 16,\n\n )\n\n }\n\n\n\n pub fn data(&self) -> *mut u8 {\n\n unsafe { (self as *const Self as *const u8).add(core::mem::size_of::<Self>()) as *mut u8 }\n\n }\n\n /// Return true if this object is precise allocation\n\n pub fn is_precise_allocation(&self) -> bool {\n\n crate::large_object_space::PreciseAllocation::is_precise(self as *const _ as *mut _)\n\n }\n\n /// Return precise allocation from this object\n\n pub fn precise_allocation(&self) -> *mut crate::large_object_space::PreciseAllocation {\n\n crate::large_object_space::PreciseAllocation::from_cell(self as *const _ as *mut _)\n\n }\n\n pub fn new(vtable: usize) -> Self {\n\n Self {\n\n vtable: TaggedPointer::new(vtable as *mut _),\n\n }\n", "file_path": "src/object.rs", "rank": 97, "score": 33761.19177153462 }, { "content": " GCRTTI {\n\n visit_references: {\n\n extern \"C\" fn visit(data: *mut u8, trace: TracerPtr) {\n\n unsafe {\n\n (*data.add(8).cast::<$t>()).visit_references(&mut *core::mem::transmute::<\n\n [usize; 2],\n\n *mut dyn Tracer,\n\n >(\n\n trace.tracer\n\n ));\n\n }\n\n }\n\n visit\n\n },\n\n finalizer: Some({\n\n extern \"C\" fn fin(data: *mut u8) {\n\n unsafe {\n\n core::ptr::drop_in_place(data.add(8).cast::<$t>());\n\n }\n\n }\n", "file_path": "src/object.rs", "rank": 98, "score": 33756.76380833289 }, { "content": " .push((start 
as usize, end as usize));\n\n }\n\n }\n\n}\n\n/// Add memory region from `begin` to `end` for scanning for heap objects.\n\n#[no_mangle]\n\npub extern \"C\" fn conservative_roots_add(\n\n tracer: *mut ConservativeTracer,\n\n begin: usize,\n\n end: usize,\n\n) {\n\n unsafe { (&mut *tracer).add(begin as *mut _, end as *mut _) }\n\n}\n\n\n\nimpl<T: HeapObject + ?Sized> Copy for Gc<T> {}\n\nimpl<T: HeapObject + ?Sized> Clone for Gc<T> {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n", "file_path": "src/object.rs", "rank": 99, "score": 33756.56306125376 } ]
Rust
frontend/apps/crates/utils/src/components/module_page.rs
Sheng-Long/ji-cloud
d12741467eb5d0ae15e3cece9d3428ef9d0050e3
/* There are a few fundamental concepts going on here... * 1. The serialized data does _not_ need to be Clone. * rather, it's passed completely to the child * and then the child is free to split it up for Mutable/etc. * (here it is held and taken from an Option) * 2. The loader will be skipped if the url has ?iframe_data=true * in this case, iframe communication is setup and the parent * is expected to post a message with the data (via IframeInit) */ use std::rc::Rc; use std::cell::RefCell; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use futures_signals::{ map_ref, signal::{Mutable,ReadOnlyMutable, SignalExt, Signal}, signal_vec::{MutableVec, SignalVec, SignalVecExt}, CancelableFutureHandle, }; use web_sys::{Url, HtmlElement, Element, HtmlInputElement}; use dominator::{DomBuilder, Dom, html, events, with_node, clone, apply_methods}; use dominator_helpers::{elem,dynamic_class_signal ,with_data_id, spawn_future, AsyncLoader}; use crate::templates; use wasm_bindgen_futures::{JsFuture, spawn_local, future_to_promise}; use serde::de::DeserializeOwned; use crate::{ iframe::*, resize::*, }; use std::future::Future; use async_trait::async_trait; #[async_trait(?Send)] pub trait ModuleRenderer { type Data: DeserializeOwned; async fn load(_self:Rc<Self>) -> Self::Data; fn render(_self: Rc<Self>, data: Self::Data) -> Dom; } pub struct ModulePage<T, R> where T: DeserializeOwned, R: ModuleRenderer<Data = T>, { renderer: Rc<R>, loaded_data: RefCell<Option<T>>, has_loaded_data: Mutable<bool>, wait_iframe_data: bool, loader: AsyncLoader, } impl <T, R> ModulePage <T, R> where T: DeserializeOwned + 'static, R: ModuleRenderer<Data = T> + 'static, { pub fn new(renderer:Rc<R>) -> Rc<Self> { let wait_iframe_data = should_get_iframe_data(); let _self = Rc::new(Self { renderer, loaded_data: RefCell::new(None), has_loaded_data: Mutable::new(false), loader: AsyncLoader::new(), wait_iframe_data, }); let _self_clone = _self.clone(); _self_clone.loader.load(async move { if 
!wait_iframe_data { let data:T = ModuleRenderer::load(_self.renderer.clone()).await; *_self.loaded_data.borrow_mut() = Some(data); _self.has_loaded_data.set(true); } }); _self_clone } pub fn render(_self: Rc<Self>) -> Dom { elem!(templates::module_page(), { .with_data_id!("module-outer", { .with_data_id!("module-content", { .child_signal(_self.has_loaded_data.signal().map(clone!(_self => move |ready| { if ready { let data = _self.loaded_data.borrow_mut().take().unwrap_throw(); Some(ModuleRenderer::render(_self.renderer.clone(),data)) } else { None } }))) }) .with_node!(elem => { .global_event(clone!(_self => move |evt:events::Resize| { ModuleBounds::set( elem.client_width(), elem.client_height() ); })) }) .after_inserted(clone!(_self => move |elem| { ModuleBounds::set( elem.client_width(), elem.client_height() ); })) }) .global_event(clone!(_self => move |evt:dominator_helpers::events::Message| { if let Ok(msg) = evt.try_serde_data::<IframeInit<T>>() { if !_self.wait_iframe_data { log::warn!("weird... shouldn't have gotten iframe data!"); } *_self.loaded_data.borrow_mut() = Some(msg.data.unwrap_throw()); _self.has_loaded_data.set(true); } else { log::info!("hmmm got other iframe message..."); } })) .after_inserted(clone!(_self => move |elem| { if _self.wait_iframe_data { let target = web_sys::window().unwrap_throw().parent().unwrap_throw().unwrap_throw(); let msg = IframeInit::empty(); target.post_message(&msg.into(), "*"); } })) }) } }
/* There are a few fundamental concepts going on here... * 1. The serialized data does _not_ need to be Clone. * rather, it's passed completely to the child * and then the child is free to split it up for Mutable/etc. * (here it is held and taken from an Option) * 2. The loader will be skipped if the url has ?iframe_data=true * in this case, iframe communication is setup and the parent * is expected to post a message with the data (via IframeInit) */ use std::rc::Rc; use std::cell::RefCell; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use futures_signals::{ map_ref, signal::{Mutable,ReadOnlyMutable, SignalExt, Signal}, signal_vec::{MutableVec, SignalVec, SignalVecExt}, CancelableFutureHandle, }; use web_sys::{Url, HtmlElement, Element, HtmlInputElement}; use dominator::{DomBuilder, Dom, html, events, with_node, clone, apply_methods}; use dominator_helpers::{elem,dynamic_class_signal ,with_data_id, spawn_future, AsyncLoader}; use crate::templates; use wasm_bindgen_futures::{JsFuture, spawn_local, future_to_promise}; use serde::de::DeserializeOwned; use crate::{ iframe::*, resize::*, }; use std::future::Future; use async_trait::async_trait; #[async_trait(?Send)] pub trait ModuleRenderer { type Data: DeserializeOwned; async fn load(_self
data)) } else { None } }))) }) .with_node!(elem => { .global_event(clone!(_self => move |evt:events::Resize| { ModuleBounds::set( elem.client_width(), elem.client_height() ); })) }) .after_inserted(clone!(_self => move |elem| { ModuleBounds::set( elem.client_width(), elem.client_height() ); })) }) .global_event(clone!(_self => move |evt:dominator_helpers::events::Message| { if let Ok(msg) = evt.try_serde_data::<IframeInit<T>>() { if !_self.wait_iframe_data { log::warn!("weird... shouldn't have gotten iframe data!"); } *_self.loaded_data.borrow_mut() = Some(msg.data.unwrap_throw()); _self.has_loaded_data.set(true); } else { log::info!("hmmm got other iframe message..."); } })) .after_inserted(clone!(_self => move |elem| { if _self.wait_iframe_data { let target = web_sys::window().unwrap_throw().parent().unwrap_throw().unwrap_throw(); let msg = IframeInit::empty(); target.post_message(&msg.into(), "*"); } })) }) } }
:Rc<Self>) -> Self::Data; fn render(_self: Rc<Self>, data: Self::Data) -> Dom; } pub struct ModulePage<T, R> where T: DeserializeOwned, R: ModuleRenderer<Data = T>, { renderer: Rc<R>, loaded_data: RefCell<Option<T>>, has_loaded_data: Mutable<bool>, wait_iframe_data: bool, loader: AsyncLoader, } impl <T, R> ModulePage <T, R> where T: DeserializeOwned + 'static, R: ModuleRenderer<Data = T> + 'static, { pub fn new(renderer:Rc<R>) -> Rc<Self> { let wait_iframe_data = should_get_iframe_data(); let _self = Rc::new(Self { renderer, loaded_data: RefCell::new(None), has_loaded_data: Mutable::new(false), loader: AsyncLoader::new(), wait_iframe_data, }); let _self_clone = _self.clone(); _self_clone.loader.load(async move { if !wait_iframe_data { let data:T = ModuleRenderer::load(_self.renderer.clone()).await; *_self.loaded_data.borrow_mut() = Some(data); _self.has_loaded_data.set(true); } }); _self_clone } pub fn render(_self: Rc<Self>) -> Dom { elem!(templates::module_page(), { .with_data_id!("module-outer", { .with_data_id!("module-content", { .child_signal(_self.has_loaded_data.signal().map(clone!(_self => move |ready| { if ready { let data = _self.loaded_data.borrow_mut().take().unwrap_throw(); Some(ModuleRenderer::render(_self.renderer.clone(),
random
[ { "content": "pub fn image_edit_category_child(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_CHILD, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 0, "score": 346981.01528432494 }, { "content": "pub fn image_edit_category_parent(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_PARENT, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 1, "score": 346952.89271983894 }, { "content": "pub fn image_edit_category_child_end(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_CHILD_END, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 2, "score": 344112.2272217542 }, { "content": "pub fn image_edit_category_summary_child(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_SUMMARY_CHILD, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 3, "score": 344112.2272217542 }, { "content": "pub fn image_edit_category_parent_end(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_PARENT_END, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 4, "score": 344084.4142304861 }, { "content": "pub fn image_edit_category_summary_parent(name:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_CATEGORIES_SUMMARY_PARENT, &html_map!{\n\n \"name\" => name\n\n }).unwrap_throw())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 5, "score": 344084.41423048615 }, { "content": "pub fn 
categories() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CATEGORIES))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 6, "score": 327667.7556201839 }, { "content": "pub fn profile() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(PROFILE))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 7, "score": 327667.75562018395 }, { "content": "pub fn signin() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(SIGNIN))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 8, "score": 327667.75562018395 }, { "content": "pub fn image_add() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGE_ADD))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 9, "score": 324999.0444181257 }, { "content": "pub fn register_step3() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(REGISTER_STEP3))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 10, "score": 324999.0444181257 }, { "content": "pub fn forgot_password() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(FORGOT_PASSWORD))\n\n}\n\n\n\npub struct Templates {\n\n pub cache: TemplateCache<'static>\n\n}\n\n\n\nimpl fmt::Debug for Templates {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n \n\n f.debug_list()\n\n .entries(self.cache.templates.keys())\n\n .finish()\n\n }\n\n}\n\nimpl Templates {\n\n pub fn new() -> Self {\n\n let cache = TemplateCache::new(&vec![\n\n (SIGNIN, get_template_str(include_str!(\"../../../../../.template_output/user/signin/signin.html\"))),\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 11, "score": 324999.0444181257 }, { "content": "pub fn image_edit() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGE_EDIT))\n\n}\n", "file_path": 
"frontend/apps/crates/entry/admin/src/templates.rs", "rank": 12, "score": 324999.0444181257 }, { "content": "pub fn gallery() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(GALLERY_PAGE))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 13, "score": 324999.0444181257 }, { "content": "pub fn module_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(MODULE_PAGE))\n\n}\n\npub struct Templates {\n\n pub cache: TemplateCache<'static>\n\n}\n\n\n\nimpl fmt::Debug for Templates {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n \n\n f.debug_list()\n\n .entries(self.cache.templates.keys())\n\n .finish()\n\n }\n\n}\n\nimpl Templates {\n\n pub fn new() -> Self {\n\n let cache = TemplateCache::new(&vec![\n\n (MODULE_PAGE, get_template_str(include_str!(\"../../../../../.template_output/_common/module/module-page.html\"))),\n\n ]);\n\n\n\n Self { cache }\n\n }\n\n\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/templates/mod.rs", "rank": 14, "score": 324999.0444181257 }, { "content": "pub fn images_search() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGES_SEARCH))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 15, "score": 324999.0444181257 }, { "content": "pub fn register_step1() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(REGISTER_STEP1))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 16, "score": 324999.0444181257 }, { "content": "pub fn images_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGES_PAGE))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 17, "score": 324999.0444181257 }, { "content": "pub fn register_final() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(REGISTER_FINAL))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 18, "score": 
324999.0444181257 }, { "content": "pub fn category_menu() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CATEGORY_MENU))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 19, "score": 324999.0444181257 }, { "content": "pub fn register_start() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(REGISTER_START))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 20, "score": 324999.0444181257 }, { "content": "pub fn register_step2() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(REGISTER_STEP2))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 21, "score": 324999.0444181257 }, { "content": "pub fn profile_email_change() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(PROFILE_EMAIL_CHANGE))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 22, "score": 322391.7937998754 }, { "content": "pub fn category_label_display() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CATEGORY_LABEL_DISPLAY))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 23, "score": 322391.7937998754 }, { "content": "pub fn category_label_input() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CATEGORY_LABEL_INPUT))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 24, "score": 322391.7937998754 }, { "content": "pub fn image_edit_categories() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGE_EDIT_CATEGORIES))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 25, "score": 322391.7937998754 }, { "content": "pub fn card() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CARD))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/templates.rs", "rank": 26, "score": 322391.7937998754 }, { "content": 
"pub fn player() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(PLAYER))\n\n}\n\n\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/templates.rs", "rank": 27, "score": 322391.7937998754 }, { "content": "pub fn send_email_confirmation() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(SEND_EMAIL_CONFIRMATION))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 28, "score": 322391.7937998754 }, { "content": "pub fn got_email_confirmation() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(GOT_EMAIL_CONFIRMATION))\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 29, "score": 322391.7937998754 }, { "content": "pub fn image_edit_meta() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(IMAGE_EDIT_META))\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 30, "score": 322391.7937998754 }, { "content": "pub fn edit_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_PAGE))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 31, "score": 322391.7937998754 }, { "content": "pub fn edit_module_right() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_MODULE_RIGHT))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 32, "score": 319843.9047738247 }, { "content": "pub fn edit_delete_popup() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_DELETE_POPUP))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 33, "score": 319843.9047738247 }, { "content": "pub fn module_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(MODULE_PAGE))\n\n}\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/templates.rs", "rank": 34, "score": 319843.9047738247 }, { "content": "pub fn hover_card() -> HtmlElement 
{\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(HOVER_CARD))\n\n}\n\npub struct Templates {\n\n pub cache: TemplateCache<'static>\n\n}\n\n\n\nimpl fmt::Debug for Templates {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n \n\n f.debug_list()\n\n .entries(self.cache.templates.keys())\n\n .finish()\n\n }\n\n}\n\nimpl Templates {\n\n pub fn new() -> Self {\n\n let cache = TemplateCache::new(&vec![\n\n (MODULE_PAGE, get_template_str(include_str!(\"../../../../../../../.template_output/_common/module/module-page.html\"))),\n\n (PLAYER, get_template_str(include_str!(\"../../../../../../../.template_output/module/memory/play/player.html\"))),\n\n (CARD, get_template_str(include_str!(\"../../../../../../../.template_output/module/memory/play/memory-card.html\"))),\n\n (HOVER_CARD, get_template_str(include_str!(\"../../../../../../../.template_output/module/memory/play/hover-card.html\"))),\n\n ]);\n\n\n\n Self { cache }\n\n }\n\n\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/templates.rs", "rank": 35, "score": 319843.9047738247 }, { "content": "pub fn edit_module_selection() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_MODULE_SELECTION))\n\n}\n\n\n\npub struct Templates {\n\n pub cache: TemplateCache<'static>\n\n}\n\n\n\nimpl fmt::Debug for Templates {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n \n\n f.debug_list()\n\n .entries(self.cache.templates.keys())\n\n .finish()\n\n }\n\n}\n\nimpl Templates {\n\n pub fn new() -> Self {\n\n let cache = TemplateCache::new(&vec![\n\n (GALLERY_PAGE, get_template_str(include_str!(\"../../../../../../.template_output/jig/gallery/jig-creator-one.html\"))),\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 36, "score": 319843.9047738247 }, { "content": "pub fn edit_menu_section() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_MENU_SECTION))\n\n}\n", "file_path": 
"frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 37, "score": 319843.9047738247 }, { "content": "pub fn edit_module_left() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_MODULE_LEFT))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 38, "score": 319843.9047738247 }, { "content": "pub fn edit_sidebar_section() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_SIDEBAR_SECTION))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 39, "score": 319843.9047738247 }, { "content": "pub fn card_edit_text() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CARD_EDIT_TEXT))\n\n}\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/templates.rs", "rank": 40, "score": 317353.37285175885 }, { "content": "pub fn edit_module_drag_slot() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(EDIT_MODULE_DRAG_SLOT))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/templates.rs", "rank": 41, "score": 317353.37285175885 }, { "content": "pub fn card_edit_preview() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(CARD_EDIT_PREVIEW))\n\n}\n\n\n\npub mod duplicate {\n\n use super::*;\n\n pub fn step_1_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(DUPLICATE_STEP_1_PAGE))\n\n }\n\n pub fn step_1_tooltip() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(DUPLICATE_STEP_1_TOOLTIP))\n\n }\n\n pub fn step_1_error() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(DUPLICATE_STEP_1_ERROR))\n\n }\n\n pub fn step_2_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(DUPLICATE_STEP_2_PAGE))\n\n }\n\n pub fn step_4_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(DUPLICATE_STEP_4_PAGE))\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/templates.rs", "rank": 42, "score": 
317353.37285175885 }, { "content": "pub fn mode_choose_page() -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem_plain(MODE_CHOOSE_PAGE))\n\n}\n\n\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/templates.rs", "rank": 43, "score": 317353.37285175885 }, { "content": "pub fn get_request_with_headers<A: AsRef<str>, B: AsRef<str>>(url: &str, pairs: &[(A, B)], data:Option<impl Serialize>) -> Result<web_sys::Request, JsValue> {\n\n \n\n let mut req_init = web_sys::RequestInit::new();\n\n req_init.method(\"POST\");\n\n req_init.credentials(web_sys::RequestCredentials::Include);\n\n\n\n let req = match data {\n\n None => {\n\n let req = web_sys::Request::new_with_str_and_init(url, &req_init)?;\n\n\n\n req\n\n },\n\n Some(data) => {\n\n let json_str = serde_json::to_string(&data).map_err(|err| JsValue::from_str(&err.to_string()))?;\n\n //req_init.mode(web_sys::RequestMode::Cors);\n\n req_init.body(Some(&JsValue::from_str(&json_str)));\n\n let req = web_sys::Request::new_with_str_and_init(url, &req_init)?;\n\n\n\n req.headers().set(\"Content-Type\", \"application/json\")?;\n\n\n", "file_path": "frontend/apps/crates/utils/src/fetch.rs", "rank": 44, "score": 311436.0648368393 }, { "content": "pub fn category_sub(id:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(CATEGORY_SUB, &html_map!(\n\n \"id\" => id,\n\n )).unwrap())\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 45, "score": 307638.02317858534 }, { "content": "pub fn should_get_iframe_data() -> bool { \n\n let url:String = dominator::routing::url().get_cloned();\n\n let url:web_sys::Url = web_sys::Url::new(&url).unwrap_throw();\n\n let params = url.search_params();\n\n\n\n match params.get(\"iframe_data\") {\n\n None => false,\n\n Some(value) => {\n\n if value == \"true\" {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n }\n\n}\n", "file_path": "frontend/apps/crates/utils/src/iframe.rs", "rank": 46, "score": 304167.09109911084 }, { 
"content": "pub fn category_main(id:&str, selected:bool) -> HtmlElement {\n\n if selected {\n\n TEMPLATES.with(|t| t.cache.render_elem(CATEGORY_MAIN_SELECTED, &html_map!(\n\n \"id\" => id,\n\n )).unwrap())\n\n } else {\n\n TEMPLATES.with(|t| t.cache.render_elem(CATEGORY_MAIN_DESELECTED, &html_map!(\n\n \"id\" => id,\n\n )).unwrap())\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 47, "score": 297676.8767720993 }, { "content": "pub fn checkbox(id:&str, label:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(CHECKBOX, &html_map!{\n\n \"label\" => label,\n\n \"id\" => id\n\n }).unwrap())\n\n}\n\npub struct Templates {\n\n pub cache: TemplateCache<'static>\n\n}\n\n\n\nimpl fmt::Debug for Templates {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n \n\n f.debug_list()\n\n .entries(self.cache.templates.keys())\n\n .finish()\n\n }\n\n}\n\nimpl Templates {\n\n pub fn new() -> Self {\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 48, "score": 295631.24577642244 }, { "content": "pub fn checkbox(id:&str, label:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(CHECKBOX, &html_map!{\n\n \"label\" => label,\n\n \"id\" => id\n\n }).unwrap())\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/templates.rs", "rank": 49, "score": 295631.24577642244 }, { "content": "pub fn image_edit_overview(name:&str, description:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_EDIT_OVERVIEW, &html_map!{\n\n \"name\" => name,\n\n \"description\" => description,\n\n }).unwrap_throw())\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 50, "score": 290814.67659955274 }, { "content": "pub fn image_grid_item_green(src:&str, label:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_GRID_ITEM_GREEN, &html_map!{\n\n \"src\" => src,\n\n \"label\" => label,\n\n }).unwrap_throw())\n\n}\n", 
"file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 51, "score": 288485.057261404 }, { "content": "pub fn image_grid_item_red(src:&str, label:&str) -> HtmlElement {\n\n TEMPLATES.with(|t| t.cache.render_elem(IMAGE_GRID_ITEM_RED, &html_map!{\n\n \"src\" => src,\n\n \"label\" => label,\n\n }).unwrap_throw())\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/templates.rs", "rank": 52, "score": 288485.057261404 }, { "content": "fn api_get_query<'a, T: Serialize>(endpoint:&'a str, method:Method, data: Option<T>) -> (String, Option<T>) {\n\n\n\n let api_url = SETTINGS.get().unwrap().remote_target.api_url();\n\n\n\n if method == Method::Get {\n\n if let Some(data) = data {\n\n let query = serde_qs::to_string(&data).unwrap_throw();\n\n let url = format!(\"{}{}?{}\", api_url, endpoint, query);\n\n (url, None)\n\n } else {\n\n let url = format!(\"{}{}\", api_url, endpoint);\n\n (url, None)\n\n }\n\n } else {\n\n let url = format!(\"{}{}\", api_url, endpoint);\n\n (url, data)\n\n }\n\n}\n\n\n\npub async fn api_upload_file(endpoint:&str, file:&File, method:Method) -> Result<(), anyhow::Error> {\n", "file_path": "frontend/apps/crates/utils/src/fetch.rs", "rank": 53, "score": 283133.38444697316 }, { "content": "pub fn load_csrf_token() -> Option<String> {\n\n get_local_storage()\n\n .unwrap_throw()\n\n .get(CSRF_STORAGE_NAME)\n\n .unwrap_throw()\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/storage.rs", "rank": 54, "score": 273032.7590113095 }, { "content": "pub trait ModeStateExt {\n\n type MutableContainer;\n\n fn into_mutable(self, step:usize, jig_id: String, module_id: String) -> Self::MutableContainer;\n\n}\n\n\n\npub struct GameState {\n\n pub jig_id: String,\n\n pub module_id: String,\n\n //outer option is for \"loading\", inner option is for \"no module chosen\"\n\n pub mode: Mutable<Option<Option<GameMode>>>, \n\n pub mode_state: Rc<RefCell<Option<ModeState>>>,\n\n}\n\n\n\n\n\nimpl GameState {\n\n pub fn new(jig_id:String, 
module_id: String) -> Self {\n\n Self {\n\n jig_id,\n\n module_id,\n\n mode: Mutable::new(None),\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/data/data.rs", "rank": 55, "score": 259165.46160734203 }, { "content": "pub fn module_kind_from_str(kind:&str) -> Option<ModuleKind> {\n\n match kind {\n\n \"poster\" => Some(ModuleKind::Poster),\n\n \"design-page\" => Some(ModuleKind::DesignPage),\n\n \"memory-game\" => Some(ModuleKind::MemoryGame),\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/routes.rs", "rank": 56, "score": 256147.43310855416 }, { "content": "pub trait ModuleKindExt {\n\n fn get_thumbnail(self) -> String;\n\n}\n\n\n\nimpl ModuleKindExt for Option<ModuleKind> {\n\n fn get_thumbnail(self) -> String {\n\n\n\n let media_url = unsafe {\n\n SETTINGS.get_unchecked().remote_target.media_ui_url()\n\n };\n\n let icon_path = {\n\n match self {\n\n None => \"JIG_Gear@2x.png\",\n\n Some(kind) => {\n\n match kind {\n\n ModuleKind::Poster => \"icn-module-poster2.png\",\n\n ModuleKind::MemoryGame => \"module-memory-game.svg\",\n\n _ => {\n\n panic!(\"don't have the icon for that module kind!\");\n\n }\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/pages/edit/data.rs", "rank": 57, "score": 248923.99057038454 }, { "content": "pub fn upload_url(path:&str) -> String {\n\n format!(\"{}/{}\", SETTINGS.get().unwrap().remote_target.upload_url(), path)\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 58, "score": 237457.68813646823 }, { "content": "pub fn media_url(path:&str) -> String {\n\n format!(\"{}/{}\", SETTINGS.get().unwrap().remote_target.media_url(), path)\n\n}\n\n\n\n// moved to fetch in order to force consolidation at the compiler level\n\n/*\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 59, "score": 237457.68813646823 }, { "content": "pub fn api_url(path:&str) -> String {\n\n format!(\"{}{}\", SETTINGS.get().unwrap().remote_target.api_url(), 
path)\n\n}\n\n*/\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 60, "score": 237457.68813646823 }, { "content": "pub fn upload_image_url(path:&str) -> String {\n\n format!(\"{}/image/{}\", SETTINGS.get().unwrap().remote_target.upload_url(), path)\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 61, "score": 235379.1766026985 }, { "content": "pub fn create_category(parent:Rc<MutableCategory>) {\n\n spawn_local(async move {\n\n let parent_id = {\n\n if parent.parent.is_none() {\n\n None\n\n } else {\n\n Some(parent.id.as_ref())\n\n }\n\n };\n\n let resp = _create(super::data::EMPTY_NAME.to_string(), parent_id).await;\n\n\n\n match resp {\n\n Ok(resp) => {\n\n let _ = MutableCategory::append_child(None, resp.id.0.to_string(), Some(parent.clone()));\n\n }, \n\n Err(err) => {\n\n log::info!(\"{}\", serde_json::to_string(&err).unwrap());\n\n }\n\n }\n\n })\n\n}\n\n\n\n\n", "file_path": "frontend/apps/crates/entry/admin/src/pages/categories/actions.rs", "rank": 62, "score": 233207.88201968512 }, { "content": "/// Creates a [`TcpListener`] via `listenfd` if possible.\n\n///\n\n/// [`TcpListener`]: https://doc.rust-lang.org/stable/std/net/struct.TcpListener.html\n\npub fn get_tcp_fd() -> Option<TcpListener> {\n\n #[cfg(feature = \"listenfd\")]\n\n let fd = listenfd::ListenFd::from_env().take_tcp_listener(0).unwrap();\n\n\n\n #[cfg(not(feature = \"listenfd\"))]\n\n let fd = None;\n\n\n\n fd\n\n}\n\n\n", "file_path": "backend/core/src/http.rs", "rank": 63, "score": 231826.33229289786 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n setup_logger();\n\n let settings = utils::settings::init();\n\n //init dom stuff\n\n\n\n let router = router::Router::new();\n\n dominator::append_dom(&dominator::body(), router.render());\n\n /*\n\n\n\n let page = page::Page::new();\n\n\n\n dominator::append_dom(&dominator::body(), page.render());\n\n\t*/\n\n}\n\n\n\n\n\n\n\n\n\n// enable logging and panic hook only during debug 
builds\n\ncfg_if! {\n", "file_path": "frontend/apps/crates/entry/admin/src/lib.rs", "rank": 65, "score": 207377.69019029982 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n setup_logger();\n\n let settings = utils::settings::init();\n\n //init dom stuff\n\n\n\n let router = router::Router::new();\n\n dominator::append_dom(&dominator::body(), router.render());\n\n /*\n\n\n\n let page = page::Page::new();\n\n\n\n dominator::append_dom(&dominator::body(), page.render());\n\n\t*/\n\n}\n\n\n\n\n\n\n\n\n\n// enable logging and panic hook only during debug builds\n\ncfg_if! {\n", "file_path": "frontend/apps/crates/entry/jig/play/src/lib.rs", "rank": 66, "score": 205757.83431205127 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n setup_logger();\n\n let settings = utils::settings::init();\n\n //init dom stuff\n\n\n\n let router = router::Router::new();\n\n dominator::append_dom(&dominator::body(), router.render());\n\n /*\n\n\n\n let page = page::Page::new();\n\n\n\n dominator::append_dom(&dominator::body(), page.render());\n\n\t*/\n\n}\n\n\n\n\n\n\n\n\n\n// enable logging and panic hook only during debug builds\n\ncfg_if! {\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/lib.rs", "rank": 67, "score": 205757.83431205127 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n setup_logger();\n\n let settings = utils::settings::init();\n\n //init dom stuff\n\n\n\n let router = router::Router::new();\n\n dominator::append_dom(&dominator::body(), router.render());\n\n /*\n\n\n\n let page = page::Page::new();\n\n\n\n dominator::append_dom(&dominator::body(), page.render());\n\n\t*/\n\n}\n\n\n\n\n\n\n\n\n\n// enable logging and panic hook only during debug builds\n\ncfg_if! 
{\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/lib.rs", "rank": 68, "score": 204175.71839150245 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main_js() {\n\n setup_logger();\n\n let settings = utils::settings::init();\n\n //init dom stuff\n\n\n\n let router = router::Router::new();\n\n dominator::append_dom(&dominator::body(), router.render());\n\n /*\n\n\n\n let page = page::Page::new();\n\n\n\n dominator::append_dom(&dominator::body(), page.render());\n\n\t*/\n\n}\n\n\n\n\n\n\n\n\n\n// enable logging and panic hook only during debug builds\n\ncfg_if! {\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/lib.rs", "rank": 69, "score": 204175.71839150245 }, { "content": "pub fn module_page_str() -> String {\n\n TEMPLATES.with(|t| t.cache.render_plain(MODULE_PAGE).to_string())\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/templates.rs", "rank": 70, "score": 196177.35657076183 }, { "content": "pub fn window_height() -> f64 {\n\n web_sys::window()\n\n .unwrap_throw()\n\n .inner_height()\n\n .unwrap_throw()\n\n .as_f64()\n\n .unwrap_throw()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Scrolling {\n\n pub on_timestamp_diff: Mutable<Option<OnTimestampDiff>>,\n\n pub y: Mutable<f64>,\n\n}\n\n\n\nimpl Scrolling {\n\n pub fn new() -> Self {\n\n Self {\n\n on_timestamp_diff: Mutable::new(None),\n\n y: Mutable::new(0.0),\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/pages/edit/scrolling.rs", "rank": 71, "score": 196177.35657076183 }, { "content": "pub fn save_csrf_token(csrf:&str) {\n\n let local_storage = get_local_storage().unwrap_throw();\n\n\n\n local_storage.set(CSRF_STORAGE_NAME, csrf).unwrap_throw()\n\n}\n", "file_path": "frontend/apps/crates/utils/src/storage.rs", "rank": 72, "score": 194943.6585337512 }, { "content": "pub fn get_local_storage() -> Result<Storage, JsValue> {\n\n window().unwrap()\n\n .local_storage()?\n\n .ok_or(JsValue::from_str(\"could not get local 
storage!\"))\n\n}\n", "file_path": "frontend/apps/crates/utils/src/storage.rs", "rank": 73, "score": 188128.7970667038 }, { "content": "pub fn module_kind_to_str(kind:ModuleKind) -> &'static str {\n\n match kind {\n\n ModuleKind::Poster => \"poster\",\n\n ModuleKind::DesignPage => \"design-page\",\n\n ModuleKind::MemoryGame => \"memory-game\",\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/routes.rs", "rank": 74, "score": 186652.3127616593 }, { "content": "pub fn module_kind_to_label(kind:ModuleKind) -> &'static str {\n\n match kind {\n\n ModuleKind::Poster => \"Poster\",\n\n ModuleKind::DesignPage => \"Design\",\n\n ModuleKind::MemoryGame => \"Memory Game\",\n\n }\n\n}\n\nimpl From<Route> for String {\n\n fn from(route:Route) -> Self {\n\n match route {\n\n Route::NoAuth => \"/no-auth\".to_string(),\n\n\n\n Route::User(route) => {\n\n match route {\n\n UserRoute::Profile(ProfileSection::Landing) => \"/user/profile\".to_string(),\n\n UserRoute::Profile(ProfileSection::ChangeEmail) => \"/user/profile/change-email\".to_string(),\n\n UserRoute::ContinueRegistration(user) => {\n\n let user = serde_json::to_string(&user).unwrap_throw();\n\n let query = FirebaseUserQuery { user };\n\n let query = serde_qs::to_string(&query).unwrap_throw();\n", "file_path": "frontend/apps/crates/utils/src/routes.rs", "rank": 75, "score": 186652.3127616593 }, { "content": "pub fn do_success(page:&SigninPage, csrf:String) {\n\n storage::save_csrf_token(&csrf);\n\n\n\n let route:String = Route::User(UserRoute::Profile(ProfileSection::Landing)).into();\n\n dominator::routing::go_to_url(&route);\n\n\n\n ///generally speaking this kind of thing isn't necessary\n\n ///futures will just resolve and be dropped as part of the flow\n\n ///but because the oauth flow here opens a separate window\n\n ///it's more at risk to leave dangling Futures\n\n ///specifically, here, dangling futures which hold the Rc that holds it\n\n ///thereby creating a cycle, we need to break by cancelling 
that future\n\n ///see: https://github.com/jewish-interactive/ji-cloud/issues/78\n\n page.loader.cancel();\n\n}\n\n\n\npub async fn signin_google() -> Result<String, Option<SigninStatus>> {\n\n let token_promise = unsafe { firebase_signin_google() };\n\n signin(token_promise, SigninKind::Google).await\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/pages/signin/actions.rs", "rank": 76, "score": 185208.6965500187 }, { "content": "pub fn ui<T: AsRef<str>>(path:T) -> String {\n\n media_url(&format!(\"{}/{}\", MEDIA_UI_PATH, path.as_ref()))\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 77, "score": 181853.78387702495 }, { "content": "// stolen from the stdlib and modified (to work on stable)\n\nfn split_once<'a>(s: &'a str, delimiter: char) -> Option<(&'a str, &'a str)> {\n\n let start = s.find(delimiter)?;\n\n let end = start + delimiter.len_utf8();\n\n Some((&s[..start], &s[end..]))\n\n}\n\n\n", "file_path": "backend/api/src/extractor.rs", "rank": 78, "score": 180302.35047822385 }, { "content": "// fixme: replace with const-generics once stable\n\npub trait Scope {\n\n fn scope() -> UserScope;\n\n}\n\n\n\npub(crate) struct ScopeManageCategory;\n\n\n\nimpl Scope for ScopeManageCategory {\n\n fn scope() -> UserScope {\n\n UserScope::ManageCategory\n\n }\n\n}\n\n\n\npub(crate) struct ScopeManageImage;\n\n\n\nimpl Scope for ScopeManageImage {\n\n fn scope() -> UserScope {\n\n UserScope::ManageImage\n\n }\n\n}\n\n\n", "file_path": "backend/api/src/extractor.rs", "rank": 79, "score": 171095.7332081644 }, { "content": "fn drag_callback(name:String) -> impl Fn(events::DragStart) {\n\n move |evt:events::DragStart| {\n\n if let Some(data_transfer) = evt.data_transfer() {\n\n data_transfer.set_data(\"module_kind\", &name);\n\n data_transfer.set_drop_effect(\"all\");\n\n } else {\n\n log::error!(\"no data transfer - use a real computer!!!\");\n\n }\n\n }\n\n}\n", "file_path": 
"frontend/apps/crates/entry/jig/edit/src/pages/edit/module_selector.rs", "rank": 80, "score": 170531.987083976 }, { "content": "pub fn normalize(value: f64, min: f64, max: f64) -> f64 {\n\n if min == max {\n\n 0.0\n\n } else {\n\n ((value - min) * (1.0 / (max - min))).max(0.0).min(1.0)\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/pages/edit/scrolling.rs", "rank": 81, "score": 169759.91735669636 }, { "content": "/// Represents a A endpoint that the backend will support, and how to call it.\n\npub trait ApiEndpoint {\n\n /// The request type for this endpoint.\n\n type Req: Serialize;\n\n\n\n /// The response type for this endpoint.\n\n type Res: DeserializeOwned + Serialize;\n\n\n\n /// The error type for this endpoint.\n\n type Err: DeserializeOwned + Serialize;\n\n\n\n /// The path to the endpoint.\n\n const PATH: &'static str;\n\n\n\n /// The method used to make a request to the endpoint.\n\n const METHOD: Method;\n\n}\n\n\n\n/// Category endpoints.\n\npub mod category;\n\n\n", "file_path": "shared/rust/src/api/endpoints.rs", "rank": 82, "score": 168675.15362433624 }, { "content": "/// Initializes sentry with the given dsn and remote_target\n\npub fn init(\n\n dsn: Option<&str>,\n\n remote_target: config::RemoteTarget,\n\n) -> anyhow::Result<sentry::ClientInitGuard> {\n\n let dsn = dsn.unwrap_or(\"\");\n\n let options = sentry::ClientOptions {\n\n dsn: sentry::IntoDsn::into_dsn(dsn)?,\n\n environment: Some(std::borrow::Cow::Borrowed(remote_target.as_str())),\n\n ..Default::default()\n\n };\n\n\n\n let options = sentry::apply_defaults(options);\n\n\n\n Ok(sentry::init(options))\n\n}\n", "file_path": "backend/core/src/sentry.rs", "rank": 83, "score": 168205.72021465912 }, { "content": "pub fn check_no_db(\n\n token_string: &str,\n\n csrf: &str,\n\n key: DecodingKey,\n\n) -> Result<Option<AuthClaims>, Error> {\n\n let claims = get_claims(token_string, key)?;\n\n if claims.csrf.as_deref() == Some(csrf) {\n\n Ok(Some(claims))\n\n } else {\n\n 
Ok(None)\n\n }\n\n}\n\npub async fn check_no_csrf(\n\n db: &PgPool,\n\n token_string: &str,\n\n key: DecodingKey<'_>,\n\n) -> anyhow::Result<Option<AuthClaims>> {\n\n let claims = get_claims(token_string, key)\n\n .map_err(|e| anyhow::anyhow!(\"{:?}\", e))\n\n .unwrap();\n\n\n\n match db::user::exists(db, claims.id).await? {\n\n true => Ok(Some(claims)),\n\n false => Ok(None),\n\n }\n\n}\n", "file_path": "backend/api/src/jwt.rs", "rank": 84, "score": 166990.25977423135 }, { "content": "pub fn reply_signin_auth(\n\n user_id: Uuid,\n\n jwt_encoding_key: &EncodingKey,\n\n local_insecure: bool,\n\n) -> anyhow::Result<(String, Cookie<'static>)> {\n\n let csrf: String = thread_rng().sample_iter(&Alphanumeric).take(16).collect();\n\n\n\n let claims = AuthClaims {\n\n id: user_id,\n\n csrf: Some(csrf.clone()),\n\n };\n\n\n\n let jwt = jwt::encode(&jwt::Header::default(), &claims, jwt_encoding_key)?;\n\n\n\n let mut cookie = CookieBuilder::new(JWT_COOKIE_NAME, jwt)\n\n .http_only(true)\n\n .same_site(SameSite::Lax)\n\n .max_age(MAX_SIGNIN_COOKIE_DURATION);\n\n\n\n if !local_insecure {\n\n cookie = cookie.domain(COOKIE_DOMAIN);\n\n }\n\n\n\n Ok((csrf, cookie.finish()))\n\n}\n", "file_path": "backend/api/src/extractor.rs", "rank": 85, "score": 165804.5015738739 }, { "content": "pub fn generate_images(\n\n original: DynamicImage,\n\n kind: ImageKind,\n\n) -> anyhow::Result<(Vec<u8>, Vec<u8>, Vec<u8>)> {\n\n let resized = {\n\n let mut buffer = Vec::new();\n\n let (width, height) = kind.size();\n\n original\n\n .resize(width, height, FilterType::Nearest)\n\n .write_to(&mut buffer, ImageOutputFormat::Png)?;\n\n buffer\n\n };\n\n\n\n let thumbnail = {\n\n let mut buffer = Vec::new();\n\n let (width, height) = ImageKind::THUMBNAIL_SIZE;\n\n original\n\n .thumbnail(width, height)\n\n .write_to(&mut buffer, ImageOutputFormat::Png)?;\n\n buffer\n", "file_path": "backend/api/src/image_ops.rs", "rank": 86, "score": 165804.5015738739 }, { "content": "fn get_image_kind() -> ImageKind 
{\n\n let document:web_sys::Document = \n\n web_sys::window()\n\n .unwrap_throw()\n\n .document()\n\n .unwrap_throw();\n\n\n\n let input:HtmlInputElement = document.select(\"input[name='img_kind']:checked\");\n\n match input.value().as_ref() {\n\n \"sticker\" => ImageKind::Sticker,\n\n \"canvas\" => ImageKind::Canvas,\n\n _ => panic!(\"unknown img kind!\")\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/pages/images/add/dom.rs", "rank": 87, "score": 164324.7695521339 }, { "content": "#[cfg(feature = \"db\")]\n\npub fn read_sql_proxy() -> bool {\n\n std::env::args().any(|s| s == \"sqlproxy\")\n\n}\n\n\n\n/// Settings that are accessed at runtime (as compared to startup time)\n\n#[derive(Clone)]\n\npub struct RuntimeSettings {\n\n firebase_no_auth: bool,\n\n\n\n /// The port that the api runs on.\n\n pub api_port: u16,\n\n\n\n /// The code that the pages api runs on.\n\n pub pages_port: u16,\n\n\n\n /// When the server started.\n\n pub epoch: Duration,\n\n\n\n /// Used to encode jwt tokens.\n\n pub jwt_encoding_key: EncodingKey,\n", "file_path": "backend/core/src/settings.rs", "rank": 88, "score": 159705.17570647673 }, { "content": "fn get_profile_string(profile:&UserProfile) -> String {\n\n format!(\"{:?}\", profile)\n\n\n\n}\n\n\n\n\n\npub struct ProfileEmailChangePage {\n\n}\n\nimpl ProfileEmailChangePage {\n\n pub fn new() -> Rc<Self> {\n\n let _self = Rc::new(Self { });\n\n\n\n _self\n\n }\n\n \n\n pub fn render(_self: Rc<Self>) -> Dom {\n\n elem!(templates::profile_email_change(), { })\n\n }\n\n\n\n}\n", "file_path": "frontend/apps/crates/entry/user/src/pages/profile/dom.rs", "rank": 89, "score": 158312.64488034972 }, { "content": "pub fn init() -> anyhow::Result<()> {\n\n TermLogger::init(LevelFilter::Info, Config::default(), TerminalMode::Mixed)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "backend/api/src/logger.rs", "rank": 90, "score": 156572.44171612264 }, { "content": "#[cfg(not(feature = \"local\"))]\n\npub fn init_logger() -> 
anyhow::Result<()> {\n\n SimpleLogger::init(LevelFilter::Info, Config::default())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "backend/pages/src/logger.rs", "rank": 91, "score": 155415.3101465018 }, { "content": "fn bearer_token(headers: &HeaderMap) -> Option<&str> {\n\n let header: &HeaderValue = headers.get(header::AUTHORIZATION)?;\n\n\n\n split_once(header.to_str().ok()?, ' ')\n\n .filter(|(kind, _)| kind.eq_ignore_ascii_case(\"bearer\"))\n\n .map(|(_, token)| token)\n\n}\n\n\n\npub struct AuthError;\n\n\n\nimpl From<AuthError> for actix_web::Error {\n\n fn from(_other: AuthError) -> Self {\n\n HttpResponse::Unauthorized().into()\n\n }\n\n}\n\n\n\npub enum StatusError {\n\n Auth,\n\n Forbidden,\n\n InternalServerError,\n", "file_path": "backend/api/src/extractor.rs", "rank": 92, "score": 150963.10226754355 }, { "content": "// attempts to grab a uuid out of a string in the shape:\n\n// Key (<key>)=(<uuid>)<postfix>\n\nfn extract_uuid(s: &str) -> Option<Uuid> {\n\n // <uuid>)<postfix)\n\n let s = s.split(\"(\").nth(2)?;\n\n let s = &s[0..s.find(\")\")?];\n\n s.parse().ok()\n\n}\n\n\n", "file_path": "backend/api/src/http/endpoints/image.rs", "rank": 93, "score": 150963.10226754355 }, { "content": "fn csrf_header(headers: &HeaderMap) -> Option<&str> {\n\n headers.get(CSRF_HEADER_NAME)?.to_str().ok()\n\n}\n\n\n\n#[repr(transparent)]\n\npub struct WrapAuthClaimsNoDb(pub AuthClaims);\n\n\n\nimpl FromRequest for WrapAuthClaimsNoDb {\n\n type Error = AuthError;\n\n type Future = future::Ready<Result<Self, Self::Error>>;\n\n type Config = ();\n\n fn from_request(\n\n req: &actix_web::HttpRequest,\n\n _payload: &mut actix_web::dev::Payload,\n\n ) -> Self::Future {\n\n let cookie = req.cookie(JWT_COOKIE_NAME);\n\n let csrf = csrf_header(req.headers());\n\n let settings: &Data<RuntimeSettings> = req.app_data().expect(\"Settings??\");\n\n\n\n let (cookie, csrf) = match (cookie, csrf) {\n", "file_path": "backend/api/src/extractor.rs", "rank": 94, "score": 150963.10226754355 }, { 
"content": "pub fn env_bool(key: &str) -> bool {\n\n env::var(key).map_or(false, |it| [\"true\", \"1\", \"y\"].contains(&it.as_ref()))\n\n}\n", "file_path": "backend/core/src/env.rs", "rank": 95, "score": 150527.0869695946 }, { "content": "fn try_insecure_decode(token: &str) -> Option<FirebaseId> {\n\n let claims: jwkkeys::Claims = jsonwebtoken::dangerous_insecure_decode(&token).ok()?.claims;\n\n let user_id = claims.sub;\n\n Some(FirebaseId(user_id))\n\n}\n\n\n\npub struct FirebaseUser {\n\n pub id: FirebaseId,\n\n}\n\n\n\npub struct FirebaseId(pub String);\n\n\n", "file_path": "backend/api/src/extractor.rs", "rank": 96, "score": 149880.65882177354 }, { "content": "/// Get the port to run the server on.\n\npub fn get_addr(default: u16) -> SocketAddr {\n\n let port = std::env::var(\"PORT\").map_or(default, |it| it.parse().unwrap_or(default));\n\n\n\n ([0, 0, 0, 0], port).into()\n\n}\n", "file_path": "backend/core/src/http.rs", "rank": 97, "score": 149424.18407088332 }, { "content": "pub fn configure(config: &mut ServiceConfig) {\n\n config\n\n .route(\"/user.*\", web::get().to(spa::user_template))\n\n .route(\"/user\", web::get().to(spa::user_template))\n\n .route(\"/admin.*\", web::get().to(spa::admin_template))\n\n .route(\"/admin\", web::get().to(spa::admin_template))\n\n .route(\"/jig.*\", web::get().to(spa::jig_template))\n\n .route(\"/jig\", web::get().to(spa::jig_template))\n\n .route(\n\n \"/module/{kind}/{page_kind}/{jig_id}/{module_id}\",\n\n web::get().to(spa::module_template),\n\n )\n\n .route(\"/\", web::get().to(direct_template_home))\n\n .route(\"/no-auth\", web::get().to(direct_template_no_auth))\n\n .route(\"/info\", web::get().to(info_template))\n\n .route(\"/epoch\", web::get().to(epoch_page));\n\n}\n", "file_path": "backend/pages/src/server/routes.rs", "rank": 98, "score": 149424.18407088332 } ]
Rust
crates/augmented/application/audio-processor-standalone/src/options.rs
yamadapc/augmented-audio
2f662cd8aa1a0ba46445f8f41c8483ae2dc552d3
use std::ffi::OsString; use clap::ArgMatches; pub enum RenderingOptions { Online { input_file: Option<String>, }, Offline { input_file: String, output_file: String, }, } pub struct MidiOptions { pub input_file: Option<String>, } pub struct Options { midi: MidiOptions, rendering: RenderingOptions, } impl Options { pub fn rendering(&self) -> &RenderingOptions { &self.rendering } pub fn midi(&self) -> &MidiOptions { &self.midi } } pub fn parse_options(supports_midi: bool) -> Options { parse_options_from(supports_midi, &mut std::env::args_os()) } fn parse_options_from<I, T>(supports_midi: bool, args: I) -> Options where I: IntoIterator<Item = T>, T: Into<OsString> + Clone, { let app = clap::App::new("audio-processor-standalone"); let mut app = app .arg(clap::Arg::from_usage( "-i, --input-file=[INPUT_PATH] 'An input audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output-file=[OUTPUT_PATH] 'If specified, will render offline into this file (WAV)'", )); if supports_midi { app = app .arg(clap::Arg::from_usage( "--midi-input-file=[MIDI_INPUT_FILE] 'If specified, this MIDI file will be passed through the processor'", )); } let matches = app.get_matches_from(args); let midi_options = parse_midi_options(&matches); let rendering = parse_rendering_options(&matches); Options { midi: midi_options, rendering, } } fn parse_midi_options(matches: &ArgMatches) -> MidiOptions { MidiOptions { input_file: matches.value_of("midi-input-file").map(|s| s.into()), } } fn parse_rendering_options(matches: &ArgMatches) -> RenderingOptions { if matches.is_present("output-file") { if !matches.is_present("input-file") { log::error!("Please specify `--input-file`"); std::process::exit(1); } let input_path = matches.value_of("input-file").map(|s| s.into()).unwrap(); let output_path = matches.value_of("output-file").map(|s| s.into()).unwrap(); RenderingOptions::Offline { input_file: input_path, output_file: output_path, } } else { RenderingOptions::Online { input_file: 
matches.value_of("input-file").map(|s| s.into()), } } } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_empty_options() { let options = parse_options_from::<Vec<String>, String>(false, vec![]); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Online { .. } )); } #[test] fn test_parse_online_options() { let options = parse_options_from::<Vec<String>, String>( false, vec!["program".into(), "--input-file".into(), "test.mp3".into()], ); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Online { .. } )); match options.rendering() { RenderingOptions::Online { input_file } => { assert_eq!(input_file.as_ref().unwrap(), "test.mp3"); } _ => {} } } #[test] fn test_parse_midi_options() { let options = parse_options_from::<Vec<String>, String>( true, vec![ "program".into(), "--midi-input-file".into(), "bach.mid".into(), ], ); assert!(options.midi().input_file.is_some()); assert_eq!(options.midi().input_file.as_ref().unwrap(), "bach.mid") } #[test] fn test_parse_offline_options() { let options = parse_options_from::<Vec<String>, String>( false, vec![ "program".into(), "--input-file".into(), "test.mp3".into(), "--output-file".into(), "test.wav".into(), ], ); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Offline { .. } )); match options.rendering() { RenderingOptions::Offline { input_file, output_file, } => { assert_eq!(input_file, "test.mp3"); assert_eq!(output_file, "test.wav"); } _ => {} } } }
use std::ffi::OsString; use clap::ArgMatches; pub enum RenderingOptions { Online { input_file: Option<String>, }, Offline { input_file: String, output_file: String, }, } pub struct MidiOptions { pub input_file: Option<String>, } pub struct Options { midi: MidiOptions, rendering: RenderingOptions, } impl Options { pub fn rendering(&self) -> &RenderingOptions { &self.rendering } pub fn midi(&self) -> &MidiOptions { &self.midi } } pub fn parse_options(supports_midi: bool) -> Options { parse_options_from(supports_midi, &mut std::env::args_os()) }
fn parse_midi_options(matches: &ArgMatches) -> MidiOptions { MidiOptions { input_file: matches.value_of("midi-input-file").map(|s| s.into()), } } fn parse_rendering_options(matches: &ArgMatches) -> RenderingOptions { if matches.is_present("output-file") { if !matches.is_present("input-file") { log::error!("Please specify `--input-file`"); std::process::exit(1); } let input_path = matches.value_of("input-file").map(|s| s.into()).unwrap(); let output_path = matches.value_of("output-file").map(|s| s.into()).unwrap(); RenderingOptions::Offline { input_file: input_path, output_file: output_path, } } else { RenderingOptions::Online { input_file: matches.value_of("input-file").map(|s| s.into()), } } } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_empty_options() { let options = parse_options_from::<Vec<String>, String>(false, vec![]); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Online { .. } )); } #[test] fn test_parse_online_options() { let options = parse_options_from::<Vec<String>, String>( false, vec!["program".into(), "--input-file".into(), "test.mp3".into()], ); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Online { .. 
} )); match options.rendering() { RenderingOptions::Online { input_file } => { assert_eq!(input_file.as_ref().unwrap(), "test.mp3"); } _ => {} } } #[test] fn test_parse_midi_options() { let options = parse_options_from::<Vec<String>, String>( true, vec![ "program".into(), "--midi-input-file".into(), "bach.mid".into(), ], ); assert!(options.midi().input_file.is_some()); assert_eq!(options.midi().input_file.as_ref().unwrap(), "bach.mid") } #[test] fn test_parse_offline_options() { let options = parse_options_from::<Vec<String>, String>( false, vec![ "program".into(), "--input-file".into(), "test.mp3".into(), "--output-file".into(), "test.wav".into(), ], ); assert!(options.midi().input_file.is_none()); assert!(matches!( options.rendering(), RenderingOptions::Offline { .. } )); match options.rendering() { RenderingOptions::Offline { input_file, output_file, } => { assert_eq!(input_file, "test.mp3"); assert_eq!(output_file, "test.wav"); } _ => {} } } }
fn parse_options_from<I, T>(supports_midi: bool, args: I) -> Options where I: IntoIterator<Item = T>, T: Into<OsString> + Clone, { let app = clap::App::new("audio-processor-standalone"); let mut app = app .arg(clap::Arg::from_usage( "-i, --input-file=[INPUT_PATH] 'An input audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output-file=[OUTPUT_PATH] 'If specified, will render offline into this file (WAV)'", )); if supports_midi { app = app .arg(clap::Arg::from_usage( "--midi-input-file=[MIDI_INPUT_FILE] 'If specified, this MIDI file will be passed through the processor'", )); } let matches = app.get_matches_from(args); let midi_options = parse_midi_options(&matches); let rendering = parse_rendering_options(&matches); Options { midi: midi_options, rendering, } }
function_block-function_prefix_line
[ { "content": "pub fn render_to_xml<C: Component>(mut root: C) -> String {\n\n use xml::writer::{EmitterConfig, EventWriter, XmlEvent};\n\n\n\n let bytes = Vec::new();\n\n let buf_sink = BufWriter::new(bytes);\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(buf_sink);\n\n\n\n let root_tag = root.tag();\n\n let root_tag_str = root_tag.to_string();\n\n\n\n let event: XmlEvent = XmlEvent::start_element(&*root_tag_str).into();\n\n writer.write(event);\n\n log::info!(\"Rendering {:?}\", root_tag_str);\n\n\n\n {\n\n let empty_props: Box<dyn Props + 'static> = Box::new(());\n\n let mut ctx = RenderContext::new(&empty_props).into();\n\n let node = root.render(ctx);\n", "file_path": "crates/spikes/augmented-ui/src/component.rs", "rank": 1, "score": 292619.3153873754 }, { "content": "pub fn shallow_render_to_xml<C: Component>(mut root: C) -> String {\n\n use xml::writer::{EmitterConfig, EventWriter, XmlEvent};\n\n\n\n let bytes = Vec::new();\n\n let buf_sink = BufWriter::new(bytes);\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(buf_sink);\n\n\n\n let root_tag = root.tag();\n\n let root_tag_str = root_tag.to_string();\n\n\n\n let event: XmlEvent = XmlEvent::start_element(&*root_tag_str).into();\n\n writer.write(event);\n\n log::info!(\"Rendering {:?}\", root_tag_str);\n\n\n\n {\n\n let empty_props: Box<dyn Props + 'static> = Box::new(());\n\n let mut ctx = RenderContext::new(&empty_props).into();\n\n let node = root.render(ctx);\n", "file_path": "crates/spikes/augmented-ui/src/component.rs", "rank": 2, "score": 288964.70802239666 }, { "content": "/// Render a processor offline into a file.\n\npub fn run_offline_render<Processor>(options: OfflineRenderOptions<Processor>)\n\nwhere\n\n Processor: StandaloneProcessor,\n\n{\n\n let OfflineRenderOptions {\n\n mut app,\n\n handle,\n\n input_path,\n\n output_path,\n\n midi_input_file,\n\n } = options;\n\n\n\n let _ = 
wisual_logger::try_init_from_env();\n\n\n\n let handle = handle.unwrap_or_else(|| audio_garbage_collector::handle());\n\n\n\n log::info!(\n\n \"Rendering offline input={} output={}\",\n\n input_path,\n\n output_path\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/offline.rs", "rank": 3, "score": 288777.5225584931 }, { "content": "/// Internal main function used by `audio_processor_main`.\n\nfn standalone_main(mut app: impl StandaloneProcessor, handle: Option<&Handle>) {\n\n let options = options::parse_options(app.supports_midi());\n\n\n\n let midi_input_file = options.midi().input_file.as_ref().map(|midi_input_file| {\n\n let file_contents = std::fs::read(midi_input_file).expect(\"Failed to read input MIDI file\");\n\n let (_, midi_file) = augmented_midi::parse_midi_file::<String, Vec<u8>>(&file_contents)\n\n .expect(\"Failed to parse input MIDI file\");\n\n midi_file\n\n });\n\n\n\n match options.rendering() {\n\n RenderingOptions::Online { .. } => {\n\n log::info!(\"Starting stand-alone online rendering with default IO config\");\n\n let _handles = standalone_start(app, handle);\n\n std::thread::park();\n\n }\n\n RenderingOptions::Offline {\n\n input_file: input_path,\n\n output_file: output_path,\n\n } => {\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/lib.rs", "rank": 4, "score": 243799.41005904746 }, { "content": "pub fn audio_thread_set_options(input_device_id: String, output_device_id: String) -> Result<i32> {\n\n let actor_system_thread = ActorSystemThread::current();\n\n actor_system_thread.spawn_result(async move {\n\n let audio_thread = AudioThread::from_registry();\n\n audio_thread\n\n .send(AudioThreadMessage::SetOptions {\n\n host_id: AudioHostId::Default,\n\n input_device_id: if input_device_id == \"default\" {\n\n Some(AudioDeviceId::Default)\n\n } else {\n\n Some(AudioDeviceId::Id(input_device_id))\n\n },\n\n output_device_id: if output_device_id == \"default\" {\n\n 
AudioDeviceId::Default\n\n } else {\n\n AudioDeviceId::Id(output_device_id)\n\n },\n\n })\n\n .await\n\n .unwrap().unwrap();\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 5, "score": 239387.72230361655 }, { "content": "/// Check if there's a non-null main CFBundle.\n\npub fn has_main_bundle() -> bool {\n\n unsafe {\n\n let main_bundle = CFBundleGetMainBundle();\n\n !main_bundle.is_null()\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 7, "score": 221395.79323642707 }, { "content": "pub fn get_version() -> String {\n\n format!(\n\n \"{}-{}-{}\",\n\n env!(\"PROFILE\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"GIT_REV_SHORT\")\n\n )\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/utils.rs", "rank": 8, "score": 221266.02822837693 }, { "content": "/// Build a `String` from a `CFStringRef`.\n\nfn string_from_cfstring(url_cfstring: CFStringRef) -> Option<String> {\n\n unsafe {\n\n let length = CFStringGetLength(url_cfstring) + 1;\n\n let mut output_str = String::with_capacity(length as usize);\n\n for _ in 0..length {\n\n output_str.push(' ');\n\n }\n\n let output_str = CString::new(output_str).ok()?;\n\n let output_str = output_str.into_raw();\n\n let result = CFStringGetCString(url_cfstring, output_str, length, kCFStringEncodingUTF8);\n\n if result == 0 {\n\n return None;\n\n }\n\n let output_str = CString::from_raw(output_str);\n\n let output_str = output_str.to_str().ok()?;\n\n Some(output_str.to_string())\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 9, "score": 216527.35450164595 }, { "content": "pub fn audio_io_get_input_devices() -> Result<String> {\n\n let devices_list = AudioIOService::devices_list(None)?;\n\n let result = serde_json::to_string(&devices_list)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 10, "score": 213496.6111977687 }, { "content": 
"pub fn parse_midi_file<\n\n 'a,\n\n StringRepr: Borrow<str> + From<&'a str>,\n\n Buffer: Borrow<[u8]> + From<&'a [u8]>,\n\n>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDIFile<StringRepr, Buffer>> {\n\n let (input, chunks) = many0(parse_chunk)(input)?;\n\n Ok((input, MIDIFile { chunks }))\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 11, "score": 211840.2935137294 }, { "content": "/// Build 'RunOptions' from Clap matches\n\npub fn parse_run_options(matches: ArgMatches) -> Option<RunOptions> {\n\n let matches = matches.subcommand_matches(\"run\")?;\n\n let plugin_path = matches.value_of(\"plugin\")?.to_string();\n\n let input_audio = matches.value_of(\"input\").map(|i| i.to_string());\n\n let output_audio = matches.value_of(\"output\").map(|value| value.to_string());\n\n let open_editor = matches.is_present(\"editor\");\n\n let watch = matches.is_present(\"watch\");\n\n\n\n // Audio thread options\n\n let audio_host_id = matches.value_of(\"host-id\").map(|value| value.to_string());\n\n let output_device_id = matches\n\n .value_of(\"output-device-id\")\n\n .map(|value| value.to_string());\n\n let buffer_size = matches\n\n .value_of(\"buffer-size\")\n\n .map(|value| value.parse().expect(\"Invalid buffer size\"));\n\n let sample_rate = matches\n\n .value_of(\"sample-rate\")\n\n .map(|value| value.parse().expect(\"Invalid sample rate\"));\n\n let input_device_id = matches\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/options/mod.rs", "rank": 12, "score": 208404.30535274214 }, { "content": "pub fn set_is_playing(value: bool) -> Result<i32> {\n\n with_state0(|state| {\n\n state\n\n .processor_handle\n\n .is_playing\n\n .store(value, Ordering::Relaxed);\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 13, "score": 203840.57515972102 }, { "content": "pub fn initialize_midi_host(\n\n app: &mut impl StandaloneProcessor,\n\n handle: Option<&Handle>,\n\n) -> (Option<MidiHost>, 
Option<MidiContext>) {\n\n let midi_host = app.midi().and(handle).map(|handle| {\n\n // MIDI set-up\n\n let mut midi_host = MidiHost::default_with_handle(handle);\n\n midi_host.start_midi().expect(\"Failed to start MIDI host\");\n\n midi_host\n\n });\n\n let midi_context = midi_host.as_ref().map(|midi_host| {\n\n let midi_message_queue = midi_host.messages().clone();\n\n let midi_audio_thread_handler = MidiAudioThreadHandler::default();\n\n MidiContext {\n\n midi_audio_thread_handler,\n\n midi_message_queue,\n\n }\n\n });\n\n (midi_host, midi_context)\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/general.rs", "rank": 14, "score": 202816.3724662204 }, { "content": "pub fn initialize_midi_host(\n\n _app: &mut impl StandaloneProcessor,\n\n _handle: Option<&Handle>,\n\n) -> (Option<MidiHost>, Option<MidiContext>) {\n\n (None, None)\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/ios.rs", "rank": 15, "score": 202816.3724662204 }, { "content": "pub fn flush_midi_events(\n\n _midi_context: Option<&mut MidiContext>,\n\n _processor: &impl StandaloneProcessor,\n\n) {\n\n}\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/ios.rs", "rank": 16, "score": 202816.3724662204 }, { "content": "pub fn flush_midi_events(\n\n midi_context: Option<&mut MidiContext>,\n\n processor: &mut impl StandaloneProcessor,\n\n) {\n\n if let Some(MidiContext {\n\n midi_audio_thread_handler,\n\n midi_message_queue,\n\n }) = midi_context\n\n {\n\n if let Some(midi_handler) = processor.midi() {\n\n midi_audio_thread_handler.collect_midi_messages(midi_message_queue);\n\n midi_handler.process_midi_events(midi_audio_thread_handler.buffer());\n\n midi_audio_thread_handler.clear();\n\n }\n\n }\n\n}\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/general.rs", "rank": 17, "score": 
202816.3724662204 }, { "content": "pub fn parse_chunk<\n\n 'a,\n\n StringRepr: Borrow<str> + From<&'a str>,\n\n Buffer: Borrow<[u8]> + From<Input<'a>>,\n\n>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDIFileChunk<StringRepr, Buffer>> {\n\n let (input, chunk_name) = take(4u32)(input)?;\n\n let chunk_name: &str = std::str::from_utf8(chunk_name)\n\n .map_err(|err| Err::Failure(Error::from_external_error(input, ErrorKind::Fail, err)))?;\n\n\n\n let (input, chunk_length) = parse_chunk_length(input)?;\n\n let (input, chunk_body) = take(chunk_length)(input)?;\n\n\n\n let (_, chunk) = match chunk_name {\n\n \"MThd\" => {\n\n assert_eq!(chunk_length, 6);\n\n parse_header_body(chunk_body)\n\n }\n\n \"MTrk\" => {\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 18, "score": 202462.25879069063 }, { "content": "/// Check if there's a non-null CFBundle with this identifier.\n\npub fn has_bundle(bundle_identifier: &str) -> bool {\n\n unsafe {\n\n let bundle_identifier = make_cfstring(bundle_identifier);\n\n if let Some(bundle_identifier) = bundle_identifier {\n\n let bundle = CFBundleGetBundleWithIdentifier(bundle_identifier);\n\n !bundle.is_null()\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 19, "score": 202420.79242020243 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"TestHostPlugin - RMS\");\n\n let mut oscillator = augmented_oscillator::Oscillator::sine(44100.0);\n\n oscillator.set_frequency(440.0);\n\n let mut output_buffer = Vec::new();\n\n output_buffer.resize(400000, 0.0);\n\n for sample in &mut output_buffer {\n\n *sample = oscillator.get();\n\n oscillator.tick();\n\n }\n\n\n\n group.bench_function(\"`abs` - stress 10s of audio at 44.1kHz\", |b| {\n\n b.iter(|| rms_abs(black_box(&mut output_buffer)))\n\n });\n\n\n\n group.bench_function(\"`sq root - stress 10s of audio` at 44.1kHz\", |b| {\n\n 
b.iter(|| rms_pow(black_box(&mut output_buffer)))\n\n });\n\n\n\n let mut oscillator = augmented_oscillator::Oscillator::sine(44100.0);\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/benches/rms/mod.rs", "rank": 20, "score": 202189.65885504318 }, { "content": "/// Start the offline rendering command, rendering to an output file\n\nfn run_offline_rendering(run_options: RunOptions) {\n\n log::info!(\"Running offline rendering\");\n\n let output_file_path = run_options.output_audio().clone().unwrap();\n\n let (audio_settings, _) = get_audio_options(&run_options);\n\n let offline_renderer = OfflineRenderer::new(\n\n audio_settings,\n\n &run_options\n\n .input_audio()\n\n .clone()\n\n .expect(\"The \\\"--input\\\" flag is required for offline rendering\"),\n\n &output_file_path,\n\n run_options.plugin_path(),\n\n );\n\n offline_renderer.run().expect(\"Failed to render audio\");\n\n}\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/mod.rs", "rank": 21, "score": 199344.83209384928 }, { "content": "pub fn set_input_file_path(path: String) -> Result<i32> {\n\n send_host_message(SetAudioFilePathMessage(PathBuf::from(path)));\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 22, "score": 198939.02657982605 }, { "content": "pub fn set_vst_file_path(path: String) -> Result<i32> {\n\n send_host_message(LoadPluginMessage {\n\n plugin_path: PathBuf::from(path),\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 23, "score": 198939.02657982605 }, { "content": "pub fn parse_header_body<StringRepr: Borrow<str>, Buffer: Borrow<[u8]>>(\n\n input: Input,\n\n) -> Result<MIDIFileChunk<StringRepr, Buffer>> {\n\n let (input, format) = be_u16(input)?;\n\n let format = match format {\n\n 0 => Ok(MIDIFileFormat::Single),\n\n 1 => Ok(MIDIFileFormat::Simultaneous),\n\n 2 => Ok(MIDIFileFormat::Sequential),\n\n _ => Ok(MIDIFileFormat::Unknown),\n\n }?;\n\n let (input, num_tracks) = be_u16(input)?;\n\n let 
(input, division_word) = be_u16(input)?;\n\n\n\n let division_type = division_word >> 15;\n\n let (input, division) = match division_type {\n\n 0 => {\n\n let ticks_per_quarter_note = (division_word << 1) >> 1;\n\n Ok((\n\n input,\n\n MIDIFileDivision::TicksPerQuarterNote {\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 24, "score": 198184.38600028423 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"TestHostPlugin - RunningRMSProcessor\");\n\n let mut oscillator = augmented_oscillator::Oscillator::sine(44100.0);\n\n oscillator.set_frequency(440.0);\n\n let mut audio_buffer = VecAudioBuffer::new();\n\n audio_buffer.resize(2, 512, 0.0);\n\n for frame in audio_buffer.frames_mut() {\n\n frame[0] = oscillator.get();\n\n frame[1] = oscillator.get();\n\n oscillator.tick();\n\n }\n\n let garbage_collector = audio_garbage_collector::GarbageCollector::default();\n\n let mut processor = RunningRMSProcessor::new_with_duration(\n\n garbage_collector.handle(),\n\n Duration::from_millis(300),\n\n );\n\n processor.prepare(AudioProcessorSettings {\n\n sample_rate: 44100.,\n\n input_channels: 2,\n\n output_channels: 2,\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/benches/running_rms_processor/mod.rs", "rank": 25, "score": 197895.23229492892 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"CpalVstBufferHandler\");\n\n let mut oscillator = augmented_oscillator::Oscillator::sine(44100.0);\n\n oscillator.set_frequency(440.0);\n\n let mut output_buffer = VecAudioBuffer::new();\n\n output_buffer.resize(2, 512, 0.0);\n\n for sample_index in 0..output_buffer.num_samples() {\n\n let sample = oscillator.get();\n\n oscillator.tick();\n\n for channel_index in 0..output_buffer.num_channels() {\n\n output_buffer.set(channel_index, sample_index, sample)\n\n }\n\n }\n\n let settings = AudioProcessorSettings::new(1000.0, 2, 2, 512);\n\n let mut 
buffer_handler = CpalVstBufferHandler::new(settings);\n\n\n\n group.bench_function(\n\n \"cpal buffer conversion - 512 samples 11ms to process at 44.1kHz\",\n\n |b| {\n\n b.iter(|| {\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/benches/cpal_buffer_conversion/mod.rs", "rank": 26, "score": 197895.23229492892 }, { "content": "pub fn audio_node_create(audio_processor_name: String) -> Result<u32> {\n\n let processor: Result<NodeType<f32>> = match audio_processor_name.as_str() {\n\n \"delay\" => Ok(Box::new(audio_processor_time::MonoDelayProcessor::default())),\n\n \"filter\" => Ok(Box::new(augmented_dsp_filters::rbj::FilterProcessor::new(\n\n augmented_dsp_filters::rbj::FilterType::LowPass,\n\n ))),\n\n \"gain\" => Ok(Box::new(\n\n audio_processor_utility::gain::GainProcessor::default(),\n\n )),\n\n \"pan\" => Ok(Box::new(\n\n audio_processor_utility::pan::PanProcessor::default(),\n\n )),\n\n _ => Err(anyhow::Error::msg(\"Failed to create processor\")),\n\n };\n\n let processor = processor?;\n\n\n\n let index = crate::graph::audio_node_create_raw(processor);\n\n\n\n Ok(index as u32)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 27, "score": 196673.03495937018 }, { "content": "/// A default main function for an [`AudioProcessor`] and [`MidiEventHandler`].\n\n///\n\n/// Run an [`AudioProcessor`] / [`MidiEventHandler`] as a stand-alone cpal app and forward MIDI\n\n/// messages received on all inputs to it. Same as `audio_processor_main`, but requires\n\n/// [`MidiEventHandler`] to support MIDI.\n\n///\n\n/// Will internally create [`cpal::Stream`], [`audio_processor_standalone_midi::MidiHost`] and park the current thread. 
If the thread\n\n/// is unparked the function will exit and the audio/MIDI threads will stop once these structures\n\n/// are dropped.\n\npub fn audio_processor_main_with_midi<\n\n Processor: AudioProcessor<SampleType = f32> + MidiEventHandler + Send + 'static,\n\n>(\n\n audio_processor: Processor,\n\n handle: &Handle,\n\n) {\n\n let app = StandaloneProcessorImpl::new(audio_processor);\n\n standalone_main(app, Some(handle));\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/lib.rs", "rank": 28, "score": 194870.73213459886 }, { "content": "pub fn do_xml_render<W: std::io::Write>(writer: &mut xml::writer::EventWriter<W>, node: &Node) {\n\n // Early exit for groups\n\n if let Some(children) = node.children() {\n\n for child in children {\n\n do_xml_render(writer, child);\n\n }\n\n return;\n\n }\n\n\n\n let tag_str = node.tag().to_string();\n\n let event: XmlEvent = XmlEvent::start_element(&*tag_str).into();\n\n writer.write(event);\n\n\n\n if let Node::Component { inner, props } = node {\n\n let mut ctx = RenderContext::new(props).into();\n\n let child = inner.render(ctx);\n\n do_xml_render(writer, &child);\n\n }\n\n\n\n let event: XmlEvent = XmlEvent::end_element().into();\n\n writer.write(event);\n\n}\n\n\n", "file_path": "crates/spikes/augmented-ui/src/component.rs", "rank": 29, "score": 192994.74670558944 }, { "content": "pub fn get_events_sink(sink: StreamSink<String>) -> Result<i32> {\n\n std::thread::spawn(move || loop {\n\n sink.add(\"MESSAGE\".to_string());\n\n std::thread::sleep(Duration::from_millis(1000));\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 30, "score": 191791.03901044774 }, { "content": "/// Start an [`AudioProcessor`] / [`MidiEventHandler`] as a stand-alone cpal app and forward MIDI\n\n/// messages received on all inputs to it.\n\n///\n\n/// Returns the [`cpal::Stream`]s and [`MidiHost`]. 
The audio-thread will keep running until these are\n\n/// dropped.\n\npub fn audio_processor_start_with_midi<\n\n Processor: AudioProcessor<SampleType = f32> + MidiEventHandler + Send + 'static,\n\n>(\n\n audio_processor: Processor,\n\n handle: &Handle,\n\n) -> StandaloneHandles {\n\n let app = StandaloneProcessorImpl::new(audio_processor);\n\n standalone_start(app, Some(handle))\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/mod.rs", "rank": 31, "score": 190261.41564675525 }, { "content": "pub fn time<T>(label: &str, body: impl FnOnce() -> T) -> T {\n\n let start = Instant::now();\n\n let result = body();\n\n log::info!(\"{} duration={}ms\", label, start.elapsed().as_millis());\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_time() {\n\n let result = time(\"test_time\", || 10_i32.pow(2));\n\n assert_eq!(result, 100);\n\n }\n\n}\n", "file_path": "crates/augmented/ops/augmented-metrics/src/lib.rs", "rank": 32, "score": 188805.9836189408 }, { "content": "fn find_target(config_path: &str, cargo_package: &CargoToml) -> Option<String> {\n\n let config_path = std::fs::canonicalize(Path::new(config_path)).ok()?;\n\n let mut config_dir = config_path.parent()?;\n\n loop {\n\n log::info!(\"Searching for target in {:?}\", config_dir);\n\n let mut read_dir = config_dir.read_dir().ok()?;\n\n let target_dir =\n\n read_dir.find(|item| item.is_ok() && item.as_ref().unwrap().file_name() == \"target\");\n\n if let Some(Ok(target_dir)) = target_dir {\n\n return Some(String::from(\n\n target_dir\n\n .path()\n\n .join(format!(\"release/lib{}.dylib\", cargo_package.lib.name))\n\n .to_str()?,\n\n ));\n\n } else {\n\n config_dir = config_dir.parent()?;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/development/bundler/src/main.rs", "rank": 33, "score": 188590.36228844774 }, { "content": "pub fn get_raw_window_handle(parent: *mut c_void) -> RawWindowHandle {\n\n let parent_id = 
parent as id;\n\n let parent_window = unsafe { msg_send![parent_id, window] };\n\n RawWindowHandle::MacOS(MacOSHandle {\n\n ns_window: parent_window,\n\n ns_view: parent,\n\n ..MacOSHandle::empty()\n\n })\n\n}\n", "file_path": "crates/augmented/gui/iced-editor/src/macos/mod.rs", "rank": 34, "score": 188262.0366558745 }, { "content": "/// Set all samples of an AudioBuffer to Zero::zero\n\npub fn clear<Buffer, SampleType>(buf: &mut Buffer)\n\nwhere\n\n Buffer: AudioBuffer<SampleType = SampleType>,\n\n SampleType: num::Zero,\n\n{\n\n for sample in buf.slice_mut() {\n\n *sample = SampleType::zero();\n\n }\n\n}\n\n\n\n/// An AudioBuffer that stores samples as interleaved frames, used for CPAL compatibility.\n\n///\n\n/// Example layout:\n\n///\n\n/// [\n\n/// 0, 0, // <- left_sample, right_sample,\n\n/// ...,\n\n/// ]\n\npub struct InterleavedAudioBuffer<'a, SampleType> {\n\n num_channels: usize,\n", "file_path": "crates/augmented/audio/audio-processor-traits/src/audio_buffer.rs", "rank": 35, "score": 187391.0529371197 }, { "content": "pub fn get_raw_window_handle(parent: *mut c_void) -> RawWindowHandle {\n\n let parent_id = parent as id;\n\n let parent_window = unsafe { msg_send![parent_id, window] };\n\n RawWindowHandle::MacOS(MacOSHandle {\n\n ns_window: parent_window,\n\n ns_view: parent,\n\n ..MacOSHandle::empty()\n\n })\n\n}\n", "file_path": "crates/spikes/example-iced-xcode-integration/src/macos/mod.rs", "rank": 36, "score": 186358.5036169143 }, { "content": "/// Entry-point for the run plug-in command. 
Mostly kicks-off other work:\n\n///\n\n/// * Parses options\n\n/// * Creates the host, audio and other threads\n\n/// * Loads the audio-file (blocking before starting the plug-in)\n\n/// * Loads the audio-plugin\n\n/// * Creates a window for the plug-in & blocks on it (if specified)\n\n/// * Otherwise parks the current thread forever\n\npub fn run_test(run_options: RunOptions) {\n\n if run_options.output_audio().is_some() {\n\n run_offline_rendering(run_options);\n\n return;\n\n }\n\n\n\n let actor_system_thread = ActorSystemThread::current();\n\n\n\n let (audio_settings, audio_thread_options) = get_audio_options(&run_options);\n\n let mut host = actor_system_thread.spawn_result(async move {\n\n TestPluginHost::new(audio_settings, audio_thread_options, false)\n\n });\n\n host.set_mono_input(run_options.use_mono_input());\n\n run_load_audio_file(&run_options, &mut host);\n\n run_initialize_plugin(&run_options, &mut host);\n\n\n\n let instance = host.plugin_instance();\n\n let host = actor_system_thread.spawn_result(async move { host.start() });\n\n // This needs to be kept around otherwise the watcher will stop when dropped\n\n let _maybe_watcher = run_initialize_file_watch_thread(&run_options, host.clone());\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/mod.rs", "rank": 37, "score": 185769.53866797837 }, { "content": "pub fn start_logging_midi_host() -> Result<Vec<MidiInputConnection<Data>>> {\n\n fn callback(_timestamp: u64, bytes: &[u8], _data: &mut Data) {\n\n let message = rimd::MidiMessage::from_bytes(Vec::from(bytes));\n\n match message.status() {\n\n Status::NoteOff => {\n\n log::info!(\"Note off - {:?}\", message.data)\n\n }\n\n Status::NoteOn => {\n\n log::info!(\"Note on - {:?}\", message.data)\n\n }\n\n Status::PolyphonicAftertouch => {\n\n log::info!(\"Polyphonic aftertouch - {:?}\", message.data)\n\n }\n\n Status::ControlChange => {\n\n log::info!(\"Size: {:?}\", bytes);\n\n log::info!(\"Control change - {:?}\", 
message.data)\n\n }\n\n Status::ProgramChange => {\n\n log::info!(\"Program change - {:?}\", message.data)\n\n }\n", "file_path": "crates/scripts/midi-logger-host/src/midi_host/mod.rs", "rank": 38, "score": 184424.54877941884 }, { "content": "pub fn get_output_config(\n\n options: &AudioThreadOptions,\n\n output_device: &cpal::Device,\n\n) -> Result<cpal::StreamConfig, AudioThreadError> {\n\n let output_config = output_device.default_output_config()?;\n\n let sample_format = output_config.sample_format();\n\n let mut output_config: cpal::StreamConfig = output_config.into();\n\n output_config.buffer_size = options.buffer_size.clone().into();\n\n output_config.channels = options.num_channels as u16;\n\n output_config.sample_rate = options.sample_rate;\n\n\n\n if sample_format != cpal::SampleFormat::F32 {\n\n return Err(AudioThreadError::UnsupportedSampleFormat);\n\n }\n\n Ok(output_config)\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 39, "score": 182085.06224731065 }, { "content": "pub fn get_input_config(\n\n options: &AudioThreadOptions,\n\n input_device: &cpal::Device,\n\n) -> Result<cpal::StreamConfig, AudioThreadError> {\n\n let input_config = input_device.default_input_config()?;\n\n let sample_format = input_config.sample_format();\n\n let mut input_config: cpal::StreamConfig = input_config.into();\n\n input_config.buffer_size = options.buffer_size.clone().into();\n\n input_config.channels = options.num_channels as u16;\n\n input_config.sample_rate = options.sample_rate;\n\n\n\n if sample_format != cpal::SampleFormat::F32 {\n\n return Err(AudioThreadError::UnsupportedSampleFormat);\n\n }\n\n Ok(input_config)\n\n}\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 40, "score": 182085.06224731065 }, { "content": "pub fn get_cpal_input_device(\n\n host: &cpal::Host,\n\n input_device_id: &AudioDeviceId,\n\n) -> 
Result<cpal::Device, AudioThreadError> {\n\n let maybe_device = match &input_device_id {\n\n AudioDeviceId::Default => host.default_input_device(),\n\n AudioDeviceId::Id(id) => find_cpal_input_device_by_name(host, id),\n\n };\n\n\n\n match maybe_device {\n\n Some(device) => Ok(device),\n\n None => Err(AudioThreadError::OutputDeviceNotFoundError),\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 42, "score": 180182.32932064353 }, { "content": "pub fn get_cpal_output_device(\n\n host: &cpal::Host,\n\n output_device_id: &AudioDeviceId,\n\n) -> Result<cpal::Device, AudioThreadError> {\n\n let maybe_device = match &output_device_id {\n\n AudioDeviceId::Default => host.default_output_device(),\n\n AudioDeviceId::Id(id) => find_cpal_output_device_by_name(host, id),\n\n };\n\n\n\n match maybe_device {\n\n Some(device) => Ok(device),\n\n None => Err(AudioThreadError::OutputDeviceNotFoundError),\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 43, "score": 180182.32932064353 }, { "content": "pub fn run_all_snapshot_tests(list_crates_service: ListCratesService, update_snapshots: bool) {\n\n let augmented_crates = list_crates_service.find_augmented_crates();\n\n augmented_crates\n\n .into_iter()\n\n .filter(|(_, manifest)| manifest.has_snapshot_tests())\n\n .for_each(|(path, manifest)| run_snapshot_tests(&path, manifest, update_snapshots));\n\n}\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/snapshot_tests_service/mod.rs", "rank": 45, "score": 177921.90514725583 }, { "content": "pub fn parse_midi<'a, Buffer: Borrow<[u8]> + From<Input<'a>>>(\n\n input: Input<'a>,\n\n) -> Result<'a, Vec<MIDIMessage<Buffer>>> {\n\n many0(|input| parse_midi_event(input, &mut ParserState::default()))(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
test_parse_variable_length_quantity_length_1() {\n\n assert_eq!(127, parse_variable_length_num(&[0x7F]).unwrap().1);\n\n }\n\n\n\n #[test]\n\n fn test_parse_variable_length_quantity_length_more_than_2() {\n\n assert_eq!(128, parse_variable_length_num(&[0x81, 0x00]).unwrap().1);\n\n assert_eq!(\n\n 16384,\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 46, "score": 177367.38690987462 }, { "content": "#[derive(Debug)]\n\nstruct MIDIBytes {\n\n bytes: Vec<u8>,\n\n}\n\n\n\nimpl MidiMessageLike for MIDIBytes {\n\n fn is_midi(&self) -> bool {\n\n true\n\n }\n\n\n\n fn bytes(&self) -> Option<&[u8]> {\n\n Some(&self.bytes)\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/offline.rs", "rank": 47, "score": 175569.33173681077 }, { "content": "fn midi_callback(timestamp: u64, bytes: &[u8], context: &mut MidiCallbackContext) {\n\n if bytes.len() > 3 {\n\n log::trace!(\n\n \"Received a 3+ bytes long MIDI message. It'll be ignored. 
{:?}\",\n\n bytes\n\n );\n\n return;\n\n }\n\n\n\n log::trace!(\"Handling midi message: {:?}\", bytes);\n\n let mut message_data: [u8; 3] = [0, 0, 0];\n\n for (i, b) in bytes.iter().enumerate() {\n\n message_data[i] = *b;\n\n }\n\n\n\n let message = MidiMessageEntry(Owned::new(\n\n &context.handle,\n\n MidiMessageWrapper {\n\n message_data,\n\n timestamp,\n", "file_path": "crates/augmented/application/audio-processor-standalone-midi/src/host.rs", "rank": 48, "score": 175436.02020910737 }, { "content": "pub fn parse_midi_event<'a, Buffer: Borrow<[u8]> + From<Input<'a>>>(\n\n input: Input<'a>,\n\n state: &mut ParserState,\n\n) -> Result<'a, MIDIMessage<Buffer>> {\n\n let (tmp_input, tmp_status) = be_u8(input)?;\n\n let (input, status) = if tmp_status >= 0x7F {\n\n state.last_status = Some(tmp_status);\n\n Ok((tmp_input, tmp_status))\n\n } else if let Some(status) = state.last_status {\n\n Ok((input, status))\n\n } else {\n\n Err(Err::Error(Error::new(input, ErrorKind::Fail)))\n\n }?;\n\n\n\n let status_start = status & 0b1111_0000;\n\n let (input, message) = if status_start == 0b1000_0000 {\n\n let channel = parse_channel(status);\n\n let (input, note) = be_u8(input)?;\n\n let (input, velocity) = be_u8(input)?;\n\n Ok((\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 49, "score": 175222.72652883286 }, { "content": "pub fn close_window(handle: RawWindowHandle) -> Option<Rectangle> {\n\n if let RawWindowHandle::MacOS(MacOSHandle {\n\n ns_window, ns_view, ..\n\n }) = handle\n\n {\n\n unsafe {\n\n let ns_window = ns_window as id;\n\n let ns_view = ns_view as id;\n\n let window_frame = get_window_frame(ns_window);\n\n\n\n ns_view.removeFromSuperview();\n\n ns_window.close();\n\n let _ = Box::from_raw(ns_view);\n\n let _ = Box::from_raw(ns_window);\n\n\n\n return Some(window_frame);\n\n }\n\n }\n\n\n\n None\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/main_content_view/plugin_editor_window/view/macos/mod.rs", "rank": 50, 
"score": 174712.45753673406 }, { "content": "/// Build a CFStringRef out of a &str ref.\n\nfn make_cfstring(s: &str) -> Option<CFStringRef> {\n\n unsafe {\n\n let allocator = CFAllocatorGetDefault();\n\n let c_str = CString::new(s).ok()?;\n\n let cfstring_ref =\n\n CFStringCreateWithCString(allocator, c_str.as_ptr(), kCFStringEncodingUTF8);\n\n\n\n if cfstring_ref.is_null() {\n\n return None;\n\n }\n\n\n\n Some(cfstring_ref)\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 51, "score": 174047.96533518675 }, { "content": "/// Attempt to set a hint on codec based on the input file extension\n\nfn try_set_audio_file_hint(hint: &mut Hint, audio_input_path: &Path) -> Option<()> {\n\n let extension = audio_input_path.extension()?;\n\n let extension_str = extension.to_str()?;\n\n hint.with_extension(extension_str);\n\n Some(())\n\n}\n\n\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 52, "score": 173901.61593636233 }, { "content": "/// Set all samples of an AudioBuffer to a constant\n\npub fn set_all<Buffer, SampleType>(buf: &mut Buffer, value: SampleType)\n\nwhere\n\n Buffer: AudioBuffer<SampleType = SampleType>,\n\n SampleType: Clone,\n\n{\n\n for sample in buf.slice_mut() {\n\n *sample = value.clone();\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/audio/audio-processor-traits/src/audio_buffer.rs", "rank": 53, "score": 173450.55275490868 }, { "content": "fn section_heading<'a, T: Into<String>>(label: T) -> impl Into<Element<'a, Message>> {\n\n let text = Text::new(label);\n\n Column::with_children(vec![\n\n Container::new(text)\n\n .style(Container0::default())\n\n .padding(Spacing::base_spacing())\n\n .into(),\n\n horizontal_rule().into(),\n\n ])\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-settings-gui/src/lib.rs", "rank": 54, "score": 171417.81892269367 }, { "content": "pub fn run_snapshot_tests(_path: &str, manifest: CargoToml, 
update_snapshots: bool) {\n\n let crate_name = manifest.package.name;\n\n log::info!(\"Running snapshot tests for {}\", crate_name);\n\n let metadata = manifest.package.metadata.unwrap();\n\n let examples = metadata.augmented.unwrap().processor_examples.unwrap();\n\n\n\n for example in examples {\n\n run_example_snapshot_tests(&crate_name, &example, update_snapshots);\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/snapshot_tests_service/mod.rs", "rank": 55, "score": 170382.19549346322 }, { "content": "/// Load the VST plug-in & exit the process on failure\n\nfn run_initialize_plugin(run_options: &RunOptions, host: &mut TestPluginHost) {\n\n let path = Path::new(run_options.plugin_path());\n\n log::info!(\"Loading VST from: {}...\", path.to_str().unwrap());\n\n if let Err(err) = host.load_plugin(path) {\n\n log::error!(\"Failed to load plugin {}\", err);\n\n exit(1);\n\n }\n\n log::info!(\"Initializing audio thread\");\n\n if let Err(err) = host.start_audio() {\n\n log::error!(\"Failed to start host: {}\", err);\n\n exit(1);\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/mod.rs", "rank": 56, "score": 169898.20574434893 }, { "content": "// https://en.wikipedia.org/wiki/Variable-length_quantity\n\npub fn parse_variable_length_num(input: Input) -> Result<u32> {\n\n use nom::bytes::complete::*;\n\n\n\n let mut result: u32 = 0;\n\n\n\n let (input, parts) = take_till(|b| b & 0b10000000 == 0)(input)?;\n\n let (input, extra_part) = take(1u8)(input)?;\n\n\n\n let mut i = parts.len() + 1;\n\n for part in parts.iter().chain(extra_part.iter()) {\n\n i -= 1;\n\n let part = (part << 1) >> 1;\n\n let part32 = part as u32;\n\n result += part32 << (i * 7);\n\n }\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 57, "score": 168924.63004000837 }, { "content": "/// Load the audio input file & exit the process on failure\n\nfn 
run_load_audio_file(run_options: &RunOptions, host: &mut TestPluginHost) {\n\n if let Some(input_audio) = run_options.input_audio() {\n\n if let Err(err) = host.set_audio_file_path(PathBuf::from(input_audio)) {\n\n log::error!(\"Failed to set input file-path {}\", err);\n\n exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/mod.rs", "rank": 58, "score": 168258.47719301016 }, { "content": "/// Builds chunks containing MIDI messages over each block, aligned with their\n\n/// timing and a 120bpm tempo.\n\nfn build_midi_input_blocks(\n\n settings: &AudioProcessorSettings,\n\n total_blocks: usize,\n\n midi_input_file: MIDIFile<String, Vec<u8>>,\n\n) -> Vec<Vec<MIDIBytes>> {\n\n let tempo = 120_f32;\n\n let ticks_per_quarter_note = midi_input_file.ticks_per_quarter_note() as f32;\n\n let chunks = midi_input_file.chunks;\n\n let track_events: Vec<MIDITrackEvent<Vec<u8>>> = chunks\n\n .into_iter()\n\n .filter_map(|chunk| match chunk {\n\n MIDIFileChunk::Track { events } => {\n\n let events = convert_to_absolute_time(events);\n\n Some(events)\n\n }\n\n _ => None,\n\n })\n\n .flatten()\n\n .sorted_by_key(|event| event.delta_time)\n\n .collect();\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/offline.rs", "rank": 59, "score": 166966.0516028645 }, { "content": "/// Build RunOptions parser\n\npub fn build_run_command<'a, 'b>() -> App<'a, 'b> {\n\n clap::App::new(\"run\")\n\n .about(\"Process audio\")\n\n .arg(clap::Arg::from_usage(\n\n \"-p, --plugin=<PLUGIN_PATH> 'An audio-plugin to load'\",\n\n ))\n\n .arg(clap::Arg::from_usage(\n\n \"-i, --input=[INPUT_PATH] 'An audio file to process'\",\n\n ))\n\n .arg(clap::Arg::from_usage(\n\n \"-o, --output=[OUTPUT_PATH] 'If specified, will render offline into file'\",\n\n ))\n\n .arg(clap::Arg::from_usage(\n\n \"-e, --editor 'Open the editor window'\",\n\n ))\n\n .arg(clap::Arg::from_usage(\n\n \"-w, --watch 'Watch and reload the VST when it 
changes'\",\n\n ))\n\n .arg(clap::Arg::from_usage(\n\n \"--host-id=[HOST_ID] 'Audio host name'\",\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/options/mod.rs", "rank": 60, "score": 165863.4052581108 }, { "content": "pub fn parse_sysex_event<'a, Buffer: Borrow<[u8]> + From<Input<'a>>>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDISysExEvent<Buffer>> {\n\n let (input, _) = alt((tag([0xF7]), tag([0xF0])))(input)?;\n\n let (input, bytes) = take_till(|b| b == 0xF7)(input)?;\n\n let (input, _) = take(1u8)(input)?;\n\n Ok((\n\n input,\n\n MIDISysExEvent {\n\n message: bytes.into(),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 61, "score": 162661.9025925916 }, { "content": "pub fn parse_track_event<'a, Buffer: Borrow<[u8]> + From<Input<'a>>>(\n\n input: Input<'a>,\n\n state: &mut ParserState,\n\n) -> Result<'a, MIDITrackEvent<Buffer>> {\n\n let (input, delta_time) = parse_variable_length_num(input)?;\n\n let (input, event) = alt((\n\n |input| parse_meta_event(input).map(|(input, event)| (input, MIDITrackInner::Meta(event))),\n\n |input| {\n\n parse_sysex_event(input).map(|(input, event)| (input, MIDITrackInner::SysEx(event)))\n\n },\n\n |input| {\n\n parse_midi_event(input, state)\n\n .map(|(input, event)| (input, MIDITrackInner::Message(event)))\n\n },\n\n ))(input)?;\n\n\n\n match event {\n\n MIDITrackInner::Meta(_) => {\n\n state.last_status = None;\n\n }\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 62, "score": 162661.9025925916 }, { "content": "pub fn parse_meta_event<'a, Buffer: Borrow<[u8]> + From<Input<'a>>>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDIMetaEvent<Buffer>> {\n\n let (input, _) = tag([0xFF])(input)?;\n\n let (input, meta_type) = be_u8(input)?;\n\n let (input, length) = parse_variable_length_num(input)?;\n\n let (input, bytes) = take(length)(input)?;\n\n\n\n Ok((\n\n input,\n\n MIDIMetaEvent {\n\n meta_type,\n\n length,\n\n bytes: 
bytes.into(),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 63, "score": 162661.9025925916 }, { "content": "fn section_heading<'a, T: Into<String>>(label: T) -> impl Into<Element<'a, Message>> {\n\n let text = Text::new(label);\n\n Column::with_children(vec![\n\n Container::new(text)\n\n .style(Container0::default())\n\n .padding(Spacing::base_spacing())\n\n .into(),\n\n horizontal_rule().into(),\n\n ])\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/audio_io_settings/view.rs", "rank": 64, "score": 161853.83309669822 }, { "content": "/// Perform sample rate conversion of a buffer using linear interpolation\n\npub fn convert_sample_rate(input_rate: f32, input: &[f32], output_rate: f32, output: &mut [f32]) {\n\n if (output_rate - input_rate).abs() < f32::EPSILON {\n\n for (idx, sample) in input.iter().enumerate() {\n\n output[idx] = *sample;\n\n }\n\n return;\n\n }\n\n\n\n // Up-sample -> Output has higher sample rate\n\n if output_rate > input_rate {\n\n // Number of input samples per output sample\n\n // ex. 
output:88kHz - input:44kHz = 0.5 input samples per output sample\n\n let input_samples_per_output = input_rate / output_rate;\n\n\n\n for (sample_index, output_sample) in output.iter_mut().enumerate() {\n\n let input_index: f32 = input_samples_per_output * (sample_index as f32);\n\n let input_index_floor = input_index.floor() as usize;\n\n\n\n let base_sample = input[input_index_floor];\n\n let next_sample = if input_index_floor + 1 < input.len() {\n", "file_path": "crates/augmented/dsp/convert-sample-rate/src/lib.rs", "rank": 65, "score": 161765.01817884046 }, { "content": "pub fn initialize_logger() {\n\n let _ = wisual_logger::try_init_from_env();\n\n}\n\n\n\nuniffi_macros::include_scaffolding!(\"augmented\");\n", "file_path": "crates/apps/recording_buddy/src/lib.rs", "rank": 66, "score": 158891.43153835915 }, { "content": "pub fn initialize_logger() {\n\n let _ = wisual_logger::try_init_from_env();\n\n}\n\n\n\npub struct AudioGuiInitialModel {\n\n host_ids: Vec<String>,\n\n input_ids: Vec<String>,\n\n output_ids: Vec<String>,\n\n}\n\n\n", "file_path": "crates/spikes/AugmentedNative/src/lib.rs", "rank": 67, "score": 158891.43153835915 }, { "content": "pub fn main() {\n\n let window = WindowDesc::new(|| make_ui()).title(\"External Event Demo\");\n\n\n\n let launcher = AppLauncher::with_window(window);\n\n let event_sink = launcher.get_external_handle();\n\n\n\n let garbage_collector = GarbageCollector::default();\n\n let processor = BufferAnalyserProcessor::new(garbage_collector.handle());\n\n let queue_handle = processor.queue();\n\n let _audio_streams = audio_processor_start(processor);\n\n thread::spawn(move || generate_audio_updates(event_sink, queue_handle));\n\n\n\n launcher\n\n .launch(AudioData(Vec::new()))\n\n .expect(\"launch failed\");\n\n}\n\n\n", "file_path": "crates/spikes/example-druid-audio-viz/src/main.rs", "rank": 68, "score": 157071.20553112536 }, { "content": "pub fn audio_node_set_parameter(\n\n _audio_node_id: i32,\n\n _parameter_name: 
String,\n\n _parameter_value: f32,\n\n) -> Result<i32> {\n\n todo!()\n\n}\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 69, "score": 157071.20553112536 }, { "content": "///! Try to set-up the logger and ignore errors\n\npub fn init_from_env() {\n\n let _ = try_init_from_env();\n\n}\n", "file_path": "crates/augmented/ops/wisual-logger/src/lib.rs", "rank": 70, "score": 157071.20553112536 }, { "content": "fn get_cli_version() -> String {\n\n format!(\n\n \"{}-{}-{}\",\n\n env!(\"PROFILE\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"GIT_REV_SHORT\")\n\n )\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/main.rs", "rank": 71, "score": 155184.90899193235 }, { "content": "pub fn convert_wav_file_to_mp3(\n\n wav_file_path: &str,\n\n mp3_file_path: &str,\n\n) -> std::io::Result<ExitStatus> {\n\n let mut result = Command::new(\"lame\")\n\n .arg(wav_file_path)\n\n .arg(mp3_file_path)\n\n .spawn()?;\n\n result.wait()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::convert_wav_file_to_mp3;\n\n\n\n #[test]\n\n fn it_can_encode_mp3() {\n\n let crate_dir = env!(\"CARGO_MANIFEST_DIR\");\n\n let input_path = format!(\"{}/test-inputs/synth.wav\", crate_dir);\n\n let output_path = format!(\"{}/test-inputs/synth.mp3\", crate_dir);\n\n let exit_code = convert_wav_file_to_mp3(&input_path, &output_path).unwrap();\n\n assert!(exit_code.success());\n\n }\n\n}\n", "file_path": "crates/augmented/audio/lame-mp3/src/lib.rs", "rank": 72, "score": 153638.79365797428 }, { "content": "/// Get the path to a resource\n\npub fn get_path(\n\n bundle_identifier: &str,\n\n resource_name: &str,\n\n resource_type: Option<&str>,\n\n sub_dir_name: Option<&str>,\n\n) -> Option<PathBuf> {\n\n let resource_name = make_cfstring(resource_name)?;\n\n let resource_type = resource_type\n\n .map(|resource_type| make_cfstring(resource_type))\n\n .flatten()\n\n .unwrap_or(std::ptr::null());\n\n let sub_dir_name = sub_dir_name\n\n .map(|sub_dir_name| 
make_cfstring(sub_dir_name))\n\n .flatten()\n\n .unwrap_or(std::ptr::null());\n\n\n\n unsafe {\n\n log::debug!(\"Getting bundle {}\", bundle_identifier);\n\n let bundle_identifier = make_cfstring(bundle_identifier)?;\n\n let main_bundle = CFBundleGetBundleWithIdentifier(bundle_identifier);\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 73, "score": 153638.79365797428 }, { "content": "/// Create a sine wave buffer with this duration\n\npub fn oscillator_buffer(\n\n sample_rate: f32,\n\n frequency: f32,\n\n length: Duration,\n\n generator_fn: fn(f32) -> f32,\n\n) -> Vec<f32> {\n\n let mut source = Oscillator::new(generator_fn);\n\n source.set_sample_rate(sample_rate);\n\n source.set_frequency(frequency);\n\n let mut output = Vec::new();\n\n let length_samples = (length.as_secs_f32() * sample_rate).ceil();\n\n output.resize(length_samples as usize, 0.0);\n\n for sample in &mut output {\n\n *sample = source.next_sample();\n\n }\n\n output\n\n}\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/generators.rs", "rank": 74, "score": 153638.79365797428 }, { "content": "/// Start a processor using CPAL.\n\npub fn standalone_start(\n\n mut app: impl StandaloneProcessor,\n\n handle: Option<&Handle>,\n\n) -> StandaloneHandles {\n\n let _ = wisual_logger::try_init_from_env();\n\n\n\n let (midi_host, mut midi_context) = initialize_midi_host(&mut app, handle);\n\n\n\n // Audio set-up\n\n let host = cpal::default_host();\n\n log::info!(\"Using host: {}\", host.id().name());\n\n let buffer_size = 512;\n\n let sample_rate = 44100;\n\n let accepts_input = app.options().accepts_input;\n\n let input_tuple = if accepts_input {\n\n Some(configure_input_device(&host, buffer_size, sample_rate))\n\n } else {\n\n None\n\n };\n\n let (output_device, output_config) = configure_output_device(host, buffer_size, sample_rate);\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/mod.rs", 
"rank": 75, "score": 152024.0672477629 }, { "content": "/// Test two buffers have equivalent RMS levels\n\npub fn test_level_equivalence(\n\n input_buffer: &[f32],\n\n output_buffer: &[f32],\n\n input_window_size: usize,\n\n output_window_size: usize,\n\n threshold: f32,\n\n) {\n\n let input_chunks = input_buffer.chunks(input_window_size);\n\n let output_chunks = output_buffer.chunks(output_window_size);\n\n assert!(!input_buffer.is_empty());\n\n assert!(!output_buffer.is_empty());\n\n // assert!((input_chunks.len() as i32 - output_chunks.len() as i32).abs() < 2);\n\n for (input_chunk, output_chunk) in input_chunks.zip(output_chunks) {\n\n let input_level = util::rms_level(input_chunk);\n\n let output_level = util::rms_level(output_chunk);\n\n let diff = (input_level - output_level).abs();\n\n\n\n assert!(diff < threshold);\n\n }\n\n}\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/lib.rs", "rank": 76, "score": 152018.83006545246 }, { "content": "pub fn get_cpal_host(host_id: &AudioHostId) -> cpal::Host {\n\n match &host_id {\n\n AudioHostId::Default => cpal::default_host(),\n\n AudioHostId::Id(id) => find_cpal_host_by_name(id).unwrap_or_else(cpal::default_host),\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 77, "score": 150818.87251552573 }, { "content": "pub fn builder<StoryMessage: 'static + Debug + Clone + Send>() -> OptionsBuilder<StoryMessage> {\n\n OptionsBuilder { stories: vec![] }\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-storybook/src/lib.rs", "rank": 78, "score": 150747.8315842076 }, { "content": "fn initialize_menu() -> *mut Object {\n\n use cocoa::appkit::{NSMenu, NSMenuItem};\n\n use cocoa::base::selector;\n\n use cocoa::foundation::NSProcessInfo;\n\n\n\n unsafe {\n\n let menubar = NSMenu::new(nil);\n\n let app_menu_item = NSMenuItem::new(nil);\n\n menubar.addItem_(app_menu_item);\n\n\n\n let app_menu = 
NSMenu::new(nil);\n\n let process_name = NSProcessInfo::processInfo(nil).processName();\n\n\n\n // About menu item\n\n let about_item_prefix = NSString::alloc(nil).init_str(\"About \");\n\n let about_item_title = about_item_prefix.stringByAppendingString_(process_name);\n\n let about_item = menu_item(\n\n about_item_title,\n\n selector(\"orderFrontStandardAboutPanel:\"),\n\n None,\n", "file_path": "crates/spikes/augmented-ui/examples/macos_window.rs", "rank": 79, "score": 150536.8824391923 }, { "content": "pub fn run_list_devices() {\n\n let hosts = cpal::available_hosts();\n\n hosts.iter().for_each(|host_id| {\n\n if print_host_devices(host_id).is_err() {\n\n log::error!(\"Error listing devices for host {}\", host_id.name());\n\n }\n\n });\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/list_devices.rs", "rank": 80, "score": 148954.0454770797 }, { "content": "pub fn deinitialize() -> Result<i32> {\n\n let mut handles = STATE.lock().unwrap();\n\n *handles = None;\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 81, "score": 147720.0348400332 }, { "content": "pub fn initialize() -> Result<i32> {\n\n let mut handles = STATE.lock().unwrap();\n\n *handles = Some(State::new());\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 82, "score": 147720.0348400332 }, { "content": "pub fn read_file_contents(\n\n audio_file: &mut ProbeResult,\n\n) -> Result<SymphoniaAudioBuffer<f32>, AudioFileError> {\n\n let audio_file_stream = audio_file\n\n .format\n\n .default_track()\n\n .ok_or(AudioFileError::OpenStreamError)?;\n\n let mut decoder = symphonia::default::get_codecs()\n\n .make(&audio_file_stream.codec_params, &Default::default())?;\n\n let audio_file_stream_id = audio_file_stream.id;\n\n\n\n let mut audio_buffer: Vec<SymphoniaAudioBuffer<f32>> = Vec::new();\n\n metrics::time(\"AudioFileProcessor - Reading file packages\", || loop {\n\n match audio_file.format.next_packet().ok() {\n\n 
None => break,\n\n Some(packet) => {\n\n if packet.track_id() != audio_file_stream_id {\n\n break;\n\n }\n\n\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 83, "score": 147503.02434695832 }, { "content": "#[derive(Debug)]\n\nstruct MIDIBytes {\n\n bytes: Vec<u8>,\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/examples/play_midi.rs", "rank": 84, "score": 147236.91318328743 }, { "content": "fn with_state0(f: impl FnOnce(&State) -> ()) -> Result<i32> {\n\n with_state(|state| {\n\n f(state);\n\n Ok(0)\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 85, "score": 146110.36544346804 }, { "content": "pub fn run_file_watch_loop(\n\n rx: Receiver<DebouncedEvent>,\n\n run_options: RunOptions,\n\n host: Addr<TestPluginHost>,\n\n) -> ! {\n\n let inner = || -> Result<(), std::io::Error> {\n\n let mut current_hash = get_file_hash(run_options.plugin_path().as_ref())?;\n\n loop {\n\n match rx.recv() {\n\n Ok(_) => {\n\n let new_hash = get_file_hash(run_options.plugin_path().as_ref())?;\n\n if new_hash == current_hash {\n\n log::warn!(\"Ignoring event due to same plugin hash\");\n\n continue;\n\n } else {\n\n log::info!(\n\n \"Received file change event. 
Plug-in will be reloaded content_hash={}\",\n\n new_hash\n\n );\n\n current_hash = new_hash;\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/file_watch.rs", "rank": 86, "score": 146102.47668735136 }, { "content": "pub fn black() -> Color {\n\n rgb(19, 19, 19)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 87, "score": 145924.91259678864 }, { "content": "pub fn yellow() -> Color {\n\n rgb(240, 187, 104)\n\n}\n\n\n\npub struct Colors;\n\n\n\nimpl Colors {\n\n pub fn text() -> Color {\n\n white()\n\n }\n\n\n\n pub fn success() -> Color {\n\n green()\n\n }\n\n\n\n pub fn error() -> Color {\n\n red()\n\n }\n\n\n\n pub fn warning() -> Color {\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 88, "score": 145924.91259678864 }, { "content": "pub fn red() -> Color {\n\n rgb(199, 84, 80)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 89, "score": 145924.91259678864 }, { "content": "pub fn green() -> Color {\n\n rgb(73, 190, 84)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 90, "score": 145924.91259678864 }, { "content": "pub fn initialize_logger() -> Result<i32> {\n\n let _ = wisual_logger::try_init_from_env();\n\n log::info!(\"Rust logger initialized\");\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 91, "score": 145899.8088327994 }, { "content": "pub fn initialize_audio() -> Result<i32> {\n\n send_host_message(StartMessage);\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 92, "score": 145899.8088327994 }, { "content": "pub fn stop_playback() -> Result<i32> {\n\n if let Some(audio_file_processor) =\n\n ProcessorHandleRegistry::current().get::<Shared<AudioFileProcessorHandle>>(\"audio-file\")\n\n {\n\n audio_file_processor.stop();\n\n }\n\n Ok(0)\n\n}\n\n\n", 
"file_path": "crates/apps/daw/src/api.rs", "rank": 93, "score": 145899.8088327994 }, { "content": "pub fn start_playback() -> Result<i32> {\n\n if let Some(audio_file_processor) =\n\n ProcessorHandleRegistry::current().get::<Shared<AudioFileProcessorHandle>>(\"audio-file\")\n\n {\n\n audio_file_processor.play();\n\n }\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 94, "score": 145899.8088327994 }, { "content": "fn render_to_db(render: f32) -> f32 {\n\n let reference_amplitude = 1e-1;\n\n 60.0 * (render / reference_amplitude).log10()\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/main_content_view/volume_meter/mod.rs", "rank": 95, "score": 145485.28315059005 }, { "content": "pub fn convert_audio_file_sample_rate(\n\n audio_file_contents: &SymphoniaAudioBuffer<f32>,\n\n output_rate: f32,\n\n channel_number: usize,\n\n) -> Vec<f32> {\n\n let audio_file_channel = audio_file_contents.chan(channel_number);\n\n let input_rate = audio_file_contents.spec().rate as f32;\n\n let audio_file_duration = audio_file_channel.len() as f32 / input_rate;\n\n\n\n let output_size = (audio_file_duration * output_rate).ceil() as usize;\n\n let mut channel = Vec::new();\n\n channel.resize(output_size, 0.0);\n\n let audio_file_channel = audio_file_contents.chan(channel_number);\n\n\n\n // Convert sample rate from audio file to in-memory\n\n log::info!(\n\n \"Converting sample_rate channel={} input_rate={} output_rate={}\",\n\n channel_number,\n\n input_rate,\n\n output_rate\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 96, "score": 144749.8139553435 }, { "content": "fn sine_osc(oscillator: &mut Oscillator<f32>, output_buffer: &mut [f32]) {\n\n for i in output_buffer {\n\n *i = oscillator.next_sample();\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/audio/oscillator/benches/sine_oscillator_benchmark.rs", "rank": 97, "score": 144466.27396171674 }, { "content": "pub fn 
light_gray() -> Color {\n\n rgb(60, 60, 60)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 98, "score": 144364.40114935924 }, { "content": "pub fn create_transport_runtime() -> Runtime {\n\n log::info!(\"Creating tokio event-loop\");\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .thread_name(\"ws-transport-tokio\")\n\n .worker_threads(1)\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n runtime\n\n}\n\n\n\nasync fn run_websockets_accept_loop(\n\n listener: TcpListener,\n\n input_sender: Sender<Message>,\n\n current_id: AtomicCell<u32>,\n\n connections: ConnectionMap,\n\n) {\n\n log::info!(\"Waiting for ws connections\");\n\n while let Ok((stream, _)) = listener.accept().await {\n\n let peer = stream\n", "file_path": "crates/augmented/gui/webview-transport/src/websockets/tokio_websockets.rs", "rank": 99, "score": 144364.40114935924 } ]
Rust
stm32-gen-features/src/lib.rs
topisani/embassy
43a7226d8b8e5a2dabae8afbaf9e81651b59ca6e
use std::{iter::FilterMap, path::Path, slice::Iter}; const SUPPORTED_FAMILIES: [&str; 10] = [ "stm32f0", "stm32f1", "stm32f4", "stm32g0", "stm32l0", "stm32l1", "stm32l4", "stm32h7", "stm32wb55", "stm32wl55", ]; const SEPARATOR_START: &str = "# BEGIN GENERATED FEATURES\n"; const SEPARATOR_END: &str = "# END GENERATED FEATURES\n"; const HELP: &str = "# Generated by stm32-gen-features. DO NOT EDIT.\n"; fn is_supported(name: &str) -> bool { SUPPORTED_FAMILIES .iter() .any(|family| name.starts_with(family)) } type SupportedIter<'a> = FilterMap< Iter<'a, (String, Vec<String>)>, fn(&(String, Vec<String>)) -> Option<(&String, &Vec<String>)>, >; trait FilterSupported { fn supported(&self) -> SupportedIter; } impl FilterSupported for &[(String, Vec<String>)] { fn supported(&self) -> SupportedIter { self.iter() .filter_map(|(name, cores)| is_supported(name).then(|| (name, cores))) } } pub fn chip_names_and_cores() -> Vec<(String, Vec<String>)> { glob::glob("../stm32-data/data/chips/*.yaml") .unwrap() .filter_map(|entry| entry.map_err(|e| eprintln!("{:?}", e)).ok()) .filter_map(|entry| { if let Some(name) = entry.file_stem().and_then(|stem| stem.to_str()) { Some((name.to_lowercase(), chip_cores(&entry))) } else { eprintln!("{:?} is not a regular file", entry); None } }) .collect() } fn chip_cores(path: &Path) -> Vec<String> { let file_contents = std::fs::read_to_string(path).unwrap(); let doc = &yaml_rust::YamlLoader::load_from_str(&file_contents).unwrap()[0]; doc["cores"] .as_vec() .unwrap_or_else(|| panic!("{:?}:[cores] is not an array", path)) .iter() .enumerate() .map(|(i, core)| { core["name"] .as_str() .unwrap_or_else(|| panic!("{:?}:[cores][{}][name] is not a string", path, i)) .to_owned() }) .collect() } pub fn embassy_stm32_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String { let mut result = String::new(); for (chip_name, cores) in names_and_cores.supported() { if cores.len() > 1 { for core_name in cores.iter() { result += &format!( "{chip}_{core} = [ 
\"stm32-metapac/{chip}_{core}\" ]\n", chip = chip_name, core = core_name ); } } else { result += &format!("{chip} = [ \"stm32-metapac/{chip}\" ]\n", chip = chip_name); } } result } pub fn stm32_metapac_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String { let mut result = String::new(); for (chip_name, cores) in names_and_cores { if cores.len() > 1 { for core_name in cores { result += &format!("{}_{} = []\n", chip_name, core_name); } } else { result += &format!("{} = []\n", chip_name); } } result } fn split_cargo_toml_contents(contents: &str) -> (&str, &str) { let (before, remainder) = contents .split_once(SEPARATOR_START) .unwrap_or_else(|| panic!("missing \"{}\" tag", SEPARATOR_START)); let (_, after) = remainder .split_once(SEPARATOR_END) .unwrap_or_else(|| panic!("missing \"{}\" tag", SEPARATOR_END)); (before, after) } pub fn generate_cargo_toml_file(previous_text: &str, new_contents: &str) -> String { let (before, after) = split_cargo_toml_contents(previous_text); before.to_owned() + SEPARATOR_START + HELP + new_contents + SEPARATOR_END + after } #[cfg(test)] mod tests { use super::*; #[test] fn stm32f407vg_is_supported() { assert!(is_supported("stm32f407vg")) } #[test] fn abcdef_is_not_supported() { assert!(!is_supported("abcdef")) } #[test] #[ignore] fn stm32f407vg_yaml_file_exists_and_is_supported() { assert!(chip_names_and_cores() .as_slice() .supported() .into_iter() .any(|(name, _)| { name == "stm32f407vg" })) } #[test] fn keeps_text_around_separators() { let initial = "\ before # BEGIN GENERATED FEATURES # END GENERATED FEATURES after "; let expected = "\ before # BEGIN GENERATED FEATURES # Generated by stm32-gen-features. DO NOT EDIT. 
a = [\"b\"] # END GENERATED FEATURES after "; let new_contents = String::from("a = [\"b\"]\n"); assert_eq!(generate_cargo_toml_file(initial, &new_contents), expected); } #[test] #[should_panic] fn does_not_generate_if_separators_are_missing() { let initial = "\ before # END GENERATED FEATURES after "; let new_contents = String::from("a = [\"b\"]\n"); generate_cargo_toml_file(initial, &new_contents); } }
use std::{iter::FilterMap, path::Path, slice::Iter}; const SUPPORTED_FAMILIES: [&str; 10] = [ "stm32f0", "stm32f1", "stm32f4", "stm32g0", "stm32l0", "stm32l1", "stm32l4", "stm32h7", "stm32wb55", "stm32wl55", ]; const SEPARATOR_START: &str = "# BEGIN GENERATED FEATURES\n"; const SEPARATOR_END: &str = "# END GENERATED FEATURES\n"; const HELP: &str = "# Generated by stm32-gen-features. DO NOT EDIT.\n"; fn is_supported(name: &str) -> bool { SUPPORTED_FAMILIES .iter() .any(|family| name.starts_with(family)) } type SupportedIter<'a> = FilterMap< Iter<'a, (String, Vec<String>)>, fn(&(String, Vec<String>)) -> Option<(&String, &Vec<String>)>, >; trait FilterSupported { fn supported(&self) -> SupportedIter; } impl FilterSupported for &[(String, Vec<String>)] { fn supported(&self) -> SupportedIter { self.iter() .filter_map(|(name, cores)| is_supported(name).then(|| (name, cores))) } } pub fn chip_names_and_cores() -> Vec<(String, Vec<String>)> { glob::glob("../stm32-data/data/chips/*.yaml") .unwrap() .filter_map(|entry| entry.map_err(|e| eprintln!("{:?}", e)).ok()) .filter_map(|entry| { if let Some(name) = entry.file_stem().and_then(|stem| stem.to_str()) { Some((name.to_lowercase(), chip_cores(&entry))) } else { eprintln!("{:?} is not a regular file", entry); None } }) .collect() } fn chip_cores(path: &Path) -> Vec<String> { let file_contents = std::fs::read_to_string(path).unwrap(); let doc = &yaml_rust::YamlLoader::load_from_str(&file_contents).unwrap()[0]; doc["cores"] .as_vec() .unwrap_or_else(|| panic!("{:?}:[cores] is not an array", path)) .iter() .enumerate() .map(|(i, core)| { core["name"] .as_str() .unwrap_or_else(|| panic!("{:?}:[cores][{}][name] is not a string", path, i)) .to_owned() }) .collect() } pub fn embassy_stm32_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String { let mut result = String::new(); for (chip_name, cores) in names_and_cores.supported() { if cores.len() > 1 { for core_name in cores.iter() { result += &format!( "{chip}_{core} = [ 
\"stm32-metapac/{chip}_{core}\" ]\n", chip = chip_name, core = core_name ); } } else { result += &format!("{chip} = [ \"stm32-metapac/{chip}\" ]\n", chip = chip_name); } } result } pub fn stm32_metapac_needed_data(names_and_cores: &[(String, Vec<String>)]) -> String { let mut result = String::new(); for (chip_name, cores) in names_and_cores { if cores.len() > 1 { for core_name in cores { result += &format!("{}_{} = []\n", chip_name, core_name); } } else { result += &format!("{} = []\n", chip_name); } } result } fn split_cargo_toml_contents(contents: &str) -> (&str, &str) { let (before, remainder) = contents .split_once(SEPARATOR_START) .unwrap_or_else(|| panic!("missing \"{}\" tag", SEPARATOR_START)); let (_, after) = remainder .split_once(SEPARATOR_END) .unwrap_or_else(|| panic!("missing \"{}\" tag", SEPARATOR_END)); (before, after) } pub fn generate_cargo_toml_file(previous_text: &str, new_contents: &str) -> String { let (before, after) = split_cargo_toml_contents(previous_text); before.to_owned() + SEPARATOR_START + HELP + new_contents + SEPARATOR_END + after } #[cfg(test)] mod tests { use super::*; #[test] fn stm32f407vg_is_supported() { assert!(is_supported("stm32f407vg")) } #[test] fn abcdef_is_not_supported() { assert!(!is_supported("abcdef")) } #[test] #[ignore] fn stm32f407vg_yaml_file_exists_and_is_supported() { assert!(chip_names_and_cores() .as_slice() .supported() .into_iter() .any(|(name, _)| { name == "stm32f407vg" })) } #[test] fn keeps_text_around_separators() { let initial = "\ before # BEGIN GENERATED FEATURES # END GENERATED FEATURES after "; let expected = "\ before # BEGIN GENERATED FEATURES # Generated by stm32-gen-features. DO NOT EDIT. a = [\"b\"] # END GENERATED FEATURES after "; let new_contents = String::from("a = [\"b\"]\n"); assert_eq!(generate_cargo_toml_file(initial, &new_contents), expected); } #[test] #[should_panic]
}
fn does_not_generate_if_separators_are_missing() { let initial = "\ before # END GENERATED FEATURES after "; let new_contents = String::from("a = [\"b\"]\n"); generate_cargo_toml_file(initial, &new_contents); }
function_block-full_function
[ { "content": "/// Update a Cargo.toml file\n\n///\n\n/// Update the content between \"# BEGIN GENERATED FEATURES\" and \"# END GENERATED FEATURES\"\n\n/// with the given content\n\nfn update_cargo_file(path: &str, new_contents: &str) {\n\n let previous_text = std::fs::read_to_string(path).unwrap();\n\n let new_text = generate_cargo_toml_file(&previous_text, new_contents);\n\n std::fs::write(path, new_text).unwrap();\n\n}\n", "file_path": "stm32-gen-features/src/main.rs", "rank": 4, "score": 363100.14314079145 }, { "content": "/// Low power blocking wait loop using WFE/SEV.\n\npub fn low_power_wait_until(mut condition: impl FnMut() -> bool) {\n\n while !condition() {\n\n // WFE might \"eat\" an event that would have otherwise woken the executor.\n\n cortex_m::asm::wfe();\n\n }\n\n // Retrigger an event to be transparent to the executor.\n\n cortex_m::asm::sev();\n\n}\n", "file_path": "embassy-hal-common/src/lib.rs", "rank": 5, "score": 360980.7708723203 }, { "content": "fn make_table(out: &mut String, name: &str, data: &Vec<Vec<String>>) {\n\n write!(\n\n out,\n\n \"#[macro_export]\n\nmacro_rules! {} {{\n\n ($($pat:tt => $code:tt;)*) => {{\n\n macro_rules! __{}_inner {{\n\n $(($pat) => $code;)*\n\n ($_:tt) => {{}}\n\n }}\n\n\",\n\n name, name\n\n )\n\n .unwrap();\n\n\n\n for row in data {\n\n write!(out, \" __{}_inner!(({}));\\n\", name, row.join(\",\")).unwrap();\n\n }\n\n\n\n write!(\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 8, "score": 345029.13418125064 }, { "content": "/// Splits a bounded mpsc channel into a `Sender` and `Receiver`.\n\n///\n\n/// All data sent on `Sender` will become available on `Receiver` in the same\n\n/// order as it was sent.\n\n///\n\n/// The `Sender` can be cloned to `send` to the same channel from multiple code\n\n/// locations. Only one `Receiver` is valid.\n\n///\n\n/// If the `Receiver` is disconnected while trying to `send`, the `send` method\n\n/// will return a `SendError`. 
Similarly, if `Sender` is disconnected while\n\n/// trying to `recv`, the `recv` method will return a `RecvError`.\n\n///\n\n/// Note that when splitting the channel, the sender and receiver cannot outlive\n\n/// their channel. The following will therefore fail compilation:\n\n////\n\n/// ```compile_fail\n\n/// use embassy::channel::mpsc;\n\n/// use embassy::channel::mpsc::{Channel, WithThreadModeOnly};\n\n///\n\n/// let (sender, receiver) = {\n\n/// let mut channel = Channel::<WithThreadModeOnly, u32, 3>::with_thread_mode_only();\n\n/// mpsc::split(&mut channel)\n\n/// };\n\n/// ```\n\npub fn split<M, T, const N: usize>(\n\n channel: &mut Channel<M, T, N>,\n\n) -> (Sender<M, T, N>, Receiver<M, T, N>)\n\nwhere\n\n M: MutexKind,\n\n{\n\n let sender = Sender { channel };\n\n let receiver = Receiver { channel };\n\n channel.lock(|c| {\n\n c.register_receiver();\n\n c.register_sender();\n\n });\n\n (sender, receiver)\n\n}\n\n\n\nimpl<'ch, M, T, const N: usize> Receiver<'ch, M, T, N>\n\nwhere\n\n M: MutexKind,\n\n{\n\n /// Receives the next value for this receiver.\n", "file_path": "embassy/src/channel/mpsc.rs", "rank": 9, "score": 280717.38361411536 }, { "content": "pub fn in_thread_mode() -> bool {\n\n #[cfg(feature = \"std\")]\n\n return Some(\"main\") == std::thread::current().name();\n\n\n\n #[cfg(not(feature = \"std\"))]\n\n return cortex_m::peripheral::SCB::vect_active()\n\n == cortex_m::peripheral::scb::VectActive::ThreadMode;\n\n}\n\n\n\n/// A \"mutex\" that does nothing and cannot be shared between threads.\n\npub struct NoopMutex<T> {\n\n inner: T,\n\n}\n\n\n\nimpl<T> NoopMutex<T> {\n\n pub const fn new(value: T) -> Self {\n\n NoopMutex { inner: value }\n\n }\n\n}\n\n\n", "file_path": "embassy/src/blocking_mutex/mod.rs", "rank": 10, "score": 277374.45571949915 }, { "content": "fn make_peripheral_counts(out: &mut String, data: &BTreeMap<String, u8>) {\n\n write!(\n\n out,\n\n \"#[macro_export]\n\nmacro_rules! 
peripheral_count {{\n\n \"\n\n )\n\n .unwrap();\n\n for (name, count) in data {\n\n write!(out, \"({}) => ({});\\n\", name, count,).unwrap();\n\n }\n\n write!(out, \" }}\\n\").unwrap();\n\n}\n\n\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 12, "score": 259011.1078365564 }, { "content": "fn is_primary_source(path: &Path) -> bool {\n\n let mut current = path;\n\n\n\n loop {\n\n let current_file_name = current.file_name().unwrap().to_str().unwrap();\n\n if current_file_name == \"target\"\n\n || current_file_name == \"stm32-metapac-gen\"\n\n || current_file_name == \"stm32-data\"\n\n {\n\n return false;\n\n }\n\n\n\n if let Some(path) = current.parent() {\n\n current = path.into();\n\n if current == root_dir() {\n\n return true;\n\n }\n\n } else {\n\n return false;\n\n }\n\n }\n\n}\n\n\n", "file_path": "xtask/src/main.rs", "rank": 13, "score": 258163.04908110117 }, { "content": "fn make_dma_channel_counts(out: &mut String, data: &BTreeMap<String, u8>) {\n\n write!(\n\n out,\n\n \"#[macro_export]\n\nmacro_rules! 
dma_channels_count {{\n\n \"\n\n )\n\n .unwrap();\n\n for (name, count) in data {\n\n write!(out, \"({}) => ({});\\n\", name, count,).unwrap();\n\n }\n\n write!(out, \" }}\\n\").unwrap();\n\n}\n\n\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 14, "score": 256121.69753888514 }, { "content": "pub trait IntoClassSet<B: UsbBus, C: ClassSet<B>> {\n\n fn into_class_set(self) -> C;\n\n}\n\n\n\npub struct ClassSet1<B, C1>\n\nwhere\n\n B: UsbBus,\n\n C1: UsbClass<B>,\n\n{\n\n class: C1,\n\n _bus: PhantomData<B>,\n\n}\n\n\n\npub struct ClassSet2<B, C1, C2>\n\nwhere\n\n B: UsbBus,\n\n C1: UsbClass<B>,\n\n C2: UsbClass<B>,\n\n{\n\n class1: C1,\n", "file_path": "embassy-hal-common/src/usb/mod.rs", "rank": 15, "score": 254786.8569382241 }, { "content": "pub trait ClassSet<B: UsbBus>: Send {\n\n fn poll_all(&mut self, device: &mut UsbDevice<'_, B>) -> bool;\n\n}\n\n\n", "file_path": "embassy-hal-common/src/usb/mod.rs", "rank": 17, "score": 249462.18275416666 }, { "content": "/// Trait for a USB State that has a serial class inside\n\npub trait SerialState<'bus, 'a, B: UsbBus, I> {\n\n fn get_serial(&mut self) -> &mut UsbSerial<'bus, 'a, B>;\n\n}\n\n\n\nimpl<'bus, 'a, B: UsbBus> SerialState<'bus, 'a, B, Index0>\n\n for ClassSet1<B, UsbSerial<'bus, 'a, B>>\n\n{\n\n fn get_serial(&mut self) -> &mut UsbSerial<'bus, 'a, B> {\n\n &mut self.class\n\n }\n\n}\n\n\n\nimpl<'bus, 'a, B, C2> SerialState<'bus, 'a, B, Index0> for ClassSet2<B, UsbSerial<'bus, 'a, B>, C2>\n\nwhere\n\n B: UsbBus,\n\n C2: UsbClass<B>,\n\n{\n\n fn get_serial(&mut self) -> &mut UsbSerial<'bus, 'a, B> {\n\n &mut self.class1\n\n }\n", "file_path": "embassy-hal-common/src/usb/mod.rs", "rank": 18, "score": 241085.81822022283 }, { "content": "pub fn is_config_up() -> bool {\n\n STACK.borrow().borrow().as_ref().unwrap().config_up\n\n}\n\n\n\npub async fn run() {\n\n futures::future::poll_fn(|cx| {\n\n Stack::with(|stack| stack.poll(cx));\n\n Poll::<()>::Pending\n\n })\n\n .await\n\n}\n\n\n", "file_path": 
"embassy-net/src/stack.rs", "rank": 19, "score": 240383.583446107 }, { "content": "pub fn is_init() -> bool {\n\n STACK.borrow().borrow().is_some()\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 20, "score": 240383.583446107 }, { "content": "pub fn is_link_up() -> bool {\n\n STACK.borrow().borrow().as_ref().unwrap().link_up\n\n}\n\n\n", "file_path": "embassy-net/src/stack.rs", "rank": 21, "score": 240383.583446107 }, { "content": "fn ifreq_for(name: &str) -> ifreq {\n\n let mut ifreq = ifreq {\n\n ifr_name: [0; libc::IF_NAMESIZE],\n\n ifr_data: 0,\n\n };\n\n for (i, byte) in name.as_bytes().iter().enumerate() {\n\n ifreq.ifr_name[i] = *byte as libc::c_char\n\n }\n\n ifreq\n\n}\n\n\n", "file_path": "examples/std/src/tuntap.rs", "rank": 22, "score": 236737.97404787803 }, { "content": " pub trait DacPin<T: Instance, const C: u8>: OptionalPin {}\n\n}\n\n\n", "file_path": "embassy-stm32/src/dac/mod.rs", "rank": 23, "score": 230964.36334421916 }, { "content": "/// Initialize embassy_net.\n\n/// This function must be called from thread mode.\n\npub fn init<const ADDR: usize, const SOCK: usize, const NEIGH: usize>(\n\n device: &'static mut dyn Device,\n\n configurator: &'static mut dyn Configurator,\n\n resources: &'static mut StackResources<ADDR, SOCK, NEIGH>,\n\n) {\n\n #[cfg(feature = \"medium-ethernet\")]\n\n let medium = device.capabilities().medium;\n\n\n\n #[cfg(feature = \"medium-ethernet\")]\n\n let ethernet_addr = if medium == Medium::Ethernet {\n\n device.ethernet_address()\n\n } else {\n\n [0, 0, 0, 0, 0, 0]\n\n };\n\n\n\n let mut b = InterfaceBuilder::new(DeviceAdapter::new(device));\n\n b = b.ip_addrs(&mut resources.addresses[..]);\n\n\n\n #[cfg(feature = \"medium-ethernet\")]\n\n if medium == Medium::Ethernet {\n", "file_path": "embassy-net/src/stack.rs", "rank": 24, "score": 221195.403820251 }, { "content": "#[inline]\n\nfn rfbusyms() -> bool {\n\n let pwr = pac::PWR;\n\n unsafe { pwr.sr2().read().rfbusyms() == pac::pwr::vals::Rfbusyms::BUSY 
}\n\n}\n\n*/\n\n\n\n/// Sub-GHz radio peripheral\n\npub struct SubGhz<'d, Tx, Rx> {\n\n spi: Spi<'d, SUBGHZSPI, Tx, Rx>,\n\n}\n\n\n\nimpl<'d, Tx, Rx> SubGhz<'d, Tx, Rx> {\n\n fn pulse_radio_reset() {\n\n let rcc = pac::RCC;\n\n unsafe {\n\n rcc.csr().modify(|w| w.set_rfrst(true));\n\n rcc.csr().modify(|w| w.set_rfrst(false));\n\n }\n\n }\n\n\n", "file_path": "embassy-stm32/src/subghz/mod.rs", "rank": 25, "score": 213074.65526455297 }, { "content": "#[inline]\n\nfn rfbusys() -> bool {\n\n // safety: atmoic read with no side-effects\n\n //unsafe { (*pac::PWR::ptr()).sr2.read().rfbusys().is_busy() }\n\n let pwr = pac::PWR;\n\n unsafe { pwr.sr2().read().rfbusys() == pac::pwr::vals::Rfbusys::BUSY }\n\n}\n\n\n\n/*\n\n/// Returns `true` if the radio is busy or NSS is low.\n\n///\n\n/// See RM0461 Rev 4 section 5.3 page 181 \"Radio busy management\" for more\n\n/// details.\n", "file_path": "embassy-stm32/src/subghz/mod.rs", "rank": 26, "score": 213074.65526455297 }, { "content": "fn gen_memory_x(out_dir: &PathBuf, chip: &Chip) {\n\n let mut memory_x = String::new();\n\n\n\n let flash_bytes = chip\n\n .flash\n\n .regions\n\n .get(\"BANK_1\")\n\n .unwrap()\n\n .bytes\n\n .unwrap_or(chip.flash.bytes);\n\n let flash_origin = chip.flash.regions.get(\"BANK_1\").unwrap().base;\n\n\n\n let ram_bytes = chip\n\n .ram\n\n .regions\n\n .get(\"SRAM\")\n\n .unwrap()\n\n .bytes\n\n .unwrap_or(chip.ram.bytes);\n\n let ram_origin = chip.ram.regions.get(\"SRAM\").unwrap().base;\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 27, "score": 210230.5433822253 }, { "content": "pub trait DacPin<T: Instance, const C: u8>: sealed::DacPin<T, C> + 'static {}\n\n\n\nimpl<T: Instance, const C: u8> DacPin<T, C> for NoPin {}\n\nimpl<T: Instance, const C: u8> sealed::DacPin<T, C> for NoPin {}\n\n\n\ncrate::pac::peripherals!(\n\n (dac, $inst:ident) => {\n\n impl crate::dac::sealed::Instance for peripherals::$inst {\n\n fn regs() -> &'static crate::pac::dac::Dac {\n\n &crate::pac::$inst\n\n }\n\n 
}\n\n\n\n impl crate::dac::Instance for peripherals::$inst {}\n\n };\n\n);\n\n\n\ncrate::pac::peripheral_pins!(\n\n ($inst:ident, dac, DAC, $pin:ident, OUT1) => {\n\n impl DacPin<peripherals::$inst, 1> for peripherals::$pin {}\n", "file_path": "embassy-stm32/src/dac/mod.rs", "rank": 28, "score": 207805.64607235388 }, { "content": "pub trait Steal {\n\n unsafe fn steal() -> Self;\n\n}\n\n\n\nmacro_rules! unsafe_impl_unborrow_tuples {\n\n ($($t:ident),+) => {\n\n unsafe impl<$($t),+> Unborrow for ($($t),+)\n\n where\n\n $(\n\n $t: Unborrow<Target = $t>\n\n ),+\n\n {\n\n type Target = ($($t),+);\n\n unsafe fn unborrow(self) -> Self::Target {\n\n self\n\n }\n\n }\n\n\n\n\n\n };\n", "file_path": "embassy/src/util/mod.rs", "rank": 29, "score": 207617.37515643222 }, { "content": " pub trait TimerType {}\n\n}\n\n\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 30, "score": 205809.71866409434 }, { "content": "pub trait MuxChannel: sealed::MuxChannel + super::Channel {\n\n type Mux;\n\n}\n\n\n\npac::dma_channels! 
{\n\n ($channel_peri:ident, $dma_peri:ident, $version:ident, $channel_num:expr, {dmamux: $dmamux:ident, dmamux_channel: $dmamux_channel:expr}) => {\n\n impl sealed::MuxChannel for peripherals::$channel_peri {\n\n const DMAMUX_CH_NUM: u8 = $dmamux_channel;\n\n const DMAMUX_REGS: pac::dmamux::Dmamux = pac::$dmamux;\n\n }\n\n impl MuxChannel for peripherals::$channel_peri {\n\n type Mux = $dmamux;\n\n }\n\n };\n\n}\n\n\n\n/// safety: must be called only once\n\npub(crate) unsafe fn init() {}\n", "file_path": "embassy-stm32/src/dma/dmamux.rs", "rank": 31, "score": 205372.16482565063 }, { "content": "/// Any object implementing this trait guarantees exclusive access to the data contained\n\n/// within the mutex for the duration of the lock.\n\n/// Adapted from https://github.com/rust-embedded/mutex-trait.\n\npub trait Mutex {\n\n /// Data protected by the mutex.\n\n type Data;\n\n\n\n fn new(data: Self::Data) -> Self;\n\n\n\n /// Creates a critical section and grants temporary access to the protected data.\n\n fn lock<R>(&self, f: impl FnOnce(&Self::Data) -> R) -> R;\n\n}\n\n\n\n/// A \"mutex\" based on critical sections\n\n///\n\n/// # Safety\n\n///\n\n/// **This Mutex is only safe on single-core systems.**\n\n///\n\n/// On multi-core systems, a `CriticalSection` **is not sufficient** to ensure exclusive access.\n\npub struct CriticalSectionMutex<T> {\n\n inner: UnsafeCell<T>,\n\n}\n", "file_path": "embassy/src/blocking_mutex/mod.rs", "rank": 32, "score": 204265.63835674775 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static crate::pac::dac::Dac;\n\n }\n\n\n", "file_path": "embassy-stm32/src/dac/mod.rs", "rank": 33, "score": 204259.71572516707 }, { "content": " pub trait Channel {}\n\n}\n\n\n", "file_path": "embassy-stm32/src/dma/mod.rs", "rank": 34, "score": 204259.71572516707 }, { "content": " pub trait Instance {\n\n fn regs(&self) -> crate::pac::usart::Usart;\n\n }\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 35, "score": 
204259.71572516707 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static crate::pac::spi::Spi;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 36, "score": 204259.71572516707 }, { "content": " pub trait Common {\n\n fn regs() -> &'static crate::pac::adccommon::AdcCommon;\n\n }\n\n\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 37, "score": 204259.71572516707 }, { "content": "pub trait Configurator {\n\n fn poll(&mut self, iface: &mut Interface, sockets: &mut SocketSet, timestamp: Instant)\n\n -> Event;\n\n}\n", "file_path": "embassy-net/src/config/mod.rs", "rank": 38, "score": 204259.71572516707 }, { "content": " pub trait Instance {\n\n fn regs() -> &'static crate::pac::adc::Adc;\n\n fn common_regs() -> &'static crate::pac::adccommon::AdcCommon;\n\n }\n\n\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 39, "score": 204259.71572516707 }, { "content": "#[allow(unused)]\n\npub fn config() -> Config {\n\n let mut config = Config::default();\n\n config.rcc.sys_ck = Some(400.mhz().into());\n\n config.rcc.pll1.q_ck = Some(100.mhz().into());\n\n config.rcc.enable_dma1 = true;\n\n config\n\n}\n", "file_path": "examples/stm32h7/src/example_common.rs", "rank": 40, "score": 201628.81519272574 }, { "content": "pub trait TimerType: sealed::TimerType {}\n\n\n\npub enum Awaitable {}\n\npub enum NotAwaitable {}\n\n\n\nimpl sealed::TimerType for Awaitable {}\n\nimpl sealed::TimerType for NotAwaitable {}\n\nimpl TimerType for Awaitable {}\n\nimpl TimerType for NotAwaitable {}\n\n\n\npub struct Timer<'d, T: Instance, I: TimerType = NotAwaitable> {\n\n phantom: PhantomData<(&'d mut T, I)>,\n\n}\n\n\n\nimpl<'d, T: Instance> Timer<'d, T, Awaitable> {\n\n pub fn new_awaitable(\n\n timer: impl Unborrow<Target = T> + 'd,\n\n irq: impl Unborrow<Target = T::Interrupt> + 'd,\n\n ) -> Self {\n\n unborrow!(irq);\n", "file_path": "embassy-nrf/src/timer.rs", "rank": 41, "score": 201316.39942943476 }, { "content": "/// Trait representing a radio 
switch for boards using the Sx127x radio. One some\n\n/// boards, this will be a dummy implementation that does nothing.\n\npub trait RadioSwitch {\n\n fn set_tx(&mut self);\n\n fn set_rx(&mut self);\n\n}\n\n\n\n/// Semtech Sx127x radio peripheral\n\npub struct Sx127xRadio<SPI, CS, RESET, E, I, RFS>\n\nwhere\n\n SPI: Transfer<u8, Error = E> + Write<u8, Error = E> + 'static,\n\n E: 'static,\n\n CS: OutputPin + 'static,\n\n RESET: OutputPin + 'static,\n\n I: WaitForRisingEdge + 'static,\n\n RFS: RadioSwitch + 'static,\n\n{\n\n radio: LoRa<SPI, CS, RESET>,\n\n rfs: RFS,\n\n irq: I,\n\n}\n\n\n", "file_path": "embassy-lora/src/sx127x/mod.rs", "rank": 42, "score": 201067.83343120664 }, { "content": " pub trait RccPeripheral {\n\n fn frequency() -> crate::time::Hertz;\n\n fn reset();\n\n fn enable();\n\n fn disable();\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/rcc/mod.rs", "rank": 43, "score": 201057.8158414649 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::HSI16 => {\n\n // Enable HSI16\n\n unsafe {\n\n rcc.cr().write(|w| w.set_hsion(true));\n\n while !rcc.cr().read().hsirdy() {}\n\n }\n\n\n\n (HSI_FREQ, 0x01)\n\n }\n\n ClockSrc::HSE32 => {\n\n // Enable HSE32\n", "file_path": "embassy-stm32/src/rcc/wl5x/mod.rs", "rank": 44, "score": 198006.48476831737 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::HSI16 => {\n\n // Enable HSI16\n\n unsafe {\n\n rcc.cr().write(|w| 
w.set_hsion(true));\n\n while !rcc.cr().read().hsirdy() {}\n\n }\n\n\n\n (HSI_FREQ, 0x01)\n\n }\n\n ClockSrc::HSE(freq) => {\n\n // Enable HSE\n", "file_path": "embassy-stm32/src/rcc/l4/mod.rs", "rank": 45, "score": 198006.48476831737 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n // `cfgr` is almost always a constant, so make sure it can be constant-propagated properly by\n\n // marking this function and all `Config` constructors and setters as `#[inline]`.\n\n // This saves ~900 Bytes for the `pwr.rs` example.\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::MSI(range) => {\n\n // Set MSI range\n\n unsafe {\n\n rcc.icscr().write(|w| w.set_msirange(range.into()));\n\n }\n\n\n\n // Enable MSI\n\n unsafe {\n", "file_path": "embassy-stm32/src/rcc/l1/mod.rs", "rank": 46, "score": 198006.48476831737 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::HSI16(div) => {\n\n // Enable HSI16\n\n let div: u8 = div.into();\n\n unsafe {\n\n rcc.cr().write(|w| {\n\n w.set_hsidiv(div);\n\n w.set_hsion(true)\n\n });\n\n while !rcc.cr().read().hsirdy() {}\n\n }\n\n\n", "file_path": "embassy-stm32/src/rcc/g0/mod.rs", "rank": 47, "score": 198006.48476831737 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n // `cfgr` is almost always a constant, so make sure it can be 
constant-propagated properly by\n\n // marking this function and all `Config` constructors and setters as `#[inline]`.\n\n // This saves ~900 Bytes for the `pwr.rs` example.\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::MSI(range) => {\n\n // Set MSI range\n\n unsafe {\n\n rcc.icscr().write(|w| w.set_msirange(range.into()));\n\n }\n\n\n\n // Enable MSI\n\n unsafe {\n", "file_path": "embassy-stm32/src/rcc/l0/mod.rs", "rank": 48, "score": 198006.48476831737 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Clocks;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Clocks {\n\n let rcc = pac::RCC;\n\n let (sys_clk, sw) = match cfgr.mux {\n\n ClockSrc::HSI16 => {\n\n // Enable HSI16\n\n unsafe {\n\n rcc.cr().write(|w| w.set_hsion(true));\n\n while !rcc.cr().read().hsirdy() {}\n\n }\n\n\n\n (HSI_FREQ, 0x01)\n\n }\n\n ClockSrc::HSE(freq) => {\n\n // Enable HSE\n", "file_path": "embassy-stm32/src/rcc/wb/mod.rs", "rank": 49, "score": 198006.48476831737 }, { "content": "fn find_reg<'c>(rcc: &'c ir::IR, reg_regex: &str, field_name: &str) -> Option<(&'c str, &'c str)> {\n\n let reg_regex = Regex::new(reg_regex).unwrap();\n\n\n\n for (name, fieldset) in &rcc.fieldsets {\n\n // Workaround for some families that prefix register aliases with C1_, which does\n\n // not help matching for clock name.\n\n if !name.starts_with(\"C1\") && !name.starts_with(\"C2\") && reg_regex.is_match(name) {\n\n for field in &fieldset.fields {\n\n if field_name == field.name {\n\n return Some((name.as_str(), field.name.as_str()));\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "stm32-metapac-gen/src/lib.rs", "rank": 50, "score": 194389.48827907062 }, { "content": " pub trait Instance: RccPeripheral {\n\n fn regs() -> 
crate::pac::i2c::I2c;\n\n\n\n fn state_number() -> usize;\n\n }\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 51, "score": 194079.14542478486 }, { "content": "/// Creates a future which copies all the bytes from one object to another.\n\n///\n\n/// The returned future will copy all the bytes read from this `AsyncBufRead` into the\n\n/// `writer` specified. This future will only complete once the `reader` has hit\n\n/// EOF and all bytes have been written to and flushed from the `writer`\n\n/// provided.\n\n///\n\n/// On success the number of bytes is returned.\n\n///\n\n/// # Examples\n\n///\n\n/// ``` ignore\n\n/// # futures::executor::block_on(async {\n\n/// use futures::io::{self, AsyncWriteExt, Cursor};\n\n///\n\n/// let reader = Cursor::new([1, 2, 3, 4]);\n\n/// let mut writer = Cursor::new(vec![0u8; 5]);\n\n///\n\n/// let bytes = io::copy_buf(reader, &mut writer).await?;\n\n/// writer.close().await?;\n\n///\n\n/// assert_eq!(bytes, 4);\n\n/// assert_eq!(writer.into_inner(), [1, 2, 3, 4, 0]);\n\n/// # Ok::<(), Box<dyn std::error::Error>>(()) }).unwrap();\n\n/// ```\n\npub fn copy_buf<R, W>(reader: R, writer: &mut W) -> CopyBuf<'_, R, W>\n\nwhere\n\n R: AsyncBufRead,\n\n W: AsyncWrite + Unpin + ?Sized,\n\n{\n\n CopyBuf {\n\n reader,\n\n writer,\n\n amt: 0,\n\n }\n\n}\n\n\n\n/// Future for the [`copy_buf()`] function.\n\n#[pin_project]\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct CopyBuf<'a, R, W: ?Sized> {\n\n #[pin]\n\n reader: R,\n\n writer: &'a mut W,\n", "file_path": "embassy/src/io/util/copy_buf.rs", "rank": 52, "score": 192441.7499691627 }, { "content": "pub trait Channel: sealed::Channel {\n\n type ReadFuture<'a>: Future<Output = ()> + 'a\n\n where\n\n Self: 'a;\n\n\n\n type WriteFuture<'a>: Future<Output = ()> + 'a\n\n where\n\n Self: 'a;\n\n\n\n fn read<'a>(\n\n &'a mut self,\n\n request: Request,\n\n src: *mut u8,\n\n buf: &'a mut [u8],\n\n ) -> Self::ReadFuture<'a>;\n\n\n\n 
fn write<'a>(\n\n &'a mut self,\n\n request: Request,\n\n buf: &'a [u8],\n", "file_path": "embassy-stm32/src/dma/mod.rs", "rank": 53, "score": 190595.82391260364 }, { "content": " pub trait TXD1Pin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 54, "score": 188363.4034966691 }, { "content": " pub trait TXD0Pin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 55, "score": 188363.4034966691 }, { "content": "pub trait AsyncWriteExt: AsyncWrite {\n\n fn write_all<'a>(&'a mut self, buf: &'a [u8]) -> WriteAll<'a, Self>\n\n where\n\n Self: Unpin,\n\n {\n\n WriteAll::new(self, buf)\n\n }\n\n\n\n fn write_byte(&mut self, byte: u8) -> WriteByte<Self>\n\n where\n\n Self: Unpin,\n\n {\n\n WriteByte::new(self, byte)\n\n }\n\n\n\n fn write<'a>(&'a mut self, buf: &'a [u8]) -> Write<'a, Self>\n\n where\n\n Self: Unpin,\n\n {\n\n Write::new(self, buf)\n\n }\n\n}\n\n\n\nimpl<R: AsyncWrite + ?Sized> AsyncWriteExt for R {}\n", "file_path": "embassy/src/io/util/mod.rs", "rank": 56, "score": 188363.4034966691 }, { "content": " pub trait CRSPin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 57, "score": 188363.4034966691 }, { "content": " pub trait RXD1Pin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 58, "score": 188363.4034966691 }, { "content": " pub trait RXD0Pin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 59, "score": 188363.4034966691 }, { "content": " pub trait MDIOPin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 60, "score": 188363.4034966691 }, { "content": " pub trait MDCPin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 61, "score": 
188363.4034966691 }, { "content": " pub trait AdcPin<T: Instance> {\n\n fn channel(&self) -> u8;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 62, "score": 187674.60429997335 }, { "content": " pub trait TxDma<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 63, "score": 187674.60429997335 }, { "content": " pub trait RxDma<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 64, "score": 187674.60429997335 }, { "content": " pub trait TxDma<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 65, "score": 187674.60429997335 }, { "content": " pub trait RxDma<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 66, "score": 187674.60429997335 }, { "content": " pub trait RefClkPin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 67, "score": 185687.51087673652 }, { "content": " pub trait TXEnPin: GpioPin {\n\n fn configure(&mut self);\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 68, "score": 185687.51087673652 }, { "content": " pub trait RxDmaChannel<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n}\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 69, "score": 184880.0819844879 }, { "content": " pub trait TxDmaChannel<T: Instance> {\n\n fn request(&self) -> dma::Request;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 70, "score": 184880.0819844879 }, { "content": "pub trait Instance: sealed::Instance + 'static {\n\n type Interrupt: Interrupt;\n\n}\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 71, "score": 184560.2153293198 }, { "content": "pub trait Common: sealed::Common + 'static {}\n", "file_path": 
"embassy-stm32/src/adc/mod.rs", "rank": 72, "score": 184560.2153293198 }, { "content": "pub trait Instance: sealed::Instance + 'static {}\n\n\n", "file_path": "embassy-stm32/src/dac/mod.rs", "rank": 73, "score": 184560.2153293198 }, { "content": "pub trait Instance: sealed::Instance + 'static {}\n", "file_path": "embassy-stm32/src/adc/mod.rs", "rank": 74, "score": 184560.2153293198 }, { "content": "pub trait AsyncBufReadExt: AsyncBufRead {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<Result<usize>>\n\n where\n\n Self: Unpin,\n\n {\n\n let mut this = &mut *self;\n\n let rbuf = ready!(Pin::new(&mut this).poll_fill_buf(cx))?;\n\n let n = min(buf.len(), rbuf.len());\n\n buf[..n].copy_from_slice(&rbuf[..n]);\n\n Pin::new(&mut this).consume(n);\n\n Poll::Ready(Ok(n))\n\n }\n\n\n\n fn read_while<'a, F: Fn(u8) -> bool>(\n\n &'a mut self,\n\n buf: &'a mut [u8],\n", "file_path": "embassy/src/io/util/mod.rs", "rank": 75, "score": 183122.85106676276 }, { "content": " pub trait TxPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 76, "score": 181765.69301383433 }, { "content": " pub trait CtsPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 77, "score": 181765.69301383433 }, { "content": " pub trait RtsPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 78, "score": 181765.69301383433 }, { "content": "pub trait Instance: sealed::Instance + RccPeripheral {}\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 79, "score": 181765.69301383433 }, { "content": " pub trait RxPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 80, "score": 181765.69301383433 }, { "content": " pub trait CkPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", 
"file_path": "embassy-stm32/src/usart/mod.rs", "rank": 81, "score": 181765.69301383433 }, { "content": "pub trait Instance: sealed::Instance + RccPeripheral {\n\n type Interrupt: Interrupt;\n\n}\n", "file_path": "embassy-stm32/src/usart/mod.rs", "rank": 82, "score": 181765.69301383433 }, { "content": " pub trait SclPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 83, "score": 181765.69301383433 }, { "content": " pub trait SdaPin<T: Instance>: Pin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", "file_path": "embassy-stm32/src/i2c/mod.rs", "rank": 84, "score": 181765.69301383433 }, { "content": " pub trait SckPin<T: Instance>: OptionalPin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 85, "score": 179089.80039390174 }, { "content": " pub trait MosiPin<T: Instance>: OptionalPin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 86, "score": 179089.80039390174 }, { "content": "pub trait RccPeripheral: sealed::RccPeripheral + 'static {}\n\n\n\ncrate::pac::peripheral_rcc!(\n\n ($inst:ident, $clk:ident, $enable:ident, $reset:ident, $perien:ident, $perirst:ident) => {\n\n impl sealed::RccPeripheral for peripherals::$inst {\n\n fn frequency() -> crate::time::Hertz {\n\n critical_section::with(|_| {\n\n unsafe {\n\n let freqs = get_freqs();\n\n freqs.$clk\n\n }\n\n })\n\n }\n\n fn enable() {\n\n critical_section::with(|_| {\n\n unsafe {\n\n crate::pac::RCC.$enable().modify(|w| w.$perien(true));\n\n }\n\n })\n\n }\n", "file_path": "embassy-stm32/src/rcc/mod.rs", "rank": 87, "score": 179089.80039390174 }, { "content": " pub trait MisoPin<T: Instance>: OptionalPin {\n\n fn af_num(&self) -> u8;\n\n }\n\n\n", "file_path": "embassy-stm32/src/spi/mod.rs", "rank": 88, "score": 179089.80039390174 }, { "content": "pub trait TXD1Pin: sealed::TXD1Pin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 
89, "score": 176525.14058392798 }, { "content": "pub trait TXD0Pin: sealed::TXD0Pin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 90, "score": 176525.14058392798 }, { "content": "pub trait RXD1Pin: sealed::RXD1Pin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 91, "score": 176525.14058392798 }, { "content": "pub trait MDIOPin: sealed::MDIOPin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 92, "score": 176525.14058392798 }, { "content": "pub trait CRSPin: sealed::CRSPin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 93, "score": 176525.14058392798 }, { "content": "pub trait MDCPin: sealed::MDCPin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 94, "score": 176525.14058392798 }, { "content": "pub trait RXD0Pin: sealed::RXD0Pin + 'static {}\n\n\n", "file_path": "embassy-stm32/src/eth/v2/mod.rs", "rank": 95, "score": 176525.14058392798 }, { "content": "/// Random-number Generator\n\npub trait Rng {\n\n type Error;\n\n\n\n #[rustfmt::skip]\n\n type RngFuture<'a>: Future<Output = Result<(), Self::Error> > + 'a\n\n where\n\n Self: 'a;\n\n\n\n /// Completely fill the provided buffer with random bytes.\n\n ///\n\n /// May result in delays if entropy is exhausted prior to completely\n\n /// filling the buffer. 
Upon completion, the buffer will be completely\n\n /// filled or an error will have been reported.\n\n fn fill_bytes<'a>(&'a mut self, dest: &'a mut [u8]) -> Self::RngFuture<'a>;\n\n}\n\n\n\npub struct Random<T: Rng> {\n\n rng: T,\n\n}\n\n\n", "file_path": "embassy-traits/src/rng.rs", "rank": 97, "score": 172264.79064892017 }, { "content": "pub trait Delay {\n\n type DelayFuture<'a>: Future<Output = ()> + 'a;\n\n\n\n /// Future that completes after now + millis\n\n fn delay_ms<'a>(&'a mut self, millis: u64) -> Self::DelayFuture<'a>;\n\n\n\n /// Future that completes after now + micros\n\n fn delay_us<'a>(&'a mut self, micros: u64) -> Self::DelayFuture<'a>;\n\n}\n", "file_path": "embassy-traits/src/delay.rs", "rank": 98, "score": 172264.79064892017 }, { "content": "pub trait Flash {\n\n type ReadFuture<'a>: Future<Output = Result<(), Error>>\n\n where\n\n Self: 'a;\n\n\n\n type WriteFuture<'a>: Future<Output = Result<(), Error>>\n\n where\n\n Self: 'a;\n\n\n\n type ErasePageFuture<'a>: Future<Output = Result<(), Error>>\n\n where\n\n Self: 'a;\n\n\n\n /// Reads data from the flash device.\n\n ///\n\n /// address must be a multiple of self.read_size().\n\n /// buf.len() must be a multiple of self.read_size().\n\n fn read<'a>(&'a mut self, address: usize, buf: &'a mut [u8]) -> Self::ReadFuture<'a>;\n\n\n\n /// Writes data to the flash device.\n", "file_path": "embassy-traits/src/flash.rs", "rank": 99, "score": 172264.79064892017 } ]
Rust
identity-account/src/account/builder.rs
charlesthompson3/identity.rs
713140734e86a4b11f85921009b491ff28b5cd10
#[cfg(feature = "stronghold")] use std::path::PathBuf; use std::sync::Arc; #[cfg(feature = "stronghold")] use zeroize::Zeroize; use identity_account_storage::storage::MemStore; use identity_account_storage::storage::Storage; #[cfg(feature = "stronghold")] use identity_account_storage::storage::Stronghold; use identity_iota::tangle::Client; use identity_iota::tangle::ClientBuilder; use identity_iota_core::did::IotaDID; use super::config::AccountConfig; use super::config::AccountSetup; use super::config::AutoSave; use crate::account::Account; use crate::error::Result; use crate::identity::IdentitySetup; #[derive(Debug)] pub enum AccountStorage { Memory, #[cfg(feature = "stronghold")] Stronghold(PathBuf, Option<String>, Option<bool>), Custom(Arc<dyn Storage>), } #[derive(Debug)] pub struct AccountBuilder { config: AccountConfig, storage_template: Option<AccountStorage>, storage: Option<Arc<dyn Storage>>, client_builder: Option<ClientBuilder>, client: Option<Arc<Client>>, } impl AccountBuilder { pub fn new() -> Self { Self { config: AccountConfig::new(), storage_template: Some(AccountStorage::Memory), storage: Some(Arc::new(MemStore::new())), client_builder: None, client: None, } } #[must_use] pub fn autosave(mut self, value: AutoSave) -> Self { self.config = self.config.autosave(value); self } #[must_use] pub fn autopublish(mut self, value: bool) -> Self { self.config = self.config.autopublish(value); self } #[cfg(test)] #[must_use] pub(crate) fn testmode(mut self, value: bool) -> Self { self.config = self.config.testmode(value); self } #[must_use] pub fn storage(mut self, value: AccountStorage) -> Self { self.storage_template = Some(value); self } async fn get_storage(&mut self) -> Result<Arc<dyn Storage>> { match self.storage_template.take() { Some(AccountStorage::Memory) => { let storage = Arc::new(MemStore::new()); self.storage = Some(storage); } #[cfg(feature = "stronghold")] Some(AccountStorage::Stronghold(snapshot, password, dropsave)) => { let passref: 
Option<&str> = password.as_deref(); let adapter: Stronghold = Stronghold::new(&snapshot, passref, dropsave).await?; if let Some(mut password) = password { password.zeroize(); } let storage = Arc::new(adapter); self.storage = Some(storage); } Some(AccountStorage::Custom(storage)) => { self.storage = Some(storage); } None => (), }; Ok(Arc::clone(self.storage.as_ref().unwrap())) } #[must_use] pub fn client(mut self, client: Arc<Client>) -> Self { self.client = Some(client); self.client_builder = None; self } #[must_use] pub fn client_builder(mut self, client_builder: ClientBuilder) -> Self { self.client = None; self.client_builder = Some(client_builder); self } async fn get_or_build_client(&mut self) -> Result<Arc<Client>> { if let Some(client) = &self.client { Ok(Arc::clone(client)) } else if let Some(client_builder) = self.client_builder.take() { let client: Arc<Client> = Arc::new(client_builder.build().await?); self.client = Some(Arc::clone(&client)); Ok(client) } else { let client: Arc<Client> = Arc::new(Client::new().await?); self.client = Some(Arc::clone(&client)); Ok(client) } } async fn build_setup(&mut self) -> Result<AccountSetup> { let client: Arc<Client> = self.get_or_build_client().await?; Ok(AccountSetup::new( self.get_storage().await?, client, self.config.clone(), )) } pub async fn create_identity(&mut self, input: IdentitySetup) -> Result<Account> { let setup: AccountSetup = self.build_setup().await?; Account::create_identity(setup, input).await } pub async fn load_identity(&mut self, did: IotaDID) -> Result<Account> { let setup: AccountSetup = self.build_setup().await?; Account::load_identity(setup, did).await } } impl Default for AccountBuilder { fn default() -> Self { Self::new() } }
#[cfg(feature = "stronghold")] use std::path::PathBuf; use std::sync::Arc; #[cfg(feature = "stronghold")] use zeroize::Zeroize; use identity_account_storage::storage::MemStore; use identity_account_storage::storage::Storage; #[cfg(feature = "stronghold")] use identity_account_storage::storage::Stronghold; use identity_iota::tangle::Client; use identity_iota::tangle::ClientBuilder; use identity_iota_core::did::IotaDID; use super::config::AccountConfig; use super::config::AccountSetup; use super::config::AutoSave; use crate::account::Account; use crate::error::Result; use crate::identity::IdentitySetup; #[derive(Debug)] pub enum AccountStorage { Memory, #[cfg(feature = "stronghold")] Stronghold(PathBuf, Option<String>, Option<bool>), Custom(Arc<dyn Storage>), } #[derive(Debug)] pub struct AccountBuilder { config: AccountConfig, storage_template: Option<AccountStorage>, storage: Option<Arc<dyn Storage>>, client_builder: Option<ClientBuilder>, client: Option<Arc<Client>>, } impl AccountBuilder { pub fn new() -> Self { Self { config: A
#[must_use] pub fn autosave(mut self, value: AutoSave) -> Self { self.config = self.config.autosave(value); self } #[must_use] pub fn autopublish(mut self, value: bool) -> Self { self.config = self.config.autopublish(value); self } #[cfg(test)] #[must_use] pub(crate) fn testmode(mut self, value: bool) -> Self { self.config = self.config.testmode(value); self } #[must_use] pub fn storage(mut self, value: AccountStorage) -> Self { self.storage_template = Some(value); self } async fn get_storage(&mut self) -> Result<Arc<dyn Storage>> { match self.storage_template.take() { Some(AccountStorage::Memory) => { let storage = Arc::new(MemStore::new()); self.storage = Some(storage); } #[cfg(feature = "stronghold")] Some(AccountStorage::Stronghold(snapshot, password, dropsave)) => { let passref: Option<&str> = password.as_deref(); let adapter: Stronghold = Stronghold::new(&snapshot, passref, dropsave).await?; if let Some(mut password) = password { password.zeroize(); } let storage = Arc::new(adapter); self.storage = Some(storage); } Some(AccountStorage::Custom(storage)) => { self.storage = Some(storage); } None => (), }; Ok(Arc::clone(self.storage.as_ref().unwrap())) } #[must_use] pub fn client(mut self, client: Arc<Client>) -> Self { self.client = Some(client); self.client_builder = None; self } #[must_use] pub fn client_builder(mut self, client_builder: ClientBuilder) -> Self { self.client = None; self.client_builder = Some(client_builder); self } async fn get_or_build_client(&mut self) -> Result<Arc<Client>> { if let Some(client) = &self.client { Ok(Arc::clone(client)) } else if let Some(client_builder) = self.client_builder.take() { let client: Arc<Client> = Arc::new(client_builder.build().await?); self.client = Some(Arc::clone(&client)); Ok(client) } else { let client: Arc<Client> = Arc::new(Client::new().await?); self.client = Some(Arc::clone(&client)); Ok(client) } } async fn build_setup(&mut self) -> Result<AccountSetup> { let client: Arc<Client> = 
self.get_or_build_client().await?; Ok(AccountSetup::new( self.get_storage().await?, client, self.config.clone(), )) } pub async fn create_identity(&mut self, input: IdentitySetup) -> Result<Account> { let setup: AccountSetup = self.build_setup().await?; Account::create_identity(setup, input).await } pub async fn load_identity(&mut self, did: IotaDID) -> Result<Account> { let setup: AccountSetup = self.build_setup().await?; Account::load_identity(setup, did).await } } impl Default for AccountBuilder { fn default() -> Self { Self::new() } }
ccountConfig::new(), storage_template: Some(AccountStorage::Memory), storage: Some(Arc::new(MemStore::new())), client_builder: None, client: None, } }
function_block-function_prefixed
[ { "content": "// implement Debug on the Enum from the `InputModel`.\n\npub fn impl_debug_enum(input: &InputModel) -> TokenStream {\n\n // collect appropriate data and generate param declarations.\n\n let diff: &Ident = input.diff();\n\n let evariants: &Vec<EVariant> = input.e_variants();\n\n\n\n let param_decls: &Punctuated<GenericParam, Comma> = input.param_decls();\n\n let params: &Punctuated<Ident, Comma> = input.params();\n\n\n\n let param_decls: Vec<TokenStream> = param_decls\n\n .iter()\n\n .map(|p| match p {\n\n GenericParam::Lifetime(life) => quote! { #life },\n\n GenericParam::Const(cp) => quote! { #cp },\n\n GenericParam::Type(typ) => {\n\n let S: &Ident = &typ.ident;\n\n\n\n let bounds: Vec<TokenStream> = typ.bounds.iter().map(|tb| quote! { #tb }).collect();\n\n\n\n quote! {\n\n #S: identity_diff::Diff\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 0, "score": 259427.8188133833 }, { "content": "/// derive the `Diff` trait for incoming Enum in `InputModel`.\n\npub fn impl_diff_enum(input: &InputModel) -> TokenStream {\n\n // collect appropriate data and generate param declarations.\n\n let name: &Ident = input.name();\n\n let diff: &Ident = input.diff();\n\n let evariants: &Vec<EVariant> = input.e_variants();\n\n\n\n let param_decls = input.param_decls();\n\n let params = input.params();\n\n\n\n let clause: &WhereClause = input.clause();\n\n\n\n let param_decls: Vec<TokenStream> = param_decls\n\n .iter()\n\n .map(|p| match p {\n\n GenericParam::Lifetime(life) => quote! { #life },\n\n GenericParam::Const(cp) => quote! { #cp },\n\n GenericParam::Type(typ) => {\n\n let S: &Ident = &typ.ident;\n\n\n\n let bounds: Vec<TokenStream> = typ.bounds.iter().map(|tb| quote! 
{ #tb }).collect();\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 1, "score": 259427.75222665197 }, { "content": "pub fn default_hint() -> RecordHint {\n\n // unwrap is okay, the hint is <= 24 bytes\n\n RecordHint::new([0; 24]).unwrap()\n\n}\n", "file_path": "identity-account-storage/src/stronghold/hint.rs", "rank": 2, "score": 253411.2908978716 }, { "content": "pub fn impl_from_into(input: &InputModel) -> TokenStream {\n\n if input.from_into() {\n\n let diff = input.diff();\n\n let param_decls = input.param_decls();\n\n let params = input.params();\n\n let clause = input.clause();\n\n let name = input.name();\n\n let param_decls: Vec<TokenStream> = param_decls\n\n .iter()\n\n .map(|tp_decl| match tp_decl {\n\n GenericParam::Lifetime(life) => quote! { #life },\n\n GenericParam::Const(consts) => quote! { #consts},\n\n GenericParam::Type(typ) => {\n\n let S: &Ident = &typ.ident;\n\n\n\n let bounds: Vec<TokenStream> = typ\n\n .bounds\n\n .iter()\n\n .map(|bound| {\n\n quote! {\n", "file_path": "identity-diff/derive/src/impls/structs.rs", "rank": 3, "score": 252351.32214819832 }, { "content": "/// implement Diff for the struct.\n\npub fn diff_impl(input: &InputModel) -> TokenStream {\n\n // collect relevant fields and generate param declarations.\n\n let svariant = input.s_variant();\n\n let name = input.name();\n\n let diff = input.diff();\n\n let fields = input.fields();\n\n let param_decls = input.param_decls();\n\n let params = input.params();\n\n let clause = input.clause();\n\n let param_decls: Vec<TokenStream> = param_decls\n\n .iter()\n\n .map(|tp_decl| match tp_decl {\n\n GenericParam::Lifetime(life) => quote! { #life },\n\n GenericParam::Const(consts) => quote! 
{ #consts},\n\n GenericParam::Type(typ) => {\n\n let S: &Ident = &typ.ident;\n\n\n\n let bounds: Vec<TokenStream> = typ\n\n .bounds\n\n .iter()\n", "file_path": "identity-diff/derive/src/impls/structs.rs", "rank": 4, "score": 249657.57911313308 }, { "content": "/// Implement the Debug trait on a derived struct.\n\npub fn debug_impl(input: &InputModel) -> TokenStream {\n\n // collect relevant fields.\n\n let svariant = input.s_variant();\n\n let diff = input.diff();\n\n let fields = input.fields();\n\n let param_decls = input.param_decls();\n\n let params = input.params();\n\n let clause = input.clause();\n\n\n\n // setup param declarations.\n\n let param_decls: Vec<TokenStream> = param_decls\n\n .iter()\n\n .map(|tp_decl| match tp_decl {\n\n GenericParam::Lifetime(life) => quote! { #life },\n\n GenericParam::Const(consts) => quote! { #consts},\n\n GenericParam::Type(typ) => {\n\n let S: &Ident = &typ.ident;\n\n\n\n let bounds: Vec<TokenStream> = typ\n\n .bounds\n", "file_path": "identity-diff/derive/src/impls/structs.rs", "rank": 5, "score": 249657.51106545446 }, { "content": "/// derive a Diff type Enum from an incoming `InputModel`.\n\npub fn derive_diff_enum(input: &InputModel) -> TokenStream {\n\n // collect appropriate data and generate param declarations.\n\n let diff: &Ident = input.diff();\n\n let evariants: &Vec<EVariant> = input.e_variants();\n\n\n\n let serde_attrs = if input.from_into() {\n\n let name = input.name();\n\n let stype = quote!(#name).to_string();\n\n\n\n quote! {\n\n #[serde(from=#stype, into=#stype)]\n\n }\n\n } else {\n\n quote! {}\n\n };\n\n\n\n let param_decls: &Punctuated<GenericParam, Comma> = input.param_decls();\n\n\n\n let clause = quote! 
{};\n\n\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 6, "score": 247097.07927423256 }, { "content": "/// Derive the difference struct code from the `InputModel`\n\npub fn derive_diff_struct(input: &InputModel) -> TokenStream {\n\n // setup the relevant fields.\n\n let svariant = input.s_variant();\n\n let diff = input.diff();\n\n let fields = input.fields();\n\n let param_decls = input.param_decls();\n\n let clause = input.clause();\n\n let serde_attrs = if input.from_into() {\n\n let name = input.name();\n\n let stype = quote!(#name).to_string();\n\n\n\n quote! {\n\n #[serde(from=#stype, into=#stype)]\n\n }\n\n } else {\n\n quote! {}\n\n };\n\n\n\n // set the param declarations.\n\n let param_decls: Vec<TokenStream> = param_decls\n", "file_path": "identity-diff/derive/src/impls/structs.rs", "rank": 7, "score": 247053.61225930278 }, { "content": "pub fn hint<T>(data: &T) -> Option<RecordHint>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n RecordHint::new(data.as_ref())\n\n}\n\n\n", "file_path": "identity-account-storage/src/stronghold/hint.rs", "rank": 8, "score": 231660.36284570926 }, { "content": "pub fn encode_b64(data: impl AsRef<[u8]>) -> String {\n\n base64::encode_config(data.as_ref(), base64::URL_SAFE_NO_PAD)\n\n}\n\n\n", "file_path": "libjose/src/utils/base64.rs", "rank": 9, "score": 215036.0684075478 }, { "content": "struct Listener(Box<dyn FnMut(&Path, &SnapshotStatus) + Send>);\n\n\n", "file_path": "identity-account-storage/src/stronghold/context.rs", "rank": 10, "score": 207332.76535730626 }, { "content": "pub fn parse_utf8(slice: &(impl AsRef<[u8]> + ?Sized)) -> Result<&str> {\n\n str::from_utf8(slice.as_ref()).map_err(Error::InvalidUtf8)\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 11, "score": 203654.6358069453 }, { "content": "pub fn decode_b64(data: impl AsRef<[u8]>) -> Result<Vec<u8>> {\n\n base64::decode_config(data.as_ref(), base64::URL_SAFE_NO_PAD).map_err(Into::into)\n\n}\n\n\n", "file_path": 
"libjose/src/utils/base64.rs", "rank": 12, "score": 203654.6358069453 }, { "content": "pub fn encode_b64_into(data: impl AsRef<[u8]>, buffer: &mut String) {\n\n base64::encode_config_buf(data.as_ref(), base64::URL_SAFE_NO_PAD, buffer)\n\n}\n\n\n", "file_path": "libjose/src/utils/base64.rs", "rank": 13, "score": 203654.6358069453 }, { "content": "pub fn decode_b64_json<T>(data: impl AsRef<[u8]>) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n decode_b64(data).and_then(|data| from_slice(&data).map_err(Into::into))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn smoke() {\n\n assert!(decode_b64(encode_b64(b\"libjose\")).is_ok());\n\n }\n\n}\n", "file_path": "libjose/src/utils/base64.rs", "rank": 14, "score": 201584.56304217063 }, { "content": "#[derive(Default, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ConfigOptions {\n\n network: Option<Network>,\n\n encoding: Option<WasmDIDMessageEncoding>,\n\n nodes: Option<Vec<String>>,\n\n primary_node: Option<NodeAuth>,\n\n primary_pow_node: Option<NodeAuth>,\n\n permanodes: Option<Vec<NodeAuth>>,\n\n node_auth: Option<Vec<NodeAuth>>,\n\n node_sync_interval: Option<u32>,\n\n node_sync_disabled: Option<bool>,\n\n quorum: Option<bool>,\n\n quorum_size: Option<usize>,\n\n quorum_threshold: Option<usize>,\n\n local_pow: Option<bool>,\n\n fallback_to_local_pow: Option<bool>,\n\n tips_interval: Option<u32>,\n\n request_timeout: Option<u32>,\n\n}\n\n\n\n#[wasm_bindgen(typescript_custom_section)]\n\nconst I_CLIENT_CONFIG: &'static str = r#\"\n\n/** {@link Client} configuration options. 
*/\n", "file_path": "bindings/wasm/src/tangle/client_config.rs", "rank": 15, "score": 201457.42038439435 }, { "content": "#[test]\n\nfn test_struct_enum() {\n\n let t = StructEnum::A { x: 100 };\n\n\n\n let t2 = StructEnum::B { y: 200 };\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(\n\n DiffStructEnum::B {\n\n y: Some(200_usize.into_diff().unwrap())\n\n },\n\n diff\n\n );\n\n\n\n let res = StructEnum::from_diff(diff).unwrap();\n\n\n\n assert_eq!(StructEnum::B { y: 200 }, res);\n\n}\n\n\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 16, "score": 195014.62581474095 }, { "content": "pub fn decode_b64_into(data: impl AsRef<[u8]>, buffer: &mut Vec<u8>) -> Result<()> {\n\n base64::decode_config_buf(data.as_ref(), base64::URL_SAFE_NO_PAD, buffer).map_err(Into::into)\n\n}\n\n\n", "file_path": "libjose/src/utils/base64.rs", "rank": 17, "score": 193785.6744122125 }, { "content": "struct Locations;\n\n\n\nimpl Locations {\n\n fn index() -> Location {\n\n Location::generic(\"__index\", \"\")\n\n }\n\n\n\n fn record(record_tag: &[u8]) -> Location {\n\n Location::generic(format!(\"__record:{}\", encode_b58(record_tag)), \"\")\n\n }\n\n}\n\n\n\n// =============================================================================\n\n// =============================================================================\n\n\n\npub type RecordTag = Output<Blake2b256>;\n\n\n\npub struct RecordIndex(Vec<u8>);\n\n\n\nimpl RecordIndex {\n", "file_path": "identity-account-storage/src/stronghold/records.rs", "rank": 18, "score": 192511.2644766785 }, { "content": "struct Runtime {\n\n event_listeners: Mutex<Vec<Listener>>,\n\n password_clear: Mutex<Duration>,\n\n password_store: Mutex<PasswordMap>,\n\n}\n\n\n\nimpl Runtime {\n\n const PASSWORD_CLEAR: Duration = Duration::from_millis(0);\n\n\n\n fn new() -> Self {\n\n Self {\n\n event_listeners: 
Mutex::new(Vec::new()),\n\n password_clear: Mutex::new(Self::PASSWORD_CLEAR),\n\n password_store: Mutex::new(PasswordMap::new()),\n\n }\n\n }\n\n\n\n fn on_change<T>(&self, listener: T) -> IotaStrongholdResult<()>\n\n where\n\n T: FnMut(&Path, &SnapshotStatus) + Send + 'static,\n", "file_path": "identity-account-storage/src/stronghold/context.rs", "rank": 19, "score": 192511.2644766785 }, { "content": "fn location_state() -> Location {\n\n Location::generic(\"$state\", Vec::new())\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 20, "score": 192232.63895165687 }, { "content": "pub fn derive_encryption_key(password: &str) -> EncryptionKey {\n\n let mut output: EncryptionKey = Default::default();\n\n\n\n // safe to unwrap (rounds > 0)\n\n PBKDF2_HMAC_SHA512(password.as_bytes(), PBKDF_SALT, PBKDF_ITER, &mut output).unwrap();\n\n\n\n output\n\n}\n", "file_path": "identity-account-storage/src/utils/crypto.rs", "rank": 21, "score": 191023.47269149387 }, { "content": "#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]\n\nstruct NodeAuth {\n\n url: String,\n\n jwt: Option<String>,\n\n username: Option<String>,\n\n password: Option<String>,\n\n}\n\n\n\n#[wasm_bindgen(typescript_custom_section)]\n\nconst I_NODE_AUTH: &'static str = r#\"\n\n/** IOTA node details with optional authentication. 
*/\n", "file_path": "bindings/wasm/src/tangle/client_config.rs", "rank": 22, "score": 190286.43942048866 }, { "content": "fn location_chain_state() -> Location {\n\n Location::generic(\"$chain_state\", Vec::new())\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 23, "score": 190009.96446016332 }, { "content": "fn location_published_generation() -> Location {\n\n Location::generic(\"$published_generation\", Vec::new())\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 24, "score": 190009.96446016332 }, { "content": "// parse data for from_diff and into_diff functions.\n\nfn parse_from_into(\n\n var: &EVariant,\n\n vname: &Ident,\n\n vfields: &[DataFields],\n\n diff: &Ident,\n\n struct_type: SVariant,\n\n) -> (Vec<TokenStream>, Vec<TokenStream>) {\n\n let mut from_body: Vec<TokenStream> = vec![];\n\n let mut into_body: Vec<TokenStream> = vec![];\n\n\n\n match struct_type {\n\n // named structs.\n\n SVariant::Named => {\n\n let fnames: Vec<&Ident> = vfields.iter().map(|f| f.name()).collect();\n\n // setup from logic.\n\n let from_fassign: Vec<TokenStream> = var\n\n .fields\n\n .iter()\n\n .map(|f| {\n\n let fname = f.name();\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 25, "score": 188646.22879345738 }, { "content": "fn fmt_did(did: &IotaDID) -> String {\n\n format!(\"$identity:{}\", did.authority())\n\n}\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 26, "score": 188226.17706983697 }, { "content": "// parse data to generate the merge functions.\n\nfn parse_merge(\n\n vname: &Ident,\n\n vfields: &[DataFields],\n\n struct_type: SVariant,\n\n) -> (Vec<TokenStream>, Vec<TokenStream>, Vec<TokenStream>) {\n\n let mut merge_rpatterns: Vec<TokenStream> = vec![];\n\n let mut merge_lpatterns: Vec<TokenStream> = vec![];\n\n let mut merge_bodies: Vec<TokenStream> = vec![];\n\n\n\n match struct_type {\n\n // named variant.\n\n SVariant::Named => 
{\n\n // get field names.\n\n let fnames: Vec<&Ident> = vfields.iter().map(|f| f.name()).collect();\n\n\n\n let (left_names, right_names) = populate_field_names(vfields, 0, struct_type);\n\n\n\n // setup merge code.\n\n let merge_fvalues: Vec<TokenStream> = vfields\n\n .iter()\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 27, "score": 186059.76831024032 }, { "content": "/// parses data for the derived diff function.\n\nfn parse_diff(\n\n vname: &Ident,\n\n vfields: &[DataFields],\n\n\n\n struct_type: SVariant,\n\n) -> (Vec<TokenStream>, Vec<TokenStream>, Vec<TokenStream>) {\n\n let mut diff_rpatterns: Vec<TokenStream> = vec![];\n\n let mut diff_lpatterns: Vec<TokenStream> = vec![];\n\n let mut diff_bodies: Vec<TokenStream> = vec![];\n\n\n\n match struct_type {\n\n // named variant.\n\n SVariant::Named => {\n\n let fnames: Vec<&Ident> = vfields.iter().map(|f| f.name()).collect();\n\n\n\n let (left_names, right_names) = populate_field_names(vfields, 0, struct_type);\n\n\n\n // setup diff logic.\n\n let diff_fvalues: Vec<TokenStream> = vfields\n\n .iter()\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 28, "score": 186059.76831024032 }, { "content": "// create field names based on thee size of an enum.\n\nfn populate_field_names(vfields: &[DataFields], fmax: usize, struct_type: SVariant) -> (Vec<Ident>, Vec<Ident>) {\n\n match struct_type {\n\n SVariant::Named => (\n\n vfields\n\n .iter()\n\n .map(|f| f.name())\n\n .map(|ident| format_ident!(\"left_{}\", ident))\n\n .collect(),\n\n vfields\n\n .iter()\n\n .map(|f| f.name())\n\n .map(|ident| format_ident!(\"right_{}\", ident))\n\n .collect(),\n\n ),\n\n SVariant::Tuple => (\n\n (0..fmax).map(|ident| format_ident!(\"left_{}\", ident)).collect(),\n\n (0..fmax).map(|ident| format_ident!(\"right_{}\", ident)).collect(),\n\n ),\n\n _ => panic!(\"Wrong Struct Type!\"),\n\n }\n\n}\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 29, "score": 183649.11059108417 }, { 
"content": "fn location_seed(location: &KeyLocation) -> Location {\n\n Location::generic(fmt_key(\"$seed\", location), Vec::new())\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 30, "score": 181744.96415985597 }, { "content": "fn location_skey(location: &KeyLocation) -> Location {\n\n Location::generic(fmt_key(\"$skey\", location), Vec::new())\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 31, "score": 181744.964159856 }, { "content": "fn generate_filename() -> PathBuf {\n\n AsRef::<Path>::as_ref(TEST_DIR).join(format!(\"{}.stronghold\", rand_string(RANDOM_FILENAME_SIZE)))\n\n}\n\n\n\nasync fn open_snapshot(path: &Path, password: EncryptionKey) -> Snapshot {\n\n if path.exists() {\n\n fs::remove_file(path).unwrap();\n\n }\n\n\n\n load_snapshot(path, password).await\n\n}\n\n\n\nasync fn load_snapshot(path: &Path, password: EncryptionKey) -> Snapshot {\n\n let snapshot: Snapshot = Snapshot::new(path);\n\n snapshot.load(password).await.unwrap();\n\n snapshot\n\n}\n\n\n\nrusty_fork_test! 
{\n\n #[test]\n", "file_path": "identity-account-storage/src/stronghold/tests.rs", "rank": 32, "score": 178492.95412056896 }, { "content": "pub fn ensure_directory<P>(path: &P) -> Result<(), std::io::Error>\n\nwhere\n\n P: AsRef<Path> + ?Sized,\n\n{\n\n if let Some(parent) = path.as_ref().parent() {\n\n fs::create_dir_all(parent)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "identity-account-storage/src/utils/fs.rs", "rank": 33, "score": 176691.78886984033 }, { "content": "fn location(name: &str) -> Location {\n\n Location::generic(name, name)\n\n}\n\n\n", "file_path": "identity-account-storage/src/stronghold/tests.rs", "rank": 34, "score": 174486.49223874905 }, { "content": "fn rand_string(chars: usize) -> String {\n\n iter::repeat(())\n\n .map(|_| OsRng.sample(Alphanumeric))\n\n .map(char::from)\n\n .take(chars)\n\n .collect()\n\n}\n\n\n", "file_path": "identity-account-storage/src/stronghold/tests.rs", "rank": 35, "score": 172144.73813608414 }, { "content": "fn fmt_key(prefix: &str, location: &KeyLocation) -> Vec<u8> {\n\n format!(\"{}:{}:{}\", prefix, location.generation(), location.fragment_name()).into_bytes()\n\n}\n\n\n", "file_path": "identity-account-storage/src/storage/stronghold.rs", "rank": 36, "score": 171407.4187762396 }, { "content": "pub fn validate_jws_headers(\n\n protected: Option<&JwsHeader>,\n\n unprotected: Option<&JwsHeader>,\n\n permitted: Option<&[String]>,\n\n) -> Result<()> {\n\n // TODO: Validate Disjoint\n\n\n\n validate_crit(protected, unprotected, permitted)?;\n\n validate_b64(protected, unprotected)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 37, "score": 165782.45045532472 }, { "content": "pub fn validate_jwe_headers<'a>(\n\n protected: Option<&JweHeader>,\n\n unprotected: Option<&JweHeader>,\n\n recipients: impl Iterator<Item = Option<&'a JweHeader>>,\n\n permitted: Option<&[String]>,\n\n) -> Result<()> {\n\n // TODO: Validate Disjoint\n\n\n\n for recipient in recipients {\n\n // TODO: Validate 
Disjoint\n\n\n\n let unprotected: Option<Cow<'_, JweHeader>> = match (unprotected, recipient) {\n\n (Some(header), None) => Some(Cow::Borrowed(header)),\n\n (None, Some(header)) => Some(Cow::Borrowed(header)),\n\n (Some(_lhs), Some(_rhs)) => todo!(\"Merge Headers\"),\n\n (None, None) => None,\n\n };\n\n\n\n validate_crit(protected, unprotected.as_deref(), permitted)?;\n\n\n\n // The \"zip\" parameter MUST be integrity protected\n\n if unprotected.map(|header| header.has(\"zip\")).unwrap_or_default() {\n\n return Err(Error::InvalidParam(\"zip (unprotected)\"));\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 38, "score": 161339.56767276538 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n console_error_panic_hook::set_once();\n\n\n\n Ok(())\n\n}\n", "file_path": "bindings/wasm/src/lib.rs", "rank": 39, "score": 155806.23393729504 }, { "content": "pub fn diffie_hellman<'a, 'b>(\n\n curve: impl Into<EcdhCurve>,\n\n public: impl Into<Secret<'a>>,\n\n secret: impl Into<Secret<'b>>,\n\n) -> Result<Vec<u8>> {\n\n let public: Secret<'a> = public.into();\n\n let secret: Secret<'b> = secret.into();\n\n\n\n match curve.into() {\n\n EcdhCurve::Ec(curve) => match curve {\n\n EcCurve::P256 => {\n\n let public: _ = public.to_p256_public()?;\n\n let secret: _ = secret.to_p256_secret()?;\n\n let shared: _ = secret.diffie_hellman(&public);\n\n\n\n Ok(shared.as_bytes().to_vec())\n\n }\n\n EcCurve::P384 => Err(Error::AlgError(\"Diffie-Hellman (P384)\")),\n\n EcCurve::P521 => Err(Error::AlgError(\"Diffie-Hellman (P521)\")),\n\n EcCurve::Secp256K1 => {\n", "file_path": "libjose/src/utils/crypto/diffie_hellman.rs", "rank": 40, "score": 155806.23393729504 }, { "content": "/// checks to see if the `should_ignore` attribute has been put before a field.\n\npub fn should_ignore(field: &Field) -> bool {\n\n let find = field.attrs.iter().find(|field| {\n\n let attr_seg: Vec<String> = 
field.path.segments.iter().map(|seg| format!(\"{}\", seg.ident)).collect();\n\n\n\n let diff_attr = attr_seg == [\"diff\"];\n\n let arg_iter = field.tokens.clone().into_iter().next();\n\n\n\n let should_ignore = match arg_iter {\n\n Some(TokenTree::Group(gr)) if gr.delimiter() == PARENS => gr\n\n .stream()\n\n .into_iter()\n\n .map(|tt| format!(\"{}\", tt))\n\n .any(|x| x == \"should_ignore\"),\n\n _ => false,\n\n };\n\n\n\n diff_attr && should_ignore\n\n });\n\n\n\n find.is_some()\n\n}\n\n\n", "file_path": "identity-diff/derive/src/utils.rs", "rank": 41, "score": 150797.74900374422 }, { "content": "/// function that parses and sorts the variants into twp Vec<TokenStream> types.\n\nfn parse_evariants(evariants: &[EVariant], diff: &Ident) -> (Vec<TokenStream>, Vec<TokenStream>) {\n\n // setup vectors for patterns and bodies.\n\n let mut patterns: Vec<TokenStream> = vec![];\n\n let mut bodies: Vec<TokenStream> = vec![];\n\n\n\n evariants\n\n .iter()\n\n .for_each(|var| match (var.variant.clone(), &var.name, &var.fields) {\n\n // Named variants.\n\n (SVariant::Named, vname, fields) => {\n\n let fnames: Vec<&Ident> = fields.iter().map(|f| f.name()).collect();\n\n let buf: Ident = format_ident!(\"buf\");\n\n\n\n // format fields and create code.\n\n let fields: Vec<TokenStream> = fields\n\n .iter()\n\n .map(|f| {\n\n let (fname, ftyp) = (f.name(), f.typ());\n\n\n\n let str_name = format!(\"{}\", fname);\n", "file_path": "identity-diff/derive/src/impls/enums.rs", "rank": 42, "score": 149339.48411888455 }, { "content": "pub fn parse_from_into(input: &DeriveInput) -> bool {\n\n let find = input.attrs.iter().find(|a| {\n\n if let Meta::List(MetaList { path, nested, .. 
}) = a.parse_meta().unwrap() {\n\n {\n\n if let Some(ident) = path.get_ident() {\n\n if \"diff\" == format!(\"{}\", format_ident!(\"{}\", ident)) {\n\n let find_nested = nested.iter().find(|m| {\n\n if let NestedMeta::Meta(Meta::Path(p)) = m {\n\n if let Some(ident) = p.get_ident() {\n\n if \"from_into\" == format!(\"{}\", format_ident!(\"{}\", ident)) {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n });\n\n\n\n return find_nested.is_some();\n\n }\n\n }\n\n }\n\n }\n\n false\n\n });\n\n\n\n find.is_some()\n\n}\n", "file_path": "identity-diff/derive/src/utils.rs", "rank": 43, "score": 149235.75171566897 }, { "content": "/// Helper function to combine a username and password into a basic authentication tuple.\n\nfn basic_auth<'a>(username: &'a Option<String>, password: &'a Option<String>) -> Option<(&'a str, &'a str)> {\n\n username.as_deref().zip(password.as_deref())\n\n}\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[wasm_bindgen(typescript_type = \"IClientConfig\")]\n\n pub type IClientConfig;\n\n}\n\n\n\n/// Helper-struct for deserializing [`INodeAuth`].\n", "file_path": "bindings/wasm/src/tangle/client_config.rs", "rank": 44, "score": 148643.5584464134 }, { "content": "pub fn setup_diff_chain_bench() -> (ResolvedIotaDocument, KeyPair) {\n\n let keypair: KeyPair = KeyPair::new_ed25519().unwrap();\n\n let mut document: IotaDocument = IotaDocument::new(&keypair).unwrap();\n\n\n\n document\n\n .sign_self(\n\n keypair.private(),\n\n document.default_signing_method().unwrap().id().clone(),\n\n )\n\n .unwrap();\n\n\n\n let mut resolved: ResolvedIotaDocument = ResolvedIotaDocument::from(document);\n\n resolved.set_message_id(MessageId::new([8; 32]));\n\n\n\n (resolved, keypair)\n\n}\n\n\n", "file_path": "identity/benches/diff_chain.rs", "rank": 45, "score": 148228.90918903257 }, { "content": "#[proc_macro_derive(Diff, attributes(diff))]\n\npub fn derive_diff(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n 
internal(input)\n\n}\n\n\n", "file_path": "identity-diff/derive/src/lib.rs", "rank": 46, "score": 146255.8615833986 }, { "content": "#[test]\n\nfn test_serde_enum() {\n\n let t = MixedEnum::B(10);\n\n let t2 = MixedEnum::C {\n\n y: String::from(\"test\"),\n\n };\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let json = serde_json::to_string(&diff).unwrap();\n\n\n\n let diff = serde_json::from_str(&json).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(\n\n DiffMixedEnum::C {\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 47, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_enum_opt() {\n\n let t = TestOpt::Inner(None);\n\n let t2 = TestOpt::Inner(None);\n\n\n\n let diff1 = t.diff(&t2).unwrap();\n\n\n\n let diff2 = t2.into_diff().unwrap();\n\n\n\n assert_eq!(diff1, diff2);\n\n\n\n let t = TestOpt::InnerS { a: None };\n\n let diff = t.into_diff().unwrap();\n\n\n\n let json = serde_json::to_string(&diff).unwrap();\n\n\n\n let expected = r#\"{\"InnerS\":{}}\"#;\n\n\n\n assert_eq!(expected, json);\n\n\n\n let t = TestOpt::InnerS { a: None };\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 48, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_unit_enum() {\n\n let t = UnitEnum::A;\n\n let t2 = UnitEnum::B;\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(DiffUnitEnum::B, diff);\n\n\n\n let res = UnitEnum::from_diff(diff).unwrap();\n\n\n\n assert_eq!(UnitEnum::B, res);\n\n}\n\n\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 49, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_enum_with_generics() {\n\n let t: EnumWithGeneric<String, usize> = EnumWithGeneric::A(String::from(\"test\"));\n\n let t2: EnumWithGeneric<String, usize> = 
EnumWithGeneric::B(10);\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(DiffEnumWithGeneric::B(Some(10_usize.into_diff().unwrap())), diff);\n\n\n\n let res = EnumWithGeneric::from_diff(diff).unwrap();\n\n\n\n assert_eq!(EnumWithGeneric::B(10), res);\n\n}\n\n\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 50, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_tuple_enum() {\n\n let t = TupleEnum::A(10);\n\n let t2 = TupleEnum::C(20, 30);\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(\n\n DiffTupleEnum::C(Some(20_usize.into_diff().unwrap()), Some(30_usize.into_diff().unwrap())),\n\n diff\n\n );\n\n\n\n let res = TupleEnum::from_diff(diff).unwrap();\n\n\n\n assert_eq!(TupleEnum::C(20, 30), res);\n\n}\n\n\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 51, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_nested_enum() {\n\n let t = NestedEnum::Nest(InnerEnum::default());\n\n let t2 = NestedEnum::Nest(InnerEnum::Inner {\n\n y: InnerStruct { y: 10 },\n\n });\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(\n\n DiffNestedEnum::Nest(Some(DiffInnerEnum::Inner {\n\n y: Some(DiffInnerStruct {\n\n y: Some(10_usize.into_diff().unwrap())\n\n })\n\n })),\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 52, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_ignore_enum() {\n\n let t = IgnoreEnum::A { x: 10, y: 10 };\n\n let t2 = IgnoreEnum::B(String::from(\"test\"), 30);\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n let expected = IgnoreEnum::B(String::new(), 
30);\n\n\n\n assert_eq!(expected, res)\n\n}\n\n\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 53, "score": 144865.50500698815 }, { "content": "#[test]\n\nfn test_mixed_enum() {\n\n let t = MixedEnum::B(10);\n\n let t2 = MixedEnum::C {\n\n y: String::from(\"test\"),\n\n };\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let res = t.merge(diff).unwrap();\n\n\n\n assert_eq!(t2, res);\n\n\n\n let diff = t2.into_diff().unwrap();\n\n\n\n assert_eq!(\n\n DiffMixedEnum::C {\n\n y: Some(String::from(\"test\").into_diff().unwrap())\n\n },\n\n diff\n\n );\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 54, "score": 144865.50500698815 }, { "content": "/// Generates a new pair of public/private Ed25519 keys.\n\n///\n\n/// Note that the private key is a 32-byte seed in compliance with [RFC 8032](https://datatracker.ietf.org/doc/html/rfc8032#section-3.2).\n\n/// Other implementations often use another format. See [this blog post](https://blog.mozilla.org/warner/2011/11/29/ed25519-keys/) for further explanation.\n\npub fn generate_ed25519_keypair() -> Result<(PublicKey, PrivateKey)> {\n\n let secret: ed25519::SecretKey = ed25519::SecretKey::generate()?;\n\n let public: ed25519::PublicKey = secret.public_key();\n\n\n\n let private: PrivateKey = secret.to_bytes().to_vec().into();\n\n let public: PublicKey = public.to_bytes().to_vec().into();\n\n\n\n Ok((public, private))\n\n}\n\n\n\n// Reconstructs a pair of public/private Ed25519 keys from an ed25519::SecretKey.\n\npub(crate) fn keypair_from_ed25519_private_key(private_key: ed25519::SecretKey) -> (PublicKey, PrivateKey) {\n\n let public: ed25519::PublicKey = private_key.public_key();\n\n\n\n let private: PrivateKey = private_key.to_bytes().to_vec().into();\n\n let public: PublicKey = public.to_bytes().to_vec().into();\n\n\n\n (public, private)\n\n}\n\n\n", "file_path": "identity-core/src/utils/ed25519.rs", "rank": 55, "score": 144843.16065896256 }, { "content": "pub fn extract_b64(header: 
Option<&JwsHeader>) -> bool {\n\n header.and_then(JwsHeader::b64).unwrap_or(DEFAULT_B64)\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 56, "score": 144717.1831531428 }, { "content": "/// Encodes the given `data` as base64.\n\npub fn encode_b64<T>(data: &T) -> String\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n base64::encode_config(data.as_ref(), base64::URL_SAFE)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use quickcheck_macros::quickcheck;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_decode_b58_empty() {\n\n assert_eq!(decode_b58(\"\").unwrap(), Vec::<u8>::new());\n\n }\n\n\n\n #[test]\n\n fn test_decode_b64_empty() {\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 57, "score": 143250.50537286454 }, { "content": "/// Encodes the given `data` as base58-btc.\n\npub fn encode_b58<T>(data: &T) -> String\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n bs58::encode(data).with_alphabet(bs58::Alphabet::BITCOIN).into_string()\n\n}\n\n\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 58, "score": 143250.50537286454 }, { "content": "/// Convert an error into an idiomatic [js_sys::Error].\n\npub fn wasm_error<'a, E>(error: E) -> JsValue\n\nwhere\n\n E: Into<WasmError<'a>>,\n\n{\n\n let wasm_err: WasmError = error.into();\n\n JsValue::from(wasm_err)\n\n}\n\n\n", "file_path": "bindings/wasm/src/error.rs", "rank": 59, "score": 142038.24574001456 }, { "content": "/// checks to see if a field's type is `Option`. 
This logic is necessary to find cases where fields contain nested\n\n/// Options and avoid a `Some(None)` case.\n\npub fn extract_option_segment(path: &Path) -> Option<&PathSegment> {\n\n let idents_of_path = path.segments.iter().fold(String::new(), |mut acc, v| {\n\n acc.push_str(&v.ident.to_string());\n\n acc.push('|');\n\n acc\n\n });\n\n vec![\"Option|\", \"std|option|Option|\", \"core|option|Option|\"]\n\n .into_iter()\n\n .find(|s| idents_of_path == *s)\n\n .and_then(|_| path.segments.last())\n\n}\n\n\n", "file_path": "identity-diff/derive/src/utils.rs", "rank": 60, "score": 141828.24811119656 }, { "content": "pub fn encode_b64_json<T>(data: &T) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n to_vec(data).map(encode_b64).map_err(Into::into)\n\n}\n\n\n", "file_path": "libjose/src/utils/base64.rs", "rank": 61, "score": 140571.5679597363 }, { "content": "pub fn random_bytes(size: usize) -> Result<Vec<u8>> {\n\n let mut bytes: Vec<u8> = vec![0; size];\n\n\n\n rand::fill(&mut bytes)?;\n\n\n\n Ok(bytes)\n\n}\n", "file_path": "libjose/src/utils/crypto/random.rs", "rank": 62, "score": 140571.5679597363 }, { "content": "#[async_trait::async_trait]\n\npub trait Storage: Debug + Send + Sync + 'static {\n\n /// Sets the account password.\n\n async fn set_password(&self, password: EncryptionKey) -> Result<()>;\n\n\n\n /// Write any unsaved changes to disk.\n\n async fn flush_changes(&self) -> Result<()>;\n\n\n\n /// Creates a new keypair at the specified `location`\n\n async fn key_new(&self, did: &IotaDID, location: &KeyLocation) -> Result<PublicKey>;\n\n\n\n /// Inserts a private key at the specified `location`.\n\n async fn key_insert(&self, did: &IotaDID, location: &KeyLocation, private_key: PrivateKey) -> Result<PublicKey>;\n\n\n\n /// Retrieves the public key at the specified `location`.\n\n async fn key_get(&self, did: &IotaDID, location: &KeyLocation) -> Result<PublicKey>;\n\n\n\n /// Deletes the keypair specified by `location`.\n\n async fn key_del(&self, 
did: &IotaDID, location: &KeyLocation) -> Result<()>;\n\n\n\n /// Signs `data` with the private key at the specified `location`.\n", "file_path": "identity-account-storage/src/storage/traits.rs", "rank": 63, "score": 138218.72127105307 }, { "content": "pub fn create_message(header: &[u8], claims: &[u8]) -> Vec<u8> {\n\n let capacity: usize = header.len() + 1 + claims.len();\n\n let mut message: Vec<u8> = Vec::with_capacity(capacity);\n\n\n\n message.extend(header);\n\n message.push(b'.');\n\n message.extend(claims);\n\n message\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 64, "score": 138168.60890292475 }, { "content": "pub fn parse_cek(cek: Option<&str>) -> Result<Vec<u8>> {\n\n cek.ok_or(Error::EncError(\"CEK (missing)\")).and_then(decode_b64)\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 65, "score": 138168.60890292475 }, { "content": "fn decode_rsa_uint(data: impl AsRef<[u8]>) -> Result<RsaUint> {\n\n decode_b64(data).map(|data| RsaUint::from_bytes_be(&data))\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 66, "score": 137491.5373336404 }, { "content": "#[wasm_bindgen_test]\n\nfn test_document_new() {\n\n let keypair: WasmKeyPair = WasmKeyPair::new(KeyType::Ed25519).unwrap();\n\n let document: WasmDocument = WasmDocument::new(&keypair, None, None).unwrap();\n\n assert_eq!(document.id().network_name(), \"main\");\n\n assert!(document.default_signing_method().is_ok());\n\n}\n\n\n", "file_path": "bindings/wasm/tests/wasm.rs", "rank": 67, "score": 137035.4750340002 }, { "content": "#[allow(clippy::large_enum_variant)]\n\nenum ClientOrBuilder<T> {\n\n Client(T),\n\n Builder(ClientBuilder),\n\n}\n\n\n\nimpl<T> ResolverBuilder<T>\n\nwhere\n\n T: Clone + AsRef<Client> + From<Client>,\n\n{\n\n /// Constructs a new [`ResolverBuilder`] with no [`Clients`][Client] configured.\n\n pub fn new() -> Self {\n\n Self {\n\n clients: Default::default(),\n\n }\n\n }\n\n\n\n /// Inserts a [`Client`].\n\n ///\n\n /// 
NOTE: replaces any previous [`Client`] or [`ClientBuilder`] with the same [`NetworkName`].\n\n #[must_use]\n", "file_path": "identity-iota/src/tangle/resolver.rs", "rank": 68, "score": 137002.29501582397 }, { "content": "#[test]\n\nfn test_from_into() {\n\n let t = IntoFrom::SomeField(String::from(\"Test\"));\n\n\n\n let t2 = IntoFrom::Test(TestOpt::Inner(Some(10)));\n\n\n\n let diff = t.diff(&t2).unwrap();\n\n\n\n let json = serde_json::to_string(&diff).unwrap();\n\n\n\n let expected = r#\"{\"Inner\":10}\"#;\n\n\n\n assert_eq!(expected, json);\n\n\n\n let diff: DiffIntoFrom = serde_json::from_str(&json).unwrap();\n\n\n\n let merge = t2.merge(diff).unwrap();\n\n\n\n let expected = IntoFrom::Test(TestOpt::Inner(Some(10)));\n\n\n\n assert_eq!(expected, merge);\n\n}\n", "file_path": "identity-diff/tests/derive_enum_test.rs", "rank": 69, "score": 136888.23472862635 }, { "content": "pub fn create_pbes2_salt(algorithm: &str, p2s: &[u8]) -> Vec<u8> {\n\n let capacity: usize = algorithm.len() + 1 + p2s.len();\n\n let mut salt: Vec<u8> = Vec::with_capacity(capacity);\n\n\n\n // The salt value used is (UTF8(Alg) || 0x00 || Salt Input)\n\n salt.extend_from_slice(algorithm.as_bytes());\n\n salt.push(0x0);\n\n salt.extend_from_slice(p2s);\n\n salt\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 70, "score": 136746.35164125677 }, { "content": "/// Decodes the given `data` as base64.\n\npub fn decode_b64<T>(data: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n base64::decode_config(data.as_ref(), base64::URL_SAFE).map_err(Error::DecodeBase64)\n\n}\n\n\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 71, "score": 135366.5269893492 }, { "content": "/// Decodes the given `data` as [Multibase] with an inferred [`base`](Base).\n\n///\n\n/// [Multibase]: https://datatracker.ietf.org/doc/html/draft-multiformats-multibase-03\n\npub fn decode_multibase<T>(data: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: AsRef<str> + ?Sized,\n\n{\n\n if 
data.as_ref().is_empty() {\n\n return Ok(Vec::new());\n\n }\n\n multibase::decode(&data)\n\n .map(|(_base, output)| output)\n\n .map_err(Error::DecodeMultibase)\n\n}\n\n\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 72, "score": 135366.5269893492 }, { "content": "/// Decodes the given `data` as base58-btc.\n\npub fn decode_b58<T>(data: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n bs58::decode(data)\n\n .with_alphabet(bs58::Alphabet::BITCOIN)\n\n .into_vec()\n\n .map_err(Error::DecodeBase58)\n\n}\n\n\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 73, "score": 135366.5269893492 }, { "content": "#[wasm_bindgen_test]\n\nfn test_document_sign_self() {\n\n let keypair: WasmKeyPair = WasmKeyPair::new(KeyType::Ed25519).unwrap();\n\n\n\n // Sign with DIDUrl method query.\n\n {\n\n let mut document: WasmDocument = WasmDocument::new(&keypair, None, None).unwrap();\n\n document\n\n .sign_self(\n\n &keypair,\n\n &JsValue::from(document.default_signing_method().unwrap().id()).unchecked_into(),\n\n )\n\n .unwrap();\n\n assert!(document.verify_document(&document).is_ok());\n\n }\n\n\n\n // Sign with string method query.\n\n {\n\n let mut document: WasmDocument = WasmDocument::new(&keypair, None, None).unwrap();\n\n document\n\n .sign_self(\n\n &keypair,\n\n &JsValue::from_str(&document.default_signing_method().unwrap().id().to_string()).unchecked_into(),\n\n )\n\n .unwrap();\n\n assert!(document.verify_document(&document).is_ok());\n\n }\n\n}\n\n\n", "file_path": "bindings/wasm/tests/wasm.rs", "rank": 74, "score": 135308.67727493163 }, { "content": "/// Helper that takes two DID Documents (identities) for issuer and subject, and\n\n/// creates an unsigned credential with claims about subject by issuer.\n\npub fn issue_degree(issuer: &IotaDocument, subject: &IotaDocument) -> Result<Credential> {\n\n // Create VC \"subject\" field containing subject ID and claims about it.\n\n let subject: Subject = 
Subject::from_json_value(json!({\n\n \"id\": subject.id().as_str(),\n\n \"name\": \"Alice\",\n\n \"degree\": {\n\n \"type\": \"BachelorDegree\",\n\n \"name\": \"Bachelor of Science and Arts\",\n\n },\n\n \"GPA\": \"4.0\",\n\n }))?;\n\n\n\n // Build credential using subject above and issuer.\n\n let credential: Credential = CredentialBuilder::default()\n\n .id(Url::parse(\"https://example.edu/credentials/3732\")?)\n\n .issuer(Url::parse(issuer.id().as_str())?)\n\n .type_(\"UniversityDegreeCredential\")\n\n .subject(subject)\n\n .build()?;\n\n\n", "file_path": "examples/low-level-api/common.rs", "rank": 75, "score": 134027.26391220756 }, { "content": "#[derive(Debug)]\n\nstruct DerefContext(Dereference, Instant);\n\n\n\nimpl DerefContext {\n\n fn new() -> Self {\n\n Self(Dereference::new(), Instant::now())\n\n }\n\n\n\n fn set_content(&mut self, value: impl Into<Resource>) {\n\n self.0.content = Some(value.into());\n\n }\n\n\n\n fn set_metadata(&mut self, value: DocumentMetadata) {\n\n self.0.content_metadata = Some(value);\n\n }\n\n\n\n fn set_error(&mut self, value: ErrorKind) {\n\n self.0.metadata.error = Some(value);\n\n }\n\n\n\n fn finish_error(mut self, value: ErrorKind) -> Dereference {\n\n self.set_error(value);\n\n self.finish()\n\n }\n\n\n\n fn finish(mut self) -> Dereference {\n\n self.0.metadata.duration = self.1.elapsed();\n\n self.0\n\n }\n\n}\n\n\n", "file_path": "identity-did/src/resolution/impls.rs", "rank": 76, "score": 133904.04474674357 }, { "content": "#[derive(Debug)]\n\nstruct ResolveContext(Resolution, Instant);\n\n\n\nimpl ResolveContext {\n\n fn new() -> Self {\n\n Self(Resolution::new(), Instant::now())\n\n }\n\n\n\n fn set_document(&mut self, value: CoreDocument) {\n\n self.0.document = Some(value);\n\n }\n\n\n\n fn set_metadata(&mut self, value: DocumentMetadata) {\n\n self.0.document_metadata = Some(value);\n\n }\n\n\n\n fn set_resolved(&mut self, value: CoreDID) {\n\n self.0.metadata.resolved = Some(value);\n\n }\n\n\n\n fn 
set_error(&mut self, value: ErrorKind) {\n", "file_path": "identity-did/src/resolution/impls.rs", "rank": 77, "score": 133904.04474674357 }, { "content": "/// Compute the Merkle root hash for the given slice of `leaves`.\n\n///\n\n/// The values in `leaves` can be a pre-hashed slice of [`struct@Hash<D>`] or\n\n/// any type that implements [`AsRef<[u8]>`][`AsRef`].\n\n///\n\n/// For types implementing [`AsRef<[u8]>`][`AsRef`], the values will be hashed\n\n/// according to the [`Digest`][`DigestExt`] implementation, `D`.\n\npub fn compute_merkle_root<D, L>(leaves: &[L]) -> Hash<D>\n\nwhere\n\n D: DigestExt,\n\n L: AsLeaf<D>,\n\n{\n\n #[inline]\n\n fn __generate<D, L>(digest: &mut D, leaves: &[L]) -> Hash<D>\n\n where\n\n D: DigestExt,\n\n L: AsLeaf<D>,\n\n {\n\n match leaves {\n\n [] => digest.hash_empty(),\n\n [leaf] => leaf.hash(digest),\n\n leaves => {\n\n let (this, that): _ = __split_pow2(leaves);\n\n\n\n let lhs: Hash<D> = __generate(digest, this);\n\n let rhs: Hash<D> = __generate(digest, that);\n\n\n\n digest.hash_node(&lhs, &rhs)\n\n }\n\n }\n\n }\n\n\n\n __generate::<D, L>(&mut D::new(), leaves)\n\n}\n\n\n", "file_path": "identity-core/src/crypto/merkle_tree/merkle.rs", "rank": 78, "score": 132731.84728870034 }, { "content": "/// Generates a list of public/private Ed25519 keys.\n\n///\n\n/// See [`generate_ed25519_keypair`].\n\npub fn generate_ed25519_keypairs(count: usize) -> Result<Vec<(PublicKey, PrivateKey)>> {\n\n (0..count).map(|_| generate_ed25519_keypair()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::generate_ed25519_keypair;\n\n\n\n #[test]\n\n fn generate_ed25519_keypair_has_expected_length() {\n\n let (public_key, private_key) = generate_ed25519_keypair().unwrap();\n\n assert_eq!(\n\n private_key.as_ref().len(),\n\n crypto::signatures::ed25519::SECRET_KEY_LENGTH\n\n );\n\n assert_eq!(public_key.as_ref().len(), private_key.as_ref().len());\n\n }\n\n}\n", "file_path": "identity-core/src/utils/ed25519.rs", "rank": 79, "score": 
132726.79978399526 }, { "content": "fn expand_ec_secret<T, E>(curve: EcCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T>\n\nwhere\n\n E: Into<Error>,\n\n{\n\n secret.expand(f, |jwk| {\n\n let params: &JwkParamsEc = jwk.try_ec_params()?;\n\n\n\n if params.try_ec_curve()? != curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n params\n\n .d\n\n .as_ref()\n\n .map(decode_b64)\n\n .transpose()?\n\n .ok_or_else(|| Error::KeyError(curve.name()))\n\n })\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 80, "score": 132547.5911087603 }, { "content": "fn expand_ed_public<T, E>(curve: EdCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T>\n\nwhere\n\n E: Into<Error>,\n\n{\n\n secret.expand(\n\n |arr| f(arr),\n\n |jwk| {\n\n let params: &JwkParamsOkp = jwk.try_okp_params()?;\n\n\n\n if params.try_ed_curve()? != curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n decode_b64(&params.x)\n\n },\n\n )\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 81, "score": 132547.5911087603 }, { "content": "fn expand_ecx_secret<T, E>(curve: EcxCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T> {\n\n secret.expand(\n\n |arr| f(arr).map_err(|_| Error::KeyError(curve.name())),\n\n |jwk| {\n\n let params: &JwkParamsOkp = jwk.try_okp_params()?;\n\n\n\n if params.try_ecx_curve()? 
!= curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n params\n\n .d\n\n .as_ref()\n\n .map(decode_b64)\n\n .transpose()?\n\n .ok_or_else(|| Error::KeyError(curve.name()))\n\n },\n\n )\n\n}\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 82, "score": 132547.5911087603 }, { "content": "fn expand_ec_public<T, E>(curve: EcCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T>\n\nwhere\n\n E: Into<Error>,\n\n{\n\n secret.expand(f, |jwk| {\n\n let params: &JwkParamsEc = jwk.try_ec_params()?;\n\n\n\n if params.try_ec_curve()? != curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n let bytes: Vec<u8> = decode_b64(&params.x)?\n\n .into_iter()\n\n .chain(decode_b64(&params.y)?.into_iter())\n\n .collect();\n\n\n\n Ok(bytes)\n\n })\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 83, "score": 132547.5911087603 }, { "content": "fn expand_ed_secret<T, E>(curve: EdCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T>\n\nwhere\n\n E: Into<Error>,\n\n{\n\n secret.expand(\n\n |arr| f(arr),\n\n |jwk| {\n\n let params: &JwkParamsOkp = jwk.try_okp_params()?;\n\n\n\n if params.try_ed_curve()? != curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n params\n\n .d\n\n .as_deref()\n\n .map(decode_b64)\n\n .transpose()?\n\n .ok_or_else(|| Error::KeyError(curve.name()))\n\n },\n\n )\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 84, "score": 132547.5911087603 }, { "content": "fn expand_ecx_public<T, E>(curve: EcxCurve, secret: Secret<'_>, f: impl Fn(&[u8]) -> Result<T, E>) -> Result<T> {\n\n secret.expand(\n\n |arr| f(arr).map_err(|_| Error::KeyError(curve.name())),\n\n |jwk| {\n\n let params: &JwkParamsOkp = jwk.try_okp_params()?;\n\n\n\n if params.try_ecx_curve()? 
!= curve {\n\n return Err(Error::KeyError(curve.name()));\n\n }\n\n\n\n decode_b64(&params.x)\n\n },\n\n )\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 85, "score": 132547.5911087603 }, { "content": "fn decode_rsa_uint_opt(data: Option<impl AsRef<[u8]>>) -> Result<RsaUint> {\n\n data.ok_or(Error::KeyError(\"RSA\")).and_then(decode_rsa_uint)\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/key_repr.rs", "rank": 86, "score": 132468.6921525762 }, { "content": "pub fn validate_b64(protected: Option<&JwsHeader>, unprotected: Option<&JwsHeader>) -> Result<()> {\n\n // The \"b64\" parameter MUST be integrity protected\n\n if unprotected.and_then(JwsHeader::b64).is_some() {\n\n return Err(Error::InvalidParam(\"b64 (unprotected)\"));\n\n }\n\n\n\n let b64: Option<bool> = protected.and_then(|header| header.b64());\n\n let crit: Option<&[String]> = protected.and_then(|header| header.crit());\n\n\n\n // The \"b64\" parameter MUST be included in the \"crit\" parameter values\n\n match (b64, crit) {\n\n (Some(_), Some(values)) if values.iter().any(|value| value == \"b64\") => Ok(()),\n\n (Some(_), None) => Err(Error::InvalidParam(\"b64 (non-critical)\")),\n\n _ => Ok(()),\n\n }\n\n}\n", "file_path": "libjose/src/utils/serde.rs", "rank": 87, "score": 131859.72621598325 }, { "content": "/// Creates a diff chain and updates it `n` times\n\npub fn update_diff_chain(n: usize, chain: &mut DocumentChain, keypair: &KeyPair) {\n\n let current_n = chain.diff().len();\n\n\n\n for i in current_n..(n + current_n) {\n\n let new: IotaDocument = {\n\n let mut this: IotaDocument = chain.current().clone().document;\n\n this.properties_mut().insert(i.to_string(), 123.into());\n\n this.metadata.updated = Timestamp::now_utc();\n\n this\n\n };\n\n\n\n let message_id = *chain.diff_message_id();\n\n let mut diff: DiffMessage = chain\n\n .current()\n\n .document\n\n .diff(\n\n &new,\n\n message_id,\n\n keypair.private(),\n\n 
chain.current().document.default_signing_method().unwrap().id(),\n\n )\n\n .unwrap();\n\n\n\n diff.set_message_id(message_id);\n\n assert!(chain.try_push_diff(diff).is_ok());\n\n }\n\n}\n\n\n", "file_path": "identity/benches/diff_chain.rs", "rank": 88, "score": 131859.72621598325 }, { "content": "/// Encodes the given `data` as [Multibase] with the given [`base`](Base), defaults to\n\n/// [`Base::Base58Btc`] if omitted.\n\n///\n\n/// NOTE: [`encode_multibase`] with [`Base::Base58Btc`] is different from [`encode_b58`] as\n\n/// the [Multibase] format prepends a base-encoding-character to the output.\n\n///\n\n/// [Multibase]: https://datatracker.ietf.org/doc/html/draft-multiformats-multibase-03\n\npub fn encode_multibase<T>(data: &T, base: Option<Base>) -> String\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n multibase::encode(multibase::Base::from(base.unwrap_or(Base::Base58Btc)), data)\n\n}\n\n\n", "file_path": "identity-core/src/utils/base_encoding.rs", "rank": 89, "score": 131859.72621598325 }, { "content": "/// Creates an integration chain and updates it `n` times\n\npub fn update_integration_chain(n: usize, chain: &mut DocumentChain, keypair: &KeyPair) {\n\n let current_n = chain.diff().len();\n\n\n\n for i in current_n..(n + current_n) {\n\n let mut new: ResolvedIotaDocument = chain.current().clone();\n\n\n\n let authentication: MethodRef<IotaDID> = MethodBuilder::default()\n\n .id(chain.id().to_url().join(&format!(\"#key-{}\", i)).unwrap())\n\n .controller(chain.id().clone())\n\n .key_type(MethodType::Ed25519VerificationKey2018)\n\n .key_data(MethodData::new_multibase(keypair.public()))\n\n .build()\n\n .map(Into::into)\n\n .unwrap();\n\n\n\n new.document.core_document_mut().authentication_mut().clear();\n\n new\n\n .document\n\n .core_document_mut()\n\n .authentication_mut()\n", "file_path": "identity/benches/diff_chain.rs", "rank": 90, "score": 131859.72621598325 }, { "content": "pub fn ed25519_to_x25519_secret<T>(secret: &T) -> Result<[u8; 
X25519_SECRET_KEY_LEN]>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n let mut ed25519: [u8; ED25519_SECRET_KEY_LEN] = secret\n\n .as_ref()\n\n .try_into()\n\n .map_err(|_| Error::KeyError(\"ed25519_to_x25519_secret\"))?;\n\n\n\n let mut x25519: [u8; X25519_SECRET_KEY_LEN] = [0; X25519_SECRET_KEY_LEN];\n\n let hash: Output<Sha512> = Sha512::digest(&ed25519);\n\n\n\n x25519.copy_from_slice(&hash[..X25519_SECRET_KEY_LEN]);\n\n x25519[0] &= 248;\n\n x25519[31] &= 127;\n\n x25519[31] |= 64;\n\n\n\n ed25519.zeroize();\n\n\n\n Ok(x25519)\n", "file_path": "libjose/src/utils/crypto/x25519.rs", "rank": 91, "score": 131463.4726485081 }, { "content": "pub fn ed25519_to_x25519_public<T>(public: &T) -> Result<[u8; X25519_PUBLIC_KEY_LEN]>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n{\n\n let mut ed25519: [u8; ED25519_PUBLIC_KEY_LEN] = public\n\n .as_ref()\n\n .try_into()\n\n .map_err(|_| Error::KeyError(\"ed25519_to_x25519_public\"))?;\n\n\n\n let x25519: [u8; X25519_PUBLIC_KEY_LEN] = CompressedEdwardsY(ed25519)\n\n .decompress()\n\n .map(|edwards| edwards.to_montgomery().0)\n\n .ok_or(Error::KeyError(\"ed25519_to_x25519_public\"))?;\n\n\n\n ed25519.zeroize();\n\n\n\n Ok(x25519)\n\n}\n\n\n", "file_path": "libjose/src/utils/crypto/x25519.rs", "rank": 92, "score": 131463.4726485081 }, { "content": "pub fn filter_non_empty_bytes<'a, T, U: 'a>(value: T) -> Option<&'a [u8]>\n\nwhere\n\n T: Into<Option<&'a U>>,\n\n U: AsRef<[u8]> + ?Sized,\n\n{\n\n value.into().map(AsRef::as_ref).filter(|value| !value.is_empty())\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 93, "score": 129444.13880327664 }, { "content": "#[doc(hidden)]\n\npub trait PleaseDontMakeYourOwnResult<T> {\n\n #[allow(clippy::wrong_self_convention)]\n\n fn to_result(self) -> Result<T>;\n\n}\n\n\n\nimpl From<identity_did::did::DIDError> for Error {\n\n fn from(error: identity_did::did::DIDError) -> Self {\n\n identity_did::Error::from(error).into()\n\n }\n\n}\n", "file_path": 
"identity-account-storage/src/error.rs", "rank": 94, "score": 127790.42691908462 }, { "content": "pub fn create_aad<T, U>(header: Option<&T>, data: Option<&U>) -> Vec<u8>\n\nwhere\n\n T: AsRef<[u8]> + ?Sized,\n\n U: AsRef<[u8]> + ?Sized,\n\n{\n\n let header: &[u8] = header.map(AsRef::as_ref).unwrap_or_default();\n\n let data: &[u8] = data.map(AsRef::as_ref).unwrap_or_default();\n\n\n\n if data.is_empty() {\n\n header.to_vec()\n\n } else {\n\n create_message(header, data)\n\n }\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 95, "score": 125167.53682279124 }, { "content": "interface IClientConfig {\n\n /** Sets the IOTA Tangle network. */\n\n readonly network?: Network;\n\n\n\n /** Sets the DID message encoding used when publishing to the Tangle. */\n\n readonly encoding?: DIDMessageEncoding;\n\n\n\n /** Adds a list of IOTA nodes to use by their URLs. */\n\n readonly nodes?: string[];\n\n\n\n /** Sets an IOTA node by its URL to be used as primary node. */\n\n readonly primaryNode?: INodeAuth;\n\n\n\n /** Adds an IOTA node by its URL to be used as primary PoW node (for remote PoW). */\n\n readonly primaryPowNode?: INodeAuth;\n\n\n\n /** Adds a list of IOTA permanodes by their URLs. */\n\n readonly permanodes?: INodeAuth[];\n\n\n\n /** Adds a list of IOTA nodes to be used by their URLs. 
*/\n", "file_path": "bindings/wasm/src/tangle/client_config.rs", "rank": 96, "score": 124451.7268727393 }, { "content": "pub fn check_slice_param<T>(name: &'static str, slice: Option<&[T]>, value: &T) -> Result<()>\n\nwhere\n\n T: PartialEq,\n\n{\n\n if slice.map(|slice| slice.contains(value)).unwrap_or(true) {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidParam(name))\n\n }\n\n}\n\n\n", "file_path": "libjose/src/utils/serde.rs", "rank": 97, "score": 123904.2096873041 }, { "content": "/// Generate a proof-of-inclusion for the leaf node at the specified `index`.\n\npub fn compute_merkle_proof<D, L>(leaves: &[L], index: usize) -> Option<Proof<D>>\n\nwhere\n\n D: DigestExt,\n\n L: AsLeaf<D>,\n\n{\n\n #[inline]\n\n fn __generate<D, L>(digest: &mut D, path: &mut Vec<Node<D>>, leaves: &[L], index: usize)\n\n where\n\n D: DigestExt,\n\n L: AsLeaf<D>,\n\n {\n\n if leaves.len() > 1 {\n\n let k: usize = __pow2(leaves.len() as u32 - 1);\n\n let (this, that): _ = leaves.split_at(k);\n\n\n\n if index < k {\n\n __generate::<D, L>(digest, path, this, index);\n\n path.push(Node::R(compute_merkle_root::<D, L>(that)));\n\n } else {\n\n __generate::<D, L>(digest, path, that, index - k);\n", "file_path": "identity-core/src/crypto/merkle_tree/merkle.rs", "rank": 98, "score": 123119.65476829292 }, { "content": "pub fn validate_crit<T>(protected: Option<&T>, unprotected: Option<&T>, permitted: Option<&[String]>) -> Result<()>\n\nwhere\n\n T: JoseHeader,\n\n{\n\n // The \"crit\" parameter MUST be integrity protected\n\n if unprotected.map(|header| header.has_claim(\"crit\")).unwrap_or_default() {\n\n return Err(Error::InvalidParam(\"crit (unprotected)\"));\n\n }\n\n\n\n let values: Option<&[String]> = protected.and_then(|header| header.common().crit());\n\n\n\n // The \"crit\" parameter MUST NOT be an empty list\n\n if values.map(|values| values.is_empty()).unwrap_or_default() {\n\n return Err(Error::InvalidParam(\"crit (empty)\"));\n\n }\n\n\n\n let permitted: &[String] = 
permitted.unwrap_or_default();\n\n let values: &[String] = values.unwrap_or_default();\n\n\n\n for value in values {\n", "file_path": "libjose/src/utils/serde.rs", "rank": 99, "score": 122401.42741974373 } ]
Rust
integration_test/src/transactional_event_stream_writer_tests.rs
claudiofahey/pravega-client-rust
efaccc1ba896588ffb125cd72378b07f714609e5
use pravega_client::event_stream_writer::EventStreamWriter; use pravega_client_config::{ClientConfigBuilder, MOCK_CONTROLLER_URI}; use pravega_client_shared::*; use pravega_connection_pool::connection_pool::ConnectionPool; use pravega_controller_client::{ControllerClient, ControllerClientImpl}; use pravega_wire_protocol::connection_factory::{ConnectionFactory, SegmentConnectionManager}; use pravega_wire_protocol::wire_commands::{Replies, Requests}; use std::net::SocketAddr; use pravega_client::client_factory::ClientFactory; use pravega_client::raw_client::RawClient; use pravega_client::segment_reader::AsyncSegmentReader; use pravega_client::transaction::transactional_event_stream_writer::TransactionalEventStreamWriter; use pravega_client::transaction::Transaction; use tracing::{error, info}; use crate::pravega_service::PravegaStandaloneServiceConfig; use pravega_wire_protocol::client_connection::{ClientConnection, ClientConnectionImpl}; use pravega_wire_protocol::commands::{ Command, EventCommand, GetStreamSegmentInfoCommand, StreamSegmentInfoCommand, }; use std::sync::Arc; use std::time::Duration; use tokio::runtime::Handle; use tokio::time::sleep; pub fn test_transactional_event_stream_writer(config: PravegaStandaloneServiceConfig) { info!("test TransactionalEventStreamWriter"); let scope_name = Scope::from("testScopeTxnWriter".to_owned()); let stream_name = Stream::from("testStreamTxnWriter".to_owned()); let scoped_stream = ScopedStream { scope: scope_name.clone(), stream: stream_name.clone(), }; let config = ClientConfigBuilder::default() .controller_uri(MOCK_CONTROLLER_URI) .is_auth_enabled(config.auth) .is_tls_enabled(config.tls) .build() .expect("creating config"); let client_factory = ClientFactory::new(config); let handle = client_factory.get_runtime(); handle.block_on(setup_test( &scope_name, &stream_name, client_factory.get_controller_client(), )); let mut writer = handle.block_on(client_factory.create_transactional_event_stream_writer(scoped_stream, 
WriterId(0))); handle.block_on(test_commit_transaction(&mut writer)); handle.block_on(test_abort_transaction(&mut writer)); handle.block_on(test_write_and_read_transaction(&mut writer, &client_factory)); info!("test TransactionalEventStreamWriter passed"); } async fn test_commit_transaction(writer: &mut TransactionalEventStreamWriter) { info!("test commit transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); transaction .commit(Timestamp(0u64)) .await .expect("commit transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Committed, 10).await; info!("test commit transaction passed"); } async fn test_abort_transaction(writer: &mut TransactionalEventStreamWriter) { info!("test abort transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); transaction.abort().await.expect("abort transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Aborted, 10).await; info!("test abort transaction passed"); } async fn test_write_and_read_transaction( writer: &mut TransactionalEventStreamWriter, factory: &ClientFactory, ) { info!("test write transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); let num_events: i32 = 100; for _ in 0..num_events { transaction .write_event(None, String::from("hello").into_bytes()) .await .expect("write to transaction"); } let segments = factory .get_controller_client() .get_current_segments(&transaction.get_stream()) .await .expect("get segments"); for segment in segments.get_segments() { let segment_info = get_segment_info(&segment, factory).await; assert_eq!(segment_info.write_offset, 0); } 
transaction .commit(Timestamp(0u64)) .await .expect("commit transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Committed, 10).await; let mut count: i32 = 0; let data: &str = "hello"; for segment in segments.get_segments() { info!("creating reader for segment {:?}", segment); let reader = factory.create_async_event_reader(segment.clone()).await; let segment_info = get_segment_info(&segment, factory).await; let mut offset = 0; let end_offset = segment_info.write_offset; loop { if offset >= end_offset { break; } match reader.read(offset, data.len() as i32 + 8).await { Ok(reply) => { count += 1; offset += data.len() as i64 + 8; let expected = EventCommand { data: String::from("hello").into_bytes(), } .write_fields() .expect("serialize cmd"); assert_eq!(reply.data, expected); } Err(e) => { error!("error {:?} when reading from segment", e); panic!("failed to read data from segmentstore"); } } } } assert_eq!(count, num_events); info!("test write transaction passed"); } async fn setup_test(scope_name: &Scope, stream_name: &Stream, controller_client: &dyn ControllerClient) { controller_client .create_scope(scope_name) .await .expect("create scope"); info!("Scope created"); let request = StreamConfiguration { scoped_stream: ScopedStream { scope: scope_name.clone(), stream: stream_name.clone(), }, scaling: Scaling { scale_type: ScaleType::FixedNumSegments, target_rate: 0, scale_factor: 0, min_num_segments: 2, }, retention: Retention { retention_type: RetentionType::None, retention_param: 0, }, }; controller_client .create_stream(&request) .await .expect("create stream"); info!("Stream created"); } async fn get_segment_info(segment: &ScopedSegment, factory: &ClientFactory) -> StreamSegmentInfoCommand { let delegation_toke_provider = factory .create_delegation_token_provider(ScopedStream::from(segment)) .await; let cmd = GetStreamSegmentInfoCommand { request_id: 0, segment_name: segment.to_string(), delegation_token: delegation_toke_provider 
.retrieve_token(factory.get_controller_client()) .await, }; let request = Requests::GetStreamSegmentInfo(cmd); let rawclient = factory.create_raw_client(segment).await; let reply = rawclient .send_request(&request) .await .expect("send get segment info cmd"); if let Replies::StreamSegmentInfo(r) = reply { r } else { panic!("wrong reply from segment {:?}", reply); } } async fn wait_for_transaction_with_timeout( transaction: &Transaction, expected_status: TransactionStatus, timeout_second: i32, ) { for _i in 0..timeout_second { if expected_status == transaction.check_status().await.expect("get transaction status") { return; } sleep(Duration::from_secs(1)).await; } panic!( "timeout {:?} exceeded, Transaction is not {:?}", timeout_second, expected_status ); }
use pravega_client::event_stream_writer::EventStreamWriter; use pravega_client_config::{ClientConfigBuilder, MOCK_CONTROLLER_URI}; use pravega_client_shared::*; use pravega_connection_pool::connection_pool::ConnectionPool; use pravega_controller_client::{ControllerClient, ControllerClientImpl}; use pravega_wire_protocol::connection_factory::{ConnectionFactory, SegmentConnectionManager}; use pravega_wire_protocol::wire_commands::{Replies, Requests}; use std::net::SocketAddr; use pravega_client::client_factory::ClientFactory; use pravega_client::raw_client::RawClient; use pravega_client::segment_reader::AsyncSegmentReader; use pravega_client::transaction::transactional_event_stream_writer::TransactionalEventStreamWriter; use pravega_client::transaction::Transaction; use tracing::{error, info}; use crate::pravega_service::PravegaStandaloneServiceConfig; use pravega_wire_protocol::client_connection::{ClientConnection, ClientConnectionImpl}; use pravega_wire_protocol::commands::{ Command, EventCommand, GetStreamSegmentInfoCommand, StreamSegmentInfoCommand, }; use std::sync::Arc; use std::time::Duration; use tokio::runtime::Handle; use tokio::time::sleep; pub fn test_transactional_event_stream_writer(config: PravegaStandaloneServiceConfig) { info!("test TransactionalEventStreamWriter"); let scope_name = Scope::from("testScopeTxnWriter".to_owned()); let stream_name = Stream::from("testStreamTxnWriter".to_owned()); let scoped_stream = ScopedStream { scope: scope_name.clone(), stream: stream_name.clone(), }; let config = ClientConfigBuilder::default() .controller_uri(MOCK_CONTROLLER_URI) .is_auth_enabled(config.auth) .is_tls_enabled(config.tls) .build() .expect("creating config"); let client_factory = ClientFactory::new(config); let handle = client_factory.get_runtime(); handle.block_on(setup_test( &scope_name, &stream_name, client_factory.get_controller_client(), )); let mut writer = handle.block_on(client_factory.create_transactional_event_stream_writer(scoped_stream, 
WriterId(0))); handle.block_on(test_commit_transaction(&mut writer)); handle.block_on(test_abort_transaction(&mut writer)); handle.block_on(test_write_and_read_transaction(&mut writer, &client_factory)); info!("test TransactionalEventStreamWriter passed"); } async fn test_commit_transaction(writer: &mut TransactionalEventStreamWriter) { info!("test commit transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); transaction .commit(Timestamp(0u64)) .await .expect("commit transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Committed, 10).await; info!("test commit transaction passed"); } async fn test_abort_transaction(writer: &mut TransactionalEventStreamWriter) { info!("test abort transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); transaction.abort().await.expect("abort transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Aborted, 10).await; info!("test abort transaction passed"); } async fn test_write_and_read_transaction( writer: &mut TransactionalEventStreamWriter, factory: &ClientFactory, ) { info!("test write transaction"); let mut transaction = writer.begin().await.expect("begin transaction"); assert_eq!( transaction.check_status().await.expect("get transaction status"), TransactionStatus::Open ); let num_events: i32 = 100; for _ in 0..num_events { transaction .write_event(None, String::from("hello").into_bytes()) .await .expect("write to transaction"); } let segments = factory .get_controller_client() .get_current_segments(&transaction.get_stream()) .await .expect("get segments"); for segment in segments.get_segments() { let segment_info = get_segment_info(&segment, factory).await; assert_eq!(segment_info.write_offset, 0); } 
transaction .commit(Timestamp(0u64)) .await .expect("commit transaction"); wait_for_transaction_with_timeout(&transaction, TransactionStatus::Committed, 10).await; let mut count: i32 = 0; let data: &str = "hello"; for segment in segments.get_segments() { info!("creating reader for segment {:?}", segment); let reader = factory.create_async_event_reader(segment.clone()).await; let segment_info = get_segment_info(&segment, factory).await; let mut offset = 0; let end_offset = segment_info.write_offset; loop { if offset >= end_offset { break; } match reader.read(offset, data.len() as i32 + 8).await { Ok(reply) => { count += 1; offset += data.len() as i64 + 8; let expected = EventCommand { data: String::from("hello").into_bytes(), } .write_fields() .expect("serialize cmd"); assert_eq!(reply.data, expected); } Err(e) => { error!("error {:?} when reading from segment", e); panic!("failed to read data from segmentstore"); } } } } assert_eq!(count, num_events); info!("test write transaction passed"); } async fn setup_test(scope_name: &Scope, stream_name: &Stream, controller_client: &dyn ControllerClient) { controller_client .create_scope(scope_name) .await .expect("create scope"); info!("Scope created"); let request = StreamConfiguration { scoped_stream: ScopedStream { scope: scope_name.clone(), stream: stream_name.clone(), }, scaling: Scaling { scale_type: ScaleType::FixedNumSegments, target_rate: 0, scale_factor: 0, min_num_segments: 2, }, retention: Retention { retention_type: RetentionType::None, retention_param: 0, }, }; controller_client .create_stream(&request) .await .expect("create stream"); info!("Stream created"); } async fn get_segment_info(segment: &ScopedSegment, factory: &ClientFactory) -> StreamSegmentInfoCommand {
let cmd = GetStreamSegmentInfoCommand { request_id: 0, segment_name: segment.to_string(), delegation_token: delegation_toke_provider .retrieve_token(factory.get_controller_client()) .await, }; let request = Requests::GetStreamSegmentInfo(cmd); let rawclient = factory.create_raw_client(segment).await; let reply = rawclient .send_request(&request) .await .expect("send get segment info cmd"); if let Replies::StreamSegmentInfo(r) = reply { r } else { panic!("wrong reply from segment {:?}", reply); } } async fn wait_for_transaction_with_timeout( transaction: &Transaction, expected_status: TransactionStatus, timeout_second: i32, ) { for _i in 0..timeout_second { if expected_status == transaction.check_status().await.expect("get transaction status") { return; } sleep(Duration::from_secs(1)).await; } panic!( "timeout {:?} exceeded, Transaction is not {:?}", timeout_second, expected_status ); }
let delegation_toke_provider = factory .create_delegation_token_provider(ScopedStream::from(segment)) .await;
assignment_statement
[ { "content": "fn test_simple_write_and_read(writer: &mut ByteStreamWriter, reader: &mut ByteStreamReader) {\n\n info!(\"test byte stream write and read\");\n\n let payload1 = vec![1; 4];\n\n let payload2 = vec![2; 4];\n\n\n\n let size1 = writer.write(&payload1).expect(\"write payload1 to byte stream\");\n\n assert_eq!(size1, 4);\n\n writer.flush().expect(\"flush byte stream writer\");\n\n writer.seek_to_tail();\n\n assert_eq!(writer.current_write_offset(), 4);\n\n\n\n let size2 = writer.write(&payload2).expect(\"write payload2 to byte stream\");\n\n assert_eq!(size2, 4);\n\n writer.flush().expect(\"flush byte stream writer\");\n\n\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n // Note: prefetching issues a read when reader was initialized and that read returned\n\n // with result of 4 when the first write is flushed.\n\n let bytes1 = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(bytes1, 4);\n\n assert_eq!(buf, payload1);\n\n\n\n let bytes2 = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(bytes2, 4);\n\n assert_eq!(buf, payload2);\n\n\n\n info!(\"test byte stream write and read passed\");\n\n}\n\n\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 0, "score": 321075.6392117342 }, { "content": "fn test_write_and_read_with_workload(writer: &mut ByteStreamWriter, reader: &mut ByteStreamReader) {\n\n info!(\"test write and read with workload\");\n\n for _i in 0..10 {\n\n for _j in 0..1000 {\n\n let buf = vec![1; 1024];\n\n let size = writer.write(&buf).expect(\"write to byte stream\");\n\n assert_eq!(size, 1024)\n\n }\n\n writer.flush().expect(\"flush data\");\n\n }\n\n\n\n let mut read = 0;\n\n loop {\n\n let mut buf = vec![0; 1024];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n read += size;\n\n if read == 1024 * 1000 * 10 {\n\n break;\n\n }\n\n }\n\n assert_eq!(reader.available(), 0);\n\n\n\n info!(\"test write and read with workload passed\");\n\n}\n\n\n", "file_path": 
"integration_test/src/byte_stream_tests.rs", "rank": 1, "score": 321075.6392117343 }, { "content": "pub fn test_event_stream_writer(config: PravegaStandaloneServiceConfig) {\n\n // spin up Pravega standalone\n\n let scope_name = Scope::from(\"testScopeWriter\".to_owned());\n\n let stream_name = Stream::from(\"testStreamWriter\".to_owned());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.get_runtime();\n\n handle.block_on(utils::create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n\n\n let scoped_stream = ScopedStream {\n", "file_path": "integration_test/src/event_stream_writer_tests.rs", "rank": 3, "score": 268083.2239034091 }, { "content": "pub fn test_event_stream_reader(config: PravegaStandaloneServiceConfig) {\n\n info!(\"test event stream reader\");\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n\n\n let runtime = client_factory.get_runtime();\n\n test_read_large_events(&client_factory, &runtime);\n\n test_multi_reader_multi_segments_tail_read(&client_factory, &runtime);\n\n runtime.block_on(test_read_api(&client_factory));\n\n runtime.block_on(test_stream_scaling(&client_factory));\n\n runtime.block_on(test_release_segment(&client_factory));\n\n runtime.block_on(test_release_segment_at(&client_factory));\n\n test_multiple_readers(&client_factory);\n\n test_reader_offline(&client_factory);\n\n test_segment_rebalance(&client_factory);\n\n}\n\n\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 4, "score": 267579.61632342846 }, { 
"content": "fn test_truncation(writer: &mut ByteStreamWriter, reader: &mut ByteStreamReader, rt: &mut Runtime) {\n\n info!(\"test byte stream truncate\");\n\n // truncate\n\n rt.block_on(writer.truncate_data_before(4)).expect(\"truncate\");\n\n\n\n // read before truncation point\n\n reader.seek(SeekFrom::Start(0)).expect(\"seek to start\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let result = reader.read(&mut buf);\n\n assert!(result.is_err());\n\n\n\n // get current head\n\n let head = reader.current_head().expect(\"get current head\");\n\n assert_eq!(head, 4);\n\n\n\n // read from current head\n\n reader.seek(SeekFrom::Start(head)).expect(\"seek to start\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 4);\n\n assert_eq!(buf, vec![2; 4]);\n\n\n\n info!(\"test byte stream truncate passed\");\n\n}\n\n\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 5, "score": 264384.69538856077 }, { "content": "fn test_seal(writer: &mut ByteStreamWriter, reader: &mut ByteStreamReader, rt: &mut Runtime) {\n\n info!(\"test byte stream seal\");\n\n // seal\n\n rt.block_on(writer.seal()).expect(\"seal\");\n\n\n\n // read sealed segment\n\n reader.seek(SeekFrom::Start(4)).expect(\"seek to start\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 4);\n\n assert_eq!(buf, vec![2; 4]);\n\n\n\n // read beyond sealed segment\n\n let mut buf: Vec<u8> = vec![0; 8];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 0);\n\n assert_eq!(buf, vec![0; 8]);\n\n\n\n info!(\"test byte stream seal passed\");\n\n}\n\n\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 6, "score": 264384.69538856077 }, { "content": "fn run_byte_stream_read(reader: &mut ByteStreamReader) {\n\n for _i in 0..EVENT_NUM {\n\n let mut read = 0;\n\n let mut buf = vec![0; 
EVENT_SIZE];\n\n while read != EVENT_SIZE {\n\n let size = reader.read(&mut buf[read..]).expect(\"byte stream read\");\n\n read += size;\n\n }\n\n }\n\n}\n\n\n\ncriterion_group! {\n\n name = event_writer_performance;\n\n config = Criterion::default().sample_size(10);\n\n targets = event_stream_writer_mock_server,event_stream_writer_mock_server_no_block,event_stream_writer_mock_connection,event_stream_writer_mock_connection_no_block\n\n}\n\ncriterion_group! {\n\n name = event_reader_performance;\n\n config = Criterion::default().sample_size(10);\n\n targets = event_stream_read_mock_server\n", "file_path": "benches/benchmark.rs", "rank": 7, "score": 258349.5634321242 }, { "content": "fn set_up_byte_stream_reader(config: ClientConfig, rt: &mut Runtime) -> ByteStreamReader {\n\n let scope_name: Scope = Scope::from(\"testByteReaderPerf\".to_string());\n\n let stream_name = Stream::from(\"testByteReaderPerf\".to_string());\n\n let client_factory = ClientFactory::new(config.clone());\n\n let controller_client = client_factory.get_controller_client();\n\n rt.block_on(create_scope_stream(\n\n controller_client,\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n let scoped_segment = ScopedSegment::from(\"testByteReaderPerf/testByteReaderPerf/0\");\n\n client_factory.create_byte_stream_reader(scoped_segment)\n\n}\n\n\n\nasync fn create_scope_stream(\n\n controller_client: &dyn ControllerClient,\n\n scope_name: &Scope,\n\n stream_name: &Stream,\n\n segment_number: i32,\n", "file_path": "benches/benchmark.rs", "rank": 8, "score": 239225.52858477487 }, { "content": "fn create_segment(segment_number: i64) -> ScopedSegment {\n\n ScopedSegment {\n\n scope: Scope::new(\"Scope\".to_string()),\n\n stream: Stream::new(\"Stream\".to_string()),\n\n segment: Segment::new(segment_number),\n\n }\n\n}\n", "file_path": "shared/src/test.rs", "rank": 9, "score": 235673.82902224298 }, { "content": "fn test_multiple_writers_conditional_append(factory: &ClientFactory, segment: ScopedSegment) 
{\n\n info!(\"test byte stream multiple writers concurrent append\");\n\n let mut writer1 = factory.create_byte_stream_writer(segment.clone());\n\n let payload = vec![1; 1024];\n\n let _num = writer1.write(&payload).expect(\"writer1 write payload\");\n\n assert_eq!(writer1.current_write_offset(), 1024);\n\n writer1.flush().expect(\"writer1 flush\");\n\n writer1.seek_to_tail();\n\n assert_eq!(writer1.current_write_offset(), 1024);\n\n\n\n let mut writer2 = factory.create_byte_stream_writer(segment);\n\n writer2.seek_to_tail();\n\n let _num = writer2.write(&payload).expect(\"writer2 write payload\");\n\n assert_eq!(writer2.current_write_offset(), 2048);\n\n writer2.flush().expect(\"writer2 flush\");\n\n\n\n let writer_res = writer1.write(&payload);\n\n let flush_res = writer1.flush();\n\n assert!(writer_res.is_err() || flush_res.is_err());\n\n\n\n writer1.seek_to_tail();\n\n let _num = writer1.write(&payload).expect(\"writer1 write payload\");\n\n assert_eq!(writer1.current_write_offset(), 3072);\n\n writer1.flush().expect(\"writer1 flush\");\n\n writer1.seek_to_tail();\n\n assert_eq!(writer1.current_write_offset(), 3072);\n\n info!(\"test byte stream multiple writers concurrent append passed\");\n\n}\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 10, "score": 235160.99147583696 }, { "content": "pub fn test_byte_stream(config: PravegaStandaloneServiceConfig) {\n\n // spin up Pravega standalone\n\n let mut rt = Runtime::new().unwrap();\n\n let scope_name = Scope::from(\"testScopeByteStream\".to_owned());\n\n let stream_name = Stream::from(\"testStreamByteStream\".to_owned());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.get_runtime();\n\n handle.block_on(utils::create_scope_stream(\n\n 
client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 11, "score": 233309.85532373446 }, { "content": "fn test_seek(reader: &mut ByteStreamReader) {\n\n info!(\"test byte stream seek\");\n\n // seek to start\n\n reader.seek(SeekFrom::Start(0)).expect(\"seek to start\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 4);\n\n assert_eq!(buf, vec![1; 4]);\n\n\n\n // seek to current\n\n reader.seek(SeekFrom::Current(-4)).expect(\"seek to current\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 4);\n\n assert_eq!(buf, vec![1; 4]);\n\n\n\n // seek to end\n\n reader.seek(SeekFrom::End(-4)).expect(\"seek to current\");\n\n let mut buf: Vec<u8> = vec![0; 4];\n\n let size = reader.read(&mut buf).expect(\"read from byte stream\");\n\n assert_eq!(size, 4);\n\n assert_eq!(buf, vec![2; 4]);\n\n info!(\"test byte stream seek passed\");\n\n}\n\n\n", "file_path": "integration_test/src/byte_stream_tests.rs", "rank": 12, "score": 216699.6410085629 }, { "content": "fn test_multi_reader_multi_segments_tail_read(client_factory: &ClientFactory, rt: &Runtime) {\n\n let scope_name = Scope::from(\"testMultiReaderMultiSegmentsTailRead\".to_owned());\n\n let stream_name = Stream::from(\"testMultiReaderMultiSegmentsTailRead\".to_owned());\n\n\n\n const NUM_EVENTS: usize = 2000;\n\n const EVENT_SIZE: usize = 1024;\n\n\n\n let new_stream = rt.block_on(create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 2,\n\n ));\n\n // write events only if the stream is created. 
This is useful if we are running the reader tests\n\n // multiple times.\n\n let factory = client_factory.clone();\n\n let scope_name_clone = scope_name.clone();\n\n let stream_name_clone = stream_name.clone();\n\n if new_stream {\n\n rt.spawn(async {\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 13, "score": 200720.88094043487 }, { "content": "pub fn test_tablesynchronizer(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.get_runtime();\n\n handle.block_on(test_insert(&client_factory));\n\n handle.block_on(test_remove(&client_factory));\n\n handle.block_on(test_insert_with_two_table_synchronizers(&client_factory));\n\n handle.block_on(test_remove_with_two_table_synchronizers(&client_factory));\n\n handle.block_on(test_insert_and_get_with_customize_struct(&client_factory));\n\n handle.block_on(test_fetching_updates_delta(&client_factory));\n\n}\n\n\n\nasync fn test_insert(client_factory: &ClientFactory) {\n\n info!(\"test insert\");\n\n let scope = Scope {\n", "file_path": "integration_test/src/tablesynchronizer_tests.rs", "rank": 14, "score": 196223.50051253423 }, { "content": "pub fn test_tablemap(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.get_runtime();\n\n handle.block_on(test_single_key_operations(&client_factory));\n\n handle.block_on(test_multiple_key_operations(&client_factory));\n\n handle.block_on(test_multiple_key_remove_operations(&client_factory));\n\n 
handle.block_on(test_iterators(&client_factory));\n\n}\n\n\n\nasync fn test_single_key_operations(client_factory: &ClientFactory) {\n\n let scope = Scope {\n\n name: \"tablemapScope\".to_string(),\n\n };\n", "file_path": "integration_test/src/tablemap_tests.rs", "rank": 15, "score": 196223.50051253423 }, { "content": "fn remove_suffix(value: &mut String, suffix: &str) {\n\n if value.ends_with(suffix) {\n\n let n = value.len();\n\n value.truncate(n - suffix.len());\n\n }\n\n}\n\n\n", "file_path": "integration_test/build.rs", "rank": 16, "score": 195888.88033294547 }, { "content": "pub fn test_controller_apis(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n\n\n let controller = client_factory.get_controller_client();\n\n let scope_name = Scope::from(\"testScope123\".to_owned());\n\n let stream_name = Stream::from(\"testStream\".to_owned());\n\n let handle = client_factory.get_runtime();\n\n\n\n let scope_result = handle.block_on(controller.create_scope(&scope_name));\n\n info!(\"Response for create_scope is {:?}\", scope_result);\n\n\n\n let stream_cfg = StreamConfiguration {\n\n scoped_stream: ScopedStream {\n\n scope: scope_name,\n", "file_path": "integration_test/src/controller_tests.rs", "rank": 17, "score": 193564.80223405146 }, { "content": "#[async_trait]\n\npub trait AsyncSegmentReader: Send + Sync {\n\n async fn read(&self, offset: i64, length: i32) -> StdResult<SegmentReadCommand, ReaderError>;\n\n}\n\n\n\n#[derive(new)]\n\npub struct AsyncSegmentReaderImpl {\n\n segment: ScopedSegment,\n\n endpoint: Mutex<PravegaNodeUri>,\n\n factory: ClientFactory,\n\n delegation_token_provider: DelegationTokenProvider,\n\n}\n\n\n\n#[async_trait]\n\nimpl AsyncSegmentReader for AsyncSegmentReaderImpl {\n\n async fn 
read(&self, offset: i64, length: i32) -> StdResult<SegmentReadCommand, ReaderError> {\n\n retry_async(self.factory.get_config().retry_policy, || async {\n\n let raw_client = self\n\n .factory\n\n .create_raw_client_for_endpoint(self.endpoint.lock().await.clone());\n\n match self.read_inner(offset, length, &raw_client).await {\n", "file_path": "src/segment_reader.rs", "rank": 18, "score": 190644.32968511514 }, { "content": "fn test_segment_rebalance(client_factory: &ClientFactory) {\n\n let h = client_factory.get_runtime();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = Stream::from(\"testsegrebalance\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 50;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream = create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 4,\n\n )\n\n .await;\n\n // write events only if the stream is created.\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 19, "score": 187323.97500919606 }, { "content": "// This benchmark test uses a mock server that replies ok to any requests instantly. 
It involves\n\n// kernel latency.\n\nfn event_stream_read_mock_server(c: &mut Criterion) {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let mut reader = rt.block_on(set_up_event_stream_reader(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start reader with mock server performance testing\");\n\n let mut last_offset: i64 = -1;\n\n c.bench_function(\"read 100KB mock server\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_reader(&mut reader, &mut last_offset));\n\n });\n\n });\n\n println!(\"reader performance testing finished\");\n\n}\n", "file_path": "benches/benchmark.rs", "rank": 20, "score": 186071.56334707286 }, { "content": "// This benchmark test uses a mock connection that replies ok to any requests instantly. 
It does not\n\n// involve kernel latency.\n\nfn event_stream_writer_mock_connection(c: &mut Criterion) {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .mock(true)\n\n .connection_type(ConnectionType::Mock(MockType::Happy))\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock connection performance testing\");\n\n c.bench_function(\"mock connection\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server connection testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 21, "score": 185757.9285931787 }, { "content": "// This benchmark test uses a mock server that replies ok to any requests instantly. It involves\n\n// kernel latency.\n\nfn event_stream_writer_mock_server(c: &mut Criterion) {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock server performance testing\");\n\n c.bench_function(\"mock server\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server performance testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 22, "score": 185757.9285931787 }, { "content": "fn byte_stream_reader_mock_server(c: &mut Criterion) {\n\n let mut rt = 
tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let mut reader = set_up_byte_stream_reader(config, &mut rt);\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start byte stream reader mock server performance testing\");\n\n c.bench_function(\"byte_stream_reader_mock_server\", |b| {\n\n b.iter(|| {\n\n run_byte_stream_read(&mut reader);\n\n });\n\n });\n\n info!(\"byte stream reader mock server testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 23, "score": 185306.54488377605 }, { "content": "// This benchmark test uses a mock connection that replies ok to any requests instantly. It does not\n\n// involve kernel latency. It does not wait for reply.\n\nfn event_stream_writer_mock_connection_no_block(c: &mut Criterion) {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .mock(true)\n\n .connection_type(ConnectionType::Mock(MockType::Happy))\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock connection(no block) performance testing\");\n\n c.bench_function(\"mock connection(no block)\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_no_block(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock connection(no block) testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 24, "score": 181230.47900813562 }, { "content": "// This benchmark test uses a mock server that replies ok to any requests instantly. It involves\n\n// kernel latency. 
It does not wait for reply.\n\nfn event_stream_writer_mock_server_no_block(c: &mut Criterion) {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock server(no block) performance testing\");\n\n c.bench_function(\"mock server(no block)\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_no_block(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server(no block) performance testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 25, "score": 181230.47900813562 }, { "content": "pub trait Request {\n\n fn get_request_id(&self) -> i64;\n\n fn must_log(&self) -> bool {\n\n true\n\n }\n\n}\n\n\n\n/**\n\n * trait for Reply\n\n */\n", "file_path": "wire_protocol/src/commands.rs", "rank": 26, "score": 175805.67538874596 }, { "content": "#[test]\n\nfn test_stream_segment_info() {\n\n let segment_name = String::from(\"segment-1\");\n\n let stream_segment_info = WireCommands::Replies(Replies::StreamSegmentInfo(StreamSegmentInfoCommand {\n\n request_id: 0,\n\n segment_name,\n\n exists: false,\n\n is_sealed: false,\n\n is_deleted: false,\n\n last_modified: 0,\n\n write_offset: 0,\n\n start_offset: 0,\n\n }));\n\n test_command(stream_segment_info);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 27, "score": 174023.61686359032 }, { "content": "fn test_read_large_events(client_factory: &ClientFactory, rt: &Runtime) {\n\n let scope_name = Scope::from(\"testReaderScaling\".to_owned());\n\n let stream_name = Stream::from(\"testReadLargeEvents\".to_owned());\n\n\n\n const NUM_EVENTS: usize = 1000;\n\n const 
EVENT_SIZE: usize = 1000;\n\n\n\n let new_stream = rt.block_on(create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n // write events only if the stream is created. This is useful if we are running the reader tests\n\n // multiple times.\n\n if new_stream {\n\n rt.block_on(write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n\n client_factory.clone(),\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 28, "score": 172506.2526278962 }, { "content": "#[test]\n\nfn test_get_stream_segment_info() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let get_stream_segment_info =\n\n WireCommands::Requests(Requests::GetStreamSegmentInfo(GetStreamSegmentInfoCommand {\n\n request_id: 1,\n\n segment_name,\n\n delegation_token: token,\n\n }));\n\n test_command(get_stream_segment_info);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 29, "score": 169198.54635948243 }, { "content": "fn test_multiple_readers(client_factory: &ClientFactory) {\n\n let h = client_factory.get_runtime();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = Stream::from(\"testMultiReader\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 50;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream = create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 4,\n\n )\n\n .await;\n\n // write events only if the stream is created.\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 30, "score": 168877.70423182065 }, { "content": "fn test_reader_offline(client_factory: &ClientFactory) {\n\n let h = client_factory.get_runtime();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = 
Stream::from(\"testReaderOffline\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream = create_scope_stream(\n\n client_factory.get_controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 4,\n\n )\n\n .await;\n\n // write events only if the stream is created.\n", "file_path": "integration_test/src/event_stream_reader_tests.rs", "rank": 31, "score": 168877.70423182065 }, { "content": "pub fn retry_internal<O, T, E>(\n\n retry_schedule: impl BackoffSchedule,\n\n mut operation: O,\n\n) -> Result<T, RetryError<E>>\n\nwhere\n\n O: FnMut(u64) -> RetryResult<T, E>,\n\n{\n\n let mut iterator = retry_schedule;\n\n let mut current_try = 1;\n\n let mut total_delay = Duration::default();\n\n // Must use return(for early return).\n\n loop {\n\n match operation(current_try) {\n\n RetryResult::Success(value) => return Ok(value),\n\n RetryResult::Retry(error) => {\n\n if let Some(delay) = iterator.next() {\n\n sleep(delay);\n\n current_try += 1;\n\n total_delay += delay;\n\n } else {\n", "file_path": "retry/src/retry_sync.rs", "rank": 32, "score": 164598.84765824385 }, { "content": "pub fn retry_sync<O, T, E>(retry_schedule: impl BackoffSchedule, mut operation: O) -> Result<T, RetryError<E>>\n\nwhere\n\n O: FnMut() -> RetryResult<T, E>,\n\n{\n\n retry_internal(retry_schedule, |_| operation())\n\n}\n\n\n", "file_path": "retry/src/retry_sync.rs", "rank": 33, "score": 164239.74530386436 }, { "content": "pub trait Command {\n\n const TYPE_CODE: i32;\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError>;\n\n fn read_from(input: &[u8]) -> Result<Self, CommandError>\n\n where\n\n Self: Sized;\n\n}\n\n\n\n/**\n\n * trait for Request\n\n */\n", "file_path": "wire_protocol/src/commands.rs", "rank": 34, "score": 147018.34657115332 }, { "content": "#[test]\n\nfn test_scoped_segment_to_string() {\n\n let seg = 
ScopedSegment::new(\n\n Scope::new(\"scope\".into()),\n\n Stream::new(\"stream\".into()),\n\n Segment::new(0),\n\n );\n\n assert_eq!(\"scope/stream/0.#epoch.0\", seg.to_string());\n\n}\n\n\n", "file_path": "shared/src/test.rs", "rank": 35, "score": 140994.3692654465 }, { "content": "#[test]\n\nfn test_create_segment() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let create_segment_command = WireCommands::Requests(Requests::CreateSegment(CreateSegmentCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n target_rate: 1,\n\n scale_type: 0,\n\n delegation_token: token,\n\n }));\n\n test_command(create_segment_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 36, "score": 140984.072862611 }, { "content": "#[test]\n\nfn test_segment_created() {\n\n let segment_name = String::from(\"segment-1\");\n\n let segment_created_cmd = WireCommands::Replies(Replies::SegmentCreated(SegmentCreatedCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n }));\n\n test_command(segment_created_cmd);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 37, "score": 140984.072862611 }, { "content": "#[test]\n\nfn test_segment_read() {\n\n let segment_name = String::from(\"segment-1\");\n\n let data = String::from(\"event-1\").into_bytes();\n\n let segment_read_command = WireCommands::Replies(Replies::SegmentRead(SegmentReadCommand {\n\n segment: segment_name,\n\n offset: 0,\n\n at_tail: true,\n\n end_of_segment: true,\n\n data,\n\n request_id: 1,\n\n }));\n\n test_command(segment_read_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 38, "score": 140929.47310116809 }, { "content": "#[test]\n\nfn test_read_segment() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let read_segment_command = WireCommands::Requests(Requests::ReadSegment(ReadSegmentCommand {\n\n segment: segment_name,\n\n offset: 0,\n\n 
suggested_length: 10,\n\n delegation_token: token,\n\n request_id: 1,\n\n }));\n\n test_command(read_segment_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 39, "score": 140929.47310116806 }, { "content": "#[test]\n\nfn test_create_scope_error() {\n\n let mut rt = Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .retry_policy(RetryWithBackoff::default().max_delay(Duration::from_micros(1)))\n\n .build()\n\n .expect(\"build client config\");\n\n\n\n let client = ControllerClientImpl::new(config, rt.handle().clone());\n\n\n\n let request = Scope::from(\"testScope124\".to_owned());\n\n let create_scope_result = rt.block_on(client.create_scope(&request));\n\n assert!(create_scope_result.is_err());\n\n match create_scope_result {\n\n Ok(_) => assert!(false, \"Failure excepted\"),\n\n Err(RetryError {\n\n error,\n\n total_delay: _,\n\n tries: _,\n\n }) => {\n\n assert!(error.can_retry());\n\n }\n\n };\n\n}\n\n\n", "file_path": "controller-client/src/test.rs", "rank": 40, "score": 137723.35645445733 }, { "content": "#[test]\n\nfn test_create_table_segment() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let create_table_segment_command =\n\n WireCommands::Requests(Requests::CreateTableSegment(CreateTableSegmentCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n delegation_token: token,\n\n }));\n\n test_command(create_table_segment_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 41, "score": 137105.9332250852 }, { "content": "#[test]\n\nfn test_create_stream_error() {\n\n let mut rt = Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .retry_policy(RetryWithBackoff::default().max_delay(Duration::from_micros(1)))\n\n .build()\n\n .expect(\"build client 
config\");\n\n let client = ControllerClientImpl::new(config, rt.handle().clone());\n\n\n\n let request = StreamConfiguration {\n\n scoped_stream: ScopedStream {\n\n scope: Scope::from(\"testScope123\".to_owned()),\n\n stream: Stream::from(\"testStream\".to_owned()),\n\n },\n\n scaling: Scaling {\n\n scale_type: ScaleType::FixedNumSegments,\n\n target_rate: 0,\n\n scale_factor: 0,\n\n min_num_segments: 1,\n\n },\n", "file_path": "controller-client/src/test.rs", "rank": 42, "score": 137035.43991206482 }, { "content": "fn get_segments_for_stream(\n\n stream: &ScopedStream,\n\n created_streams: &RwLockReadGuard<HashMap<ScopedStream, StreamConfiguration>>,\n\n) -> Result<Vec<ScopedSegment>, RetryError<ControllerError>> {\n\n let stream_config = created_streams.get(stream);\n\n if stream_config.is_none() {\n\n return Err(RetryError {\n\n error: ControllerError::OperationError {\n\n can_retry: false, // do not retry.\n\n operation: \"get segments for stream\".into(),\n\n error_msg: \"stream does not exist.\".into(),\n\n },\n\n total_delay: Duration::from_millis(1),\n\n tries: 0,\n\n });\n\n }\n\n\n\n let scaling_policy = stream_config.unwrap().scaling.clone();\n\n\n\n if scaling_policy.scale_type != ScaleType::FixedNumSegments {\n", "file_path": "controller-client/src/mock_controller.rs", "rank": 43, "score": 136427.9675188579 }, { "content": "pub fn init() {\n\n let subscriber = FmtSubscriber::builder()\n\n .with_ansi(true)\n\n .with_max_level(Level::DEBUG)\n\n .finish();\n\n\n\n let my_dispatch = Dispatch::new(subscriber);\n\n // this function can only be called once.\n\n dispatcher::set_global_default(my_dispatch).expect(\"set global dispatch\");\n\n}\n\n\n", "file_path": "src/trace.rs", "rank": 44, "score": 136237.21863072106 }, { "content": "type EventHandle = oneshot::Receiver<Result<(), SegmentWriterError>>;\n\n\n\nimpl Transaction {\n\n // maximum 16 MB total size of events could be held in memory\n\n const CHANNEL_CAPACITY: usize = 16 * 1024 * 1024;\n\n\n\n // 
Transaction should be created by transactional event stream writer, so this new method\n\n // is not public.\n\n async fn new(\n\n info: TransactionInfo,\n\n stream_segments: StreamSegments,\n\n handle: PingerHandle,\n\n factory: ClientFactory,\n\n closed: bool,\n\n ) -> Self {\n\n let (tx, rx) = create_channel(Self::CHANNEL_CAPACITY);\n\n if closed {\n\n return Transaction {\n\n info,\n\n sender: tx,\n", "file_path": "src/transaction/mod.rs", "rank": 45, "score": 135519.81654366196 }, { "content": "type EventHandle = oneshot::Receiver<Result<(), SegmentWriterError>>;\n\n\n\n/// Allows for writing raw bytes directly to a segment.\n\n///\n\n/// ByteStreamWriter does not frame, attach headers, or otherwise modify the bytes written to it in any\n\n/// way. So unlike [`EventStreamWriter`] the data written cannot be split apart when read.\n\n/// As such, any bytes written by this API can ONLY be read using [`ByteStreamReader`].\n\n///\n\n/// Similarly, multiple ByteStreamWriters write to the same segment as this will result in interleaved data,\n\n/// which is not desirable in most cases. ByteStreamWriter uses Conditional Append to make sure that writers\n\n/// are aware of the content in the segment. If another process writes data to the segment after this one began writing,\n\n/// all subsequent writes from this writer will not be written and [`flush`] will fail. 
This prevents data from being accidentally interleaved.\n\n///\n\n/// [`EventStreamWriter`]: crate::event_stream_writer::EventStreamWriter\n\n/// [`ByteStreamReader`]: ByteStreamReader\n\n/// [`flush`]: ByteStreamWriter::flush\n\n///\n\n/// # Note\n\n///\n\n/// The ByteStreamWriter implementation provides [`retry`] logic to handle connection failures and service host\n", "file_path": "src/byte_stream.rs", "rank": 46, "score": 135082.8216789721 }, { "content": "/// Deserialize the Value into the type T by using cbor deserializer.\n\n/// This method would be used by the user after calling get() of table_synchronizer.\n\npub fn deserialize_from<T>(reader: &[u8]) -> Result<T, serde_cbor::error::Error>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n serde_cbor::de::from_slice(reader)\n\n}\n\n\n\nasync fn conditionally_write(\n\n mut updates_generator: impl FnMut(&mut Table) -> Result<Option<String>, SynchronizerError>,\n\n table_synchronizer: &mut TableSynchronizer,\n\n mut retry: i32,\n\n) -> Result<Option<String>, SynchronizerError> {\n\n let mut update_result = None;\n\n\n\n while retry > 0 {\n\n let map = table_synchronizer.get_outer_map();\n\n let map_version = table_synchronizer.get_inner_map_version();\n\n\n\n let mut to_update = Table {\n\n map,\n", "file_path": "src/table_synchronizer.rs", "rank": 47, "score": 135046.72653863396 }, { "content": "fn wait_for_standalone_with_timeout(expected_status: bool, timeout_second: i32) {\n\n for _i in 0..timeout_second {\n\n if expected_status == check_standalone_status() {\n\n return;\n\n }\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n panic!(\n\n \"timeout {} exceeded, Pravega standalone is in status {} while expected {}\",\n\n timeout_second, !expected_status, expected_status\n\n );\n\n}\n\n\n", "file_path": "integration_test/src/lib.rs", "rank": 48, "score": 134915.51974132462 }, { "content": "fn test_command(command: WireCommands) -> WireCommands {\n\n let encoded: Vec<u8> = 
command.write_fields().unwrap();\n\n let decoded = WireCommands::read_from(&encoded).unwrap();\n\n assert_eq!(command, decoded);\n\n decoded\n\n}\n", "file_path": "wire_protocol/src/tests.rs", "rank": 49, "score": 132025.1620617562 }, { "content": "/// scope: scope_name.clone(),\n\n/// stream: stream_name.clone(),\n\n/// };\n\n/// // omit the step to create scope and stream in Pravega\n\n///\n\n/// let config = ClientConfigBuilder::default()\n\n/// .controller_uri(PravegaNodeUri::from(\"127.0.0.2:9091\".to_string()))\n\n/// .build()\n\n/// .expect(\"creating config\");\n\n/// let client_factory = ClientFactory::new(config.clone());\n\n/// let mut writer = client_factory\n\n/// .create_transactional_event_stream_writer(scoped_stream.clone(), WriterId(0))\n\n/// .await;\n\n///\n\n/// // start a transaction\n\n/// let mut transaction = writer.begin().await.expect(\"begin transaction\");\n\n///\n\n/// // do something with it\n\n/// transaction.write_event(None, String::from(\"hello\").into_bytes()).await.unwrap();\n\n///\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 50, "score": 131285.07244767743 }, { "content": "use tracing::{info, info_span};\n\nuse tracing_futures::Instrument;\n\n\n\n/// A writer that writes Events to an Event stream transactionally. 
Events that are written to the\n\n/// transaction can be committed atomically, which means that reader cannot see any writes prior to committing\n\n/// and will not see any writes if the transaction is aborted.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use tokio;\n\n/// use pravega_client_shared::{Timestamp, ScopedStream, Scope, Stream, WriterId, PravegaNodeUri};\n\n/// use pravega_client::client_factory::ClientFactory;\n\n/// use pravega_client_config::ClientConfigBuilder;\n\n///\n\n/// #[tokio::main]\n\n/// async fn main() {\n\n/// let scope_name = Scope::from(\"txnScope\".to_owned());\n\n/// let stream_name = Stream::from(\"txnStream\".to_owned());\n\n/// let scoped_stream = ScopedStream {\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 51, "score": 131281.31479859533 }, { "content": "/// // commit the transaction\n\n/// transaction.commit(Timestamp(0u64)).await;\n\n/// }\n\n/// ```\n\npub struct TransactionalEventStreamWriter {\n\n stream: ScopedStream,\n\n writer_id: WriterId,\n\n factory: ClientFactory,\n\n pinger_handle: PingerHandle,\n\n delegation_token_provider: Arc<DelegationTokenProvider>,\n\n}\n\n\n\nimpl TransactionalEventStreamWriter {\n\n // use ClientFactory to initialize a TransactionalEventStreamWriter.\n\n pub(crate) async fn new(stream: ScopedStream, writer_id: WriterId, factory: ClientFactory) -> Self {\n\n let (mut pinger, pinger_handle) = Pinger::new(\n\n stream.clone(),\n\n factory.get_config().transaction_timeout_time,\n\n factory.clone(),\n\n );\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 52, "score": 131281.15108886603 }, { "content": " // helper function\n\n pub(crate) async fn create_txn_stream_writer() -> TransactionalEventStreamWriter {\n\n let txn_segment = ScopedSegment::from(\"scope/stream/0\");\n\n let writer_id = WriterId(123);\n\n let config = ClientConfigBuilder::default()\n\n .connection_type(ConnectionType::Mock(MockType::Happy))\n\n 
.mock(true)\n\n .controller_uri(PravegaNodeUri::from(\"127.0.0.2:9091\"))\n\n .build()\n\n .unwrap();\n\n let factory = ClientFactory::new(config);\n\n create_stream(&factory, \"scope\", \"stream\").await;\n\n factory\n\n .create_transactional_event_stream_writer(ScopedStream::from(&txn_segment), writer_id)\n\n .await\n\n }\n\n}\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 53, "score": 131280.1965423895 }, { "content": " let delegation_token_provider =\n\n Arc::new(factory.create_delegation_token_provider(stream.clone()).await);\n\n let runtime_handle = factory.get_runtime();\n\n let span = info_span!(\"Pinger\", transactional_event_stream_writer = %writer_id);\n\n runtime_handle.enter();\n\n tokio::spawn(async move { pinger.start_ping().instrument(span).await });\n\n TransactionalEventStreamWriter {\n\n stream,\n\n writer_id,\n\n factory,\n\n pinger_handle,\n\n delegation_token_provider,\n\n }\n\n }\n\n\n\n /// This method opens a transaction by sending a request to Pravega controller.\n\n pub async fn begin(&mut self) -> Result<Transaction, TransactionalEventStreamWriterError> {\n\n let txn_segments = self\n\n .factory\n\n .get_controller_client()\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 54, "score": 131277.70742938117 }, { "content": " /// This method returns the Transaction based on the given transaction id.\n\n /// If the current transaction is not in open status, meaning it has been committed\n\n /// or aborted, this method will create a closed transaction that only contains the meta data\n\n /// of this transaction.\n\n pub async fn get_txn(&self, txn_id: TxId) -> Result<Transaction, TransactionalEventStreamWriterError> {\n\n let status = self\n\n .factory\n\n .get_controller_client()\n\n .check_transaction_status(&self.stream, txn_id)\n\n .await\n\n .map_err(|e| e.error)\n\n .context(TxnStreamControllerError {})?;\n\n if status != TransactionStatus::Open {\n\n return 
Ok(Transaction::new(\n\n TransactionInfo::new(txn_id, self.writer_id, self.stream.clone(), true),\n\n StreamSegments::new(BTreeMap::new()),\n\n self.pinger_handle.clone(),\n\n self.factory.clone(),\n\n true,\n\n )\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 55, "score": 131275.92639570197 }, { "content": " .create_transaction(\n\n &self.stream,\n\n Duration::from_millis(self.factory.get_config().transaction_timeout_time),\n\n )\n\n .await\n\n .map_err(|e| e.error)\n\n .context(TxnStreamControllerError {})?;\n\n info!(\"Transaction {} created\", txn_segments.tx_id);\n\n let txn_id = txn_segments.tx_id;\n\n self.pinger_handle.add(txn_id).await?;\n\n Ok(Transaction::new(\n\n TransactionInfo::new(txn_id, self.writer_id, self.stream.clone(), false),\n\n txn_segments.stream_segments,\n\n self.pinger_handle.clone(),\n\n self.factory.clone(),\n\n false,\n\n )\n\n .await)\n\n }\n\n\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 56, "score": 131274.05881697487 }, { "content": "#[cfg(test)]\n\npub(crate) mod test {\n\n use super::*;\n\n use crate::create_stream;\n\n use pravega_client_config::connection_type::{ConnectionType, MockType};\n\n use pravega_client_config::ClientConfigBuilder;\n\n use pravega_client_shared::{PravegaNodeUri, ScopedSegment};\n\n use tokio::runtime::Runtime;\n\n\n\n #[test]\n\n fn test_txn_stream_writer() {\n\n let rt = Runtime::new().unwrap();\n\n let mut txn_stream_writer = rt.block_on(create_txn_stream_writer());\n\n let transaction = rt.block_on(txn_stream_writer.begin()).expect(\"open transaction\");\n\n let fetched_transaction = rt\n\n .block_on(txn_stream_writer.get_txn(transaction.get_txn_id()))\n\n .expect(\"get transaction\");\n\n assert_eq!(transaction.get_txn_id(), fetched_transaction.get_txn_id());\n\n }\n\n\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 57, "score": 131269.76700835058 }, { "content": " .await);\n\n }\n\n let segments 
= self\n\n .factory\n\n .get_controller_client()\n\n .get_epoch_segments(&self.stream, txn_id.get_epoch())\n\n .await\n\n .map_err(|e| e.error)\n\n .context(TxnStreamControllerError {})?;\n\n Ok(Transaction::new(\n\n TransactionInfo::new(txn_id, self.writer_id, self.stream.clone(), true),\n\n segments,\n\n self.pinger_handle.clone(),\n\n self.factory.clone(),\n\n false,\n\n )\n\n .await)\n\n }\n\n}\n\n\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 58, "score": 131265.4163024842 }, { "content": "//\n\n// Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n\n\nuse crate::client_factory::ClientFactory;\n\nuse crate::error::*;\n\nuse crate::transaction::pinger::{Pinger, PingerHandle};\n\nuse crate::transaction::{Transaction, TransactionInfo};\n\nuse pravega_client_auth::DelegationTokenProvider;\n\nuse pravega_client_shared::{ScopedStream, StreamSegments, TransactionStatus, TxId, WriterId};\n\nuse snafu::ResultExt;\n\nuse std::collections::BTreeMap;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n", "file_path": "src/transaction/transactional_event_stream_writer.rs", "rank": 59, "score": 131258.5871061962 }, { "content": "impl StreamTxnWriter {\n\n ///\n\n /// Create a new transaction.\n\n /// This returns a StreamTransaction which can be perform writes on the created transaction. 
It\n\n /// can also be used to perform commit() and abort() operations on the created transaction.\n\n ///\n\n #[text_signature = \"($self)\"]\n\n pub fn begin_txn(&mut self) -> PyResult<StreamTransaction> {\n\n let result = self.factory.get_runtime().block_on(self.writer.begin());\n\n match result {\n\n Ok(txn) => Ok(StreamTransaction::new(txn, self.factory.clone())),\n\n Err(e) => Err(exceptions::ValueError::py_err(format!(\"{:?}\", e))),\n\n }\n\n }\n\n\n\n ///\n\n /// Get a StreamTransaction for a given transaction id.\n\n ///\n\n #[text_signature = \"($self, txn_id)\"]\n\n pub fn get_txn(&mut self, txn_id: u128) -> PyResult<StreamTransaction> {\n", "file_path": "bindings/src/stream_writer_transactional.rs", "rank": 60, "score": 124031.214333313 }, { "content": " use pravega_client_shared::ScopedStream;\n\n use tracing::debug;\n\n }\n\n}\n\n\n\n///\n\n/// This represents a Transaction writer for a given Stream.\n\n/// Note: A python object of StreamTxnWriter cannot be created directly without using the StreamManager.\n\n///\n\n#[cfg(feature = \"python_binding\")]\n\n#[pyclass]\n\n#[derive(new)]\n\npub(crate) struct StreamTxnWriter {\n\n writer: TransactionalEventStreamWriter,\n\n factory: ClientFactory,\n\n stream: ScopedStream,\n\n}\n\n\n\n#[cfg(feature = \"python_binding\")]\n\n#[pymethods]\n", "file_path": "bindings/src/stream_writer_transactional.rs", "rank": 61, "score": 124016.34693661712 }, { "content": " debug!(\"Writing a single event for a given routing key\");\n\n let result = self\n\n .factory\n\n .get_runtime()\n\n .block_on(self.writer.get_txn(TxId(txn_id)));\n\n\n\n match result {\n\n Ok(txn) => Ok(StreamTransaction::new(txn, self.factory.clone())),\n\n Err(e) => Err(exceptions::ValueError::py_err(format!(\"{:?}\", e))),\n\n }\n\n }\n\n\n\n /// Returns the string representation.\n\n fn to_str(&self) -> String {\n\n format!(\"Stream: {:?} \", self.stream)\n\n }\n\n}\n\n\n\n///\n\n/// Refer 
https://docs.python.org/3/reference/datamodel.html#basic-customization\n", "file_path": "bindings/src/stream_writer_transactional.rs", "rank": 62, "score": 124010.36572889789 }, { "content": "//\n\n// Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n\n\ncfg_if! {\n\n if #[cfg(feature = \"python_binding\")] {\n\n use pravega_client::transaction::transactional_event_stream_writer::TransactionalEventStreamWriter;\n\n use pravega_client::client_factory::ClientFactory;\n\n use pyo3::exceptions;\n\n use pyo3::prelude::*;\n\n use pyo3::PyResult;\n\n use pyo3::PyObjectProtocol;\n\n use crate::transaction::StreamTransaction;\n\n use pravega_client_shared::TxId;\n", "file_path": "bindings/src/stream_writer_transactional.rs", "rank": 63, "score": 124003.0648893773 }, { "content": "/// This function will be called by the repr() built-in function to compute the “official” string\n\n/// representation of an Python object.\n\n///\n\n#[cfg(feature = \"python_binding\")]\n\n#[pyproto]\n\nimpl PyObjectProtocol for StreamTxnWriter {\n\n fn __repr__(&self) -> PyResult<String> {\n\n Ok(format!(\"StreamTxnWriter({})\", self.to_str()))\n\n }\n\n}\n", "file_path": "bindings/src/stream_writer_transactional.rs", "rank": 64, "score": 123992.27680007632 }, { "content": "pub fn wirecommand_test_wrapper() {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .build()\n\n .expect(\"build client config\");\n\n let cf = ClientFactory::new(config);\n\n let h = cf.get_runtime();\n\n h.block_on(wirecommand_tests(&cf));\n\n}\n\n\n\npub async fn wirecommand_tests(factory: &ClientFactory) {\n\n let timeout_second = time::Duration::from_secs(30);\n\n\n\n timeout(timeout_second, 
test_hello(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_keep_alive(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_setup_append(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_create_segment(factory))\n", "file_path": "integration_test/src/wirecommand_tests.rs", "rank": 65, "score": 122247.05627901005 }, { "content": "pub fn disconnection_test_wrapper() {\n\n let mut rt = tokio::runtime::Runtime::new().expect(\"create runtime\");\n\n rt.block_on(test_retry_with_no_connection());\n\n rt.shutdown_timeout(Duration::from_millis(100));\n\n\n\n let config = PravegaStandaloneServiceConfig::new(false, false, false);\n\n let mut pravega = PravegaStandaloneService::start(config);\n\n test_retry_while_start_pravega();\n\n assert_eq!(check_standalone_status(), true);\n\n test_retry_with_unexpected_reply();\n\n pravega.stop().unwrap();\n\n wait_for_standalone_with_timeout(false, 10);\n\n\n\n let mut rt = tokio::runtime::Runtime::new().expect(\"create runtime\");\n\n rt.block_on(test_with_mock_server());\n\n}\n\n\n\nasync fn test_retry_with_no_connection() {\n\n let retry_policy = RetryWithBackoff::default().max_tries(4);\n\n // give a wrong endpoint\n", "file_path": "integration_test/src/disconnection_tests.rs", "rank": 66, "score": 122247.05627901005 }, { "content": "pub fn create_channel<U>(capacity: usize) -> (ChannelSender<U>, ChannelReceiver<U>) {\n\n let (tx, rx) = unbounded_channel();\n\n let semaphore = Semaphore::new(true, capacity);\n\n let semaphore_arc = Arc::new(semaphore);\n\n let sender = ChannelSender {\n\n sender: tx,\n\n semaphore: semaphore_arc.clone(),\n\n capacity,\n\n };\n\n let receiver = ChannelReceiver {\n\n receiver: rx,\n\n semaphore: semaphore_arc,\n\n capacity,\n\n };\n\n (sender, receiver)\n\n}\n\n\n\npub struct CapacityGuard {\n\n semaphore: Arc<Semaphore>,\n\n pub size: usize,\n", "file_path": "channel/src/lib.rs", "rank": 67, "score": 119608.71468311317 }, { "content": "pub fn current_span() -> 
span::Span {\n\n span::Span::current()\n\n}\n", "file_path": "src/trace.rs", "rank": 68, "score": 118517.05157420659 }, { "content": "fn apply_deletes_to_localmap(to_delete: &mut Table, table_synchronizer: &mut TableSynchronizer) {\n\n let mut i = 0;\n\n for delete in to_delete.get_remove_iter() {\n\n let delete_key = Key {\n\n key: delete.inner_key.clone(),\n\n key_version: TableKey::KEY_NO_VERSION,\n\n };\n\n let in_mem_inner_map = table_synchronizer\n\n .in_memory_map\n\n .entry(delete.outer_key.clone())\n\n .or_insert_with(HashMap::new);\n\n in_mem_inner_map.remove(&delete_key);\n\n i += 1;\n\n }\n\n debug!(\"Deletes {} entries in local map \", i);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "src/table_synchronizer.rs", "rank": 69, "score": 116796.63221571516 }, { "content": "pub fn metric_init(scrape_port: SocketAddr) {\n\n PrometheusBuilder::new()\n\n .listen_address(scrape_port)\n\n .install()\n\n .expect(\"install scraper\");\n\n\n\n for metric in ClientMetrics::into_enum_iter() {\n\n metric.register();\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
update {\n\n ($metric:expr, $value:expr, $($tags:tt)*) => {\n\n match $metric {\n\n ClientMetrics::ClientAppendLatency => {\n\n metrics::gauge!(\"pravega.client.segment.append_latency_ms\", $value as f64, $($tags)*);\n\n }\n\n ClientMetrics::ClientAppendBlockSize => {\n\n metrics::gauge!(\"pravega.client.segment.append_block_size\", $value as f64, $($tags)*);\n\n }\n\n ClientMetrics::ClientOutstandingAppendCount => {\n\n metrics::gauge!(\"pravega.client.segment.outstanding_append_count\", $value as f64, $($tags)*);\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/metric.rs", "rank": 70, "score": 113503.99699447554 }, { "content": "fn main() {\n\n tonic_build::compile_protos(\"proto/Controller.proto\").unwrap();\n\n}\n", "file_path": "controller-client/build.rs", "rank": 71, "score": 111714.22925398269 }, { "content": "fn main() {\n\n // first check if the Pravega directory already exists.\n\n if check_exist() {\n\n info!(\"Returning early because {} was already found\", LIBRARY);\n\n return;\n\n }\n\n install_prebuilt();\n\n}\n\n\n", "file_path": "integration_test/build.rs", "rank": 72, "score": 111714.22925398269 }, { "content": "/// Downloads and unpacks a prebuilt binary. 
Only works for certain platforms.\n\nfn install_prebuilt() {\n\n let url = format!(\n\n \"https://github.com/Tristan1900/pravega/releases/download/{}/pravega-{}.tgz\",\n\n TAG, VERSION\n\n );\n\n let short_file_name = url.split('/').last().unwrap();\n\n let mut base_name = short_file_name.to_string();\n\n remove_suffix(&mut base_name, \".tgz\");\n\n\n\n let file_name = Path::new(BASE).join(short_file_name);\n\n\n\n // check the tarball and download the tarball.\n\n if !file_name.exists() {\n\n let mut f = File::create(&file_name).unwrap();\n\n let mut resp = reqwest::blocking::get(&url).unwrap();\n\n io::copy(&mut resp, &mut f).unwrap();\n\n }\n\n\n\n // Extract the Pravega standalone.\n\n let unpacked_dir = \".\";\n", "file_path": "integration_test/build.rs", "rank": 85, "score": 108494.23330706614 }, { "content": "pub trait Reply {\n\n fn get_request_id(&self) -> i64;\n\n fn is_failure(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\n/*\n\n * bincode serialize and deserialize config\n\n */\n\nlazy_static! 
{\n\n static ref CONFIG: Config = {\n\n let mut config = bincode2::config();\n\n config.big_endian();\n\n config.limit(MAX_WIRECOMMAND_SIZE.into());\n\n config.array_length(LengthOption::U32);\n\n config.string_length(LengthOption::U16);\n\n config\n\n };\n\n}\n", "file_path": "wire_protocol/src/commands.rs", "rank": 86, "score": 107213.92902144133 }, { "content": "#[test]\n\nfn test_hello() {\n\n let hello_command = WireCommands::Replies(Replies::Hello(HelloCommand {\n\n high_version: 9,\n\n low_version: 5,\n\n }));\n\n test_command(hello_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 87, "score": 105527.86150472949 }, { "content": "#[async_trait]\n\npub trait ConnectionFactory: Send + Sync {\n\n /// establish_connection will return a Connection future that used to send and read data.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use pravega_wire_protocol::connection_factory::{ConnectionFactory, ConnectionFactoryConfig};\n\n /// use pravega_client_shared::PravegaNodeUri;\n\n /// use pravega_client_config::connection_type::ConnectionType;\n\n /// use tokio::runtime::Runtime;\n\n ///\n\n /// fn main() {\n\n /// let mut rt = Runtime::new().unwrap();\n\n /// let endpoint = PravegaNodeUri::from(\"localhost:9090\".to_string());\n\n /// let config = ConnectionFactoryConfig::new(ConnectionType::Tokio);\n\n /// let cf = ConnectionFactory::create(config);\n\n /// let connection_future = cf.establish_connection(endpoint);\n\n /// let mut connection = rt.block_on(connection_future).unwrap();\n\n /// }\n\n /// ```\n", "file_path": "wire_protocol/src/connection_factory.rs", "rank": 88, "score": 105187.96482786453 }, { "content": "#[test]\n\nfn test_no_such_segment() {\n\n let segment_name = String::from(\"segment-1\");\n\n let stack_trace = String::from(\"some exception\");\n\n let offset_pos = 100i64;\n\n let no_such_segment_command = WireCommands::Replies(Replies::NoSuchSegment(NoSuchSegmentCommand {\n\n request_id: 1,\n\n segment: 
segment_name,\n\n server_stack_trace: stack_trace,\n\n offset: offset_pos,\n\n }));\n\n test_command(no_such_segment_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 89, "score": 104854.74533874326 }, { "content": "fn add_segment_entry(\n\n segment_map: &mut BTreeMap<OrderedFloat<f64>, SegmentWithRange>,\n\n segment: i64,\n\n low_key: f64,\n\n high_key: f64,\n\n) {\n\n segment_map.insert(\n\n OrderedFloat(high_key),\n\n SegmentWithRange::new(\n\n create_segment(segment),\n\n OrderedFloat(low_key),\n\n OrderedFloat(high_key),\n\n ),\n\n );\n\n}\n", "file_path": "shared/src/test.rs", "rank": 90, "score": 104854.74533874326 }, { "content": "#[test]\n\nfn test_valid_segments() {\n\n let segment_1 = create_segment(1);\n\n\n\n let segment_0 = create_segment(0);\n\n\n\n let mut segment_map: BTreeMap<OrderedFloat<f64>, SegmentWithRange> = BTreeMap::new();\n\n segment_map.insert(\n\n OrderedFloat(0.5),\n\n SegmentWithRange::new(segment_0.clone(), OrderedFloat(0.0), OrderedFloat(0.5)),\n\n );\n\n segment_map.insert(\n\n OrderedFloat(1.0),\n\n SegmentWithRange::new(segment_1.clone(), OrderedFloat(0.5), OrderedFloat(1.0)),\n\n );\n\n let s = StreamSegments::new(segment_map);\n\n assert_eq!(s.key_segment_map.len(), 2);\n\n assert_eq!(segment_1.clone(), s.get_segment(0.75));\n\n assert_eq!(segment_1.clone(), s.get_segment(1.0));\n\n assert_eq!(segment_0.clone(), s.get_segment(0.5));\n\n assert_eq!(segment_0.clone(), s.get_segment(0.4));\n\n assert_eq!(segment_0.clone(), s.get_segment(0.499));\n\n}\n\n\n", "file_path": "shared/src/test.rs", "rank": 91, "score": 104854.74533874326 }, { "content": "fn add_replacement_segment(\n\n segment_map: &mut HashMap<SegmentWithRange, Vec<Segment>>,\n\n segment: i64,\n\n low_key: f64,\n\n high_key: f64,\n\n predecessor: Vec<i64>,\n\n) {\n\n let s = predecessor.iter().map(|s| Segment::new(*s)).collect();\n\n segment_map.insert(\n\n SegmentWithRange::new(\n\n create_segment(segment),\n\n OrderedFloat(low_key),\n\n 
OrderedFloat(high_key),\n\n ),\n\n s,\n\n );\n\n}\n\n\n", "file_path": "shared/src/test.rs", "rank": 92, "score": 104854.74533874326 }, { "content": "pub trait Encode {\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError>;\n\n}\n\n\n", "file_path": "wire_protocol/src/wire_commands.rs", "rank": 93, "score": 104439.69397670584 }, { "content": "pub trait Decode {\n\n type Item;\n\n fn read_from(raw_input: &[u8]) -> Result<Self::Item, CommandError>;\n\n}\n\n\n\nimpl Encode for Requests {\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError> {\n\n let mut res = Vec::new();\n\n match self {\n\n Requests::Padding(padding_command) => {\n\n res.extend_from_slice(&PaddingCommand::TYPE_CODE.to_be_bytes());\n\n let se = padding_command.write_fields()?;\n\n res.extend_from_slice(&(se.len() as i32).to_be_bytes());\n\n res.extend(se);\n\n }\n\n Requests::PartialEvent(partial_event_cmd) => {\n\n res.extend_from_slice(&PartialEventCommand::TYPE_CODE.to_be_bytes());\n\n let se = partial_event_cmd.write_fields()?;\n\n res.extend_from_slice(&(se.len() as i32).to_be_bytes());\n\n res.extend(se);\n", "file_path": "wire_protocol/src/wire_commands.rs", "rank": 94, "score": 104439.69397670584 }, { "content": "fn check_exist() -> bool {\n\n let path = Path::new(BASE).join(LIBRARY);\n\n if path.exists() {\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "integration_test/build.rs", "rank": 95, "score": 102815.72329587958 }, { "content": "#[test]\n\nfn test_data_appended() {\n\n let writer_id_number: u128 = 123;\n\n let data_appended_cmd = WireCommands::Replies(Replies::DataAppended(DataAppendedCommand {\n\n writer_id: writer_id_number,\n\n event_number: 1,\n\n previous_event_number: 0,\n\n request_id: 1,\n\n current_segment_write_offset: 0,\n\n }));\n\n test_command(data_appended_cmd);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 96, "score": 102742.5234503957 }, { "content": "#[test]\n\nfn test_table_read() {\n\n let mut entries = Vec::<(TableKey, 
TableValue)>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n let value_data = String::from(\"value-1\").into_bytes();\n\n entries.push((TableKey::new(key_data, 1), TableValue::new(value_data)));\n\n let table_entries = TableEntries { entries };\n\n let segment_name = String::from(\"segment-1\");\n\n let table_read = WireCommands::Replies(Replies::TableRead(TableReadCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n entries: table_entries,\n\n }));\n\n\n\n test_command(table_read);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 97, "score": 102670.57417997858 }, { "content": "#[test]\n\nfn test_read_table() {\n\n let segment = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let mut keys = Vec::<TableKey>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n keys.push(TableKey::new(key_data, 1));\n\n let read_table_command = WireCommands::Requests(Requests::ReadTable(ReadTableCommand {\n\n request_id: 1,\n\n segment,\n\n delegation_token: token,\n\n keys,\n\n }));\n\n\n\n test_command(read_table_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 98, "score": 102670.57417997858 }, { "content": "#[test]\n\nfn test_segment_is_sealed() {\n\n let segment_name = String::from(\"segment-1\");\n\n let stack_trace = String::from(\"some exception\");\n\n let offset_pos = 100i64;\n\n let segment_is_sealed_command = WireCommands::Replies(Replies::SegmentIsSealed(SegmentIsSealedCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n server_stack_trace: stack_trace,\n\n offset: offset_pos,\n\n }));\n\n test_command(segment_is_sealed_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 99, "score": 102096.60561127876 } ]
Rust
cs39/src/size_test.rs
gretchenfrage/CS639S20_Demos
d75a16cf66b6b95eb74fc8f101020672b62e5a90
use crate::{ cap_parse, navigate::{ DemoLookup, find_demo, }, compile::{ cpp_files, modify_compile, Compiled, }, output::{ INFO_INDENT, TableWriter, }, quant::{ subproc, demo_min_time, }, }; use std::{ path::Path, process::Command, mem::replace, collections::HashMap, fs::read_to_string, ffi::OsString, }; use regex::{self, Regex}; use byte_unit::Byte; use serde::Serialize; #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)] #[repr(usize)] pub enum Dim { X, Y } pub fn parse_dim_line(dim: Dim, line: &str) -> Option<u128> { let pat = format!( r#"^{}[[:space:]]+(?P<n>\d+)[[:space:]]*$"#, regex::escape(&format!( r##"#define {}DIM"##, match dim { Dim::X => 'X', Dim::Y => 'Y', }, )), ); let pat = Regex::new(&pat).unwrap(); pat.captures(line) .map(|caps| cap_parse::<u128>(&caps, "n").unwrap()) } pub fn format_dim_line(dim: Dim, val: u128) -> String { format!( r##"#define {}DIM {}"##, match dim { Dim::X => 'X', Dim::Y => 'Y', }, val, ) } pub fn find_dims( lookup: &DemoLookup, major: u32, minor: u32 ) -> Result<(u128, u128), ()> { let path = find_demo(lookup, major, minor)?; let mut found: [Option<u128>; 2] = [None, None]; for file in cpp_files(&path) { let code = read_to_string(path.join(file)).unwrap(); for line in code.lines() { for &dim in &[Dim::X, Dim::Y] { if let Some(val) = parse_dim_line(dim, line) { if found[dim as usize].is_some() { println!("[ERROR] dimension {:?} defined twice in code", dim); return Err(()); } else { found[dim as usize] = Some(val); } } } } } for &dim in &[Dim::X, Dim::Y] { if found[dim as usize].is_none() { println!("[ERROR] dimension {:?} not found in code", dim); return Err(()); } } Ok((found[0].unwrap(), found[1].unwrap())) } #[derive(Clone, Debug, Serialize, PartialEq)] #[serde(rename_all = "PascalCase")] pub struct SizeTestRow { x_size: u128, y_size: u128, product_size: u128, data_size_bytes: u128, data_size_string: String, best_time_ms: f64, } pub fn run<P>( repo: P, lookup: &DemoLookup, major: u32, minor: u32, mut table: 
TableWriter<SizeTestRow>, ) -> Result<(), ()> where P: AsRef<Path> { let (base_x, base_y) = find_dims(lookup, major, minor)?; println!("[INFO] default dimensions are {:?}", (base_x, base_y)); let dim_seq: Vec<(u128, u128)> = { let mut vec = Vec::new(); let base = base_x * base_y; let mut curr = base; let incr = 1; for _ in 0..10 { if (curr >> incr) >= (1 << 12) { curr >>= incr; } else { break; } } loop { vec.push(curr); if (curr << incr) > (base << 12) { break; } else if (curr << incr) > ((1 << 30) * 4 / 4) { break; } else { curr <<= incr; } } fn u128sqrt(n: u128) -> u128 { (n as f64).sqrt() as u128 } vec .into_iter() .map(|s| { let y = u128sqrt(base_y * s / base_x); let mut x = s / y; x += s % (x * y); (x, y) }) .collect() }; println!("[INFO] testing with dimensions:"); let mut dim_pretty = Vec::new(); for &(x, y) in &dim_seq { let data_size = x * y * 4; let data_size_str = Byte::from_bytes(data_size) .get_appropriate_unit(true) .format(0); let dim_pretty_curr = format!("{}×{} = {}", x, y, data_size_str); println!("{} • {}", INFO_INDENT, dim_pretty_curr); dim_pretty.push(dim_pretty_curr); } for (i, &(x, y)) in dim_seq.iter().enumerate() { println!("[INFO] benchmarking dimension {}", &dim_pretty[i]); let Compiled { workdir, binary } = modify_compile( &repo, lookup, major, minor, |code: &mut HashMap<OsString, String>| { for (file, content) in replace(code, HashMap::new()) { let rewritten: String = content.lines() .map(|line: &str| { let mut line = line.to_owned(); if parse_dim_line(Dim::X, &line).is_some() { line = format_dim_line(Dim::X, x); } else if parse_dim_line(Dim::Y, &line).is_some() { line = format_dim_line(Dim::Y, y); } line.push('\n'); line }) .collect(); code.insert(file, rewritten); } })?; let (status, lines) = subproc( Command::new(&binary) .current_dir(&workdir), false); let min_time = demo_min_time(&lines); println!("[INFO] best time = {:.2}ms", min_time.as_secs_f64() / 1000.0); table.write(SizeTestRow { x_size: x, y_size: y, product_size: x * y, 
data_size_bytes: x * y * 4, data_size_string: dim_pretty[i].clone(), best_time_ms: min_time.as_secs_f64() / 1000.0 }); println!(); if !status.success() { println!("[ERROR] exit code {}", status.code().unwrap()); return Err(()); } } println!("[INFO] done"); Ok(()) }
use crate::{ cap_parse, navigate::{ DemoLookup, find_demo, }, compile::{ cpp_files, modify_compile, Compiled, }, output::{ INFO_INDENT, TableWriter, }, quant::{ subproc, demo_min_time,
lone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)] #[repr(usize)] pub enum Dim { X, Y } pub fn parse_dim_line(dim: Dim, line: &str) -> Option<u128> { let pat = format!( r#"^{}[[:space:]]+(?P<n>\d+)[[:space:]]*$"#, regex::escape(&format!( r##"#define {}DIM"##, match dim { Dim::X => 'X', Dim::Y => 'Y', }, )), ); let pat = Regex::new(&pat).unwrap(); pat.captures(line) .map(|caps| cap_parse::<u128>(&caps, "n").unwrap()) } pub fn format_dim_line(dim: Dim, val: u128) -> String { format!( r##"#define {}DIM {}"##, match dim { Dim::X => 'X', Dim::Y => 'Y', }, val, ) } pub fn find_dims( lookup: &DemoLookup, major: u32, minor: u32 ) -> Result<(u128, u128), ()> { let path = find_demo(lookup, major, minor)?; let mut found: [Option<u128>; 2] = [None, None]; for file in cpp_files(&path) { let code = read_to_string(path.join(file)).unwrap(); for line in code.lines() { for &dim in &[Dim::X, Dim::Y] { if let Some(val) = parse_dim_line(dim, line) { if found[dim as usize].is_some() { println!("[ERROR] dimension {:?} defined twice in code", dim); return Err(()); } else { found[dim as usize] = Some(val); } } } } } for &dim in &[Dim::X, Dim::Y] { if found[dim as usize].is_none() { println!("[ERROR] dimension {:?} not found in code", dim); return Err(()); } } Ok((found[0].unwrap(), found[1].unwrap())) } #[derive(Clone, Debug, Serialize, PartialEq)] #[serde(rename_all = "PascalCase")] pub struct SizeTestRow { x_size: u128, y_size: u128, product_size: u128, data_size_bytes: u128, data_size_string: String, best_time_ms: f64, } pub fn run<P>( repo: P, lookup: &DemoLookup, major: u32, minor: u32, mut table: TableWriter<SizeTestRow>, ) -> Result<(), ()> where P: AsRef<Path> { let (base_x, base_y) = find_dims(lookup, major, minor)?; println!("[INFO] default dimensions are {:?}", (base_x, base_y)); let dim_seq: Vec<(u128, u128)> = { let mut vec = Vec::new(); let base = base_x * base_y; let mut curr = base; let incr = 1; for _ in 0..10 { if (curr >> incr) >= (1 << 12) { curr >>= incr; } else { 
break; } } loop { vec.push(curr); if (curr << incr) > (base << 12) { break; } else if (curr << incr) > ((1 << 30) * 4 / 4) { break; } else { curr <<= incr; } } fn u128sqrt(n: u128) -> u128 { (n as f64).sqrt() as u128 } vec .into_iter() .map(|s| { let y = u128sqrt(base_y * s / base_x); let mut x = s / y; x += s % (x * y); (x, y) }) .collect() }; println!("[INFO] testing with dimensions:"); let mut dim_pretty = Vec::new(); for &(x, y) in &dim_seq { let data_size = x * y * 4; let data_size_str = Byte::from_bytes(data_size) .get_appropriate_unit(true) .format(0); let dim_pretty_curr = format!("{}×{} = {}", x, y, data_size_str); println!("{} • {}", INFO_INDENT, dim_pretty_curr); dim_pretty.push(dim_pretty_curr); } for (i, &(x, y)) in dim_seq.iter().enumerate() { println!("[INFO] benchmarking dimension {}", &dim_pretty[i]); let Compiled { workdir, binary } = modify_compile( &repo, lookup, major, minor, |code: &mut HashMap<OsString, String>| { for (file, content) in replace(code, HashMap::new()) { let rewritten: String = content.lines() .map(|line: &str| { let mut line = line.to_owned(); if parse_dim_line(Dim::X, &line).is_some() { line = format_dim_line(Dim::X, x); } else if parse_dim_line(Dim::Y, &line).is_some() { line = format_dim_line(Dim::Y, y); } line.push('\n'); line }) .collect(); code.insert(file, rewritten); } })?; let (status, lines) = subproc( Command::new(&binary) .current_dir(&workdir), false); let min_time = demo_min_time(&lines); println!("[INFO] best time = {:.2}ms", min_time.as_secs_f64() / 1000.0); table.write(SizeTestRow { x_size: x, y_size: y, product_size: x * y, data_size_bytes: x * y * 4, data_size_string: dim_pretty[i].clone(), best_time_ms: min_time.as_secs_f64() / 1000.0 }); println!(); if !status.success() { println!("[ERROR] exit code {}", status.code().unwrap()); return Err(()); } } println!("[INFO] done"); Ok(()) }
}, }; use std::{ path::Path, process::Command, mem::replace, collections::HashMap, fs::read_to_string, ffi::OsString, }; use regex::{self, Regex}; use byte_unit::Byte; use serde::Serialize; #[derive(Copy, C
random
[ { "content": "/// Spawn a sub-process, and by the power of threads,\n\n/// elevate its stdout and stderr to the parent while\n\n/// also merging them together into a line stream,\n\n/// then collecting them.\n\npub fn subproc<B>(mut command: B, quiet: bool) -> (ExitStatus, Vec<String>) \n\nwhere\n\n B: BorrowMut<Command>,\n\n{\n\n let cmd = command.borrow_mut();\n\n \n\n cmd.stdout(Stdio::piped());\n\n cmd.stderr(Stdio::piped());\n\n \n\n let (send_0, recv) = mpsc::sync_channel::<String>(10000);\n\n let send_1 = send_0.clone();\n\n \n\n let mut child = cmd.spawn().unwrap();\n\n \n\n let stdout = child.stdout.take().unwrap();\n\n let stdout = Box::new(stdout) as Box<dyn Read + Send>;\n\n \n\n let stderr = child.stderr.take().unwrap();\n\n let stderr = Box::new(stderr) as Box<dyn Read + Send>;\n\n \n", "file_path": "cs39/src/quant.rs", "rank": 0, "score": 83081.918346277 }, { "content": "/// Find a path for a demo by number.\n\npub fn find_demo(\n\n lookup: &DemoLookup, \n\n major: u32, \n\n minor: u32\n\n) -> Result<PathBuf, ()> {\n\n let subdir = lookup.get(&major)\n\n .ok_or_else(|| {\n\n eprintln!(\"[ERROR] major version {} not found\", major);\n\n eprintln!(\" available: {:?}\", \n\n lookup.keys().copied().collect::<Vec<u32>>());\n\n })?;\n\n let path = subdir.demos.get(&minor)\n\n .ok_or_else(|| {\n\n eprintln!(\"[ERROR] minor version {} not found in {:?}\", \n\n minor, subdir.subdir_path);\n\n eprintln!(\" available: {:?}\",\n\n subdir.demos.keys().copied().collect::<Vec<u32>>());\n\n })?;\n\n Ok(path.clone())\n\n}\n\n\n\n\n", "file_path": "cs39/src/navigate.rs", "rank": 1, "score": 68051.6809454132 }, { "content": "/// Compile code, get path to binary.\n\npub fn compile(lookup: &DemoLookup, major: u32, minor: u32) -> Result<Compiled, ()> {\n\n let path = find_demo(lookup, major, minor)?;\n\n \n\n let compiler = Compiler::GccMkl;\n\n \n\n println!(\"[INFO] compiling with {:?}\", compiler);\n\n println!();\n\n \n\n let status = 
compiler.compile(&path);\n\n\n\n if !status.success() {\n\n eprintln!();\n\n eprintln!(\"[ERROR] compile failure {}\", status.code().unwrap());\n\n return Err(());\n\n }\n\n \n\n Ok(Compiled {\n\n workdir: path.clone(),\n\n binary: path.join(\"a.out\")\n\n })\n\n}\n\n\n", "file_path": "cs39/src/compile.rs", "rank": 2, "score": 63736.94105894667 }, { "content": "/// Read code to memory, modify, write to temp dir, compile, get path\n\n/// to binary.\n\npub fn modify_compile<P, F>(\n\n repo: P,\n\n lookup: &DemoLookup, \n\n major: u32,\n\n minor: u32, \n\n edit: F\n\n) -> Result<Compiled, ()> \n\nwhere\n\n P: AsRef<Path>,\n\n F: FnOnce(&mut HashMap<OsString, String>),\n\n{\n\n // find code\n\n let path = find_demo(lookup, major, minor)?;\n\n \n\n // read code\n\n let mut code: HashMap<OsString, String> = cpp_files(&path)\n\n .map(|file| (\n\n file.clone(), \n\n read_to_string(path.join(file)).unwrap()\n\n ))\n", "file_path": "cs39/src/compile.rs", "rank": 3, "score": 51378.74271123002 }, { "content": "/// Read the demo directory structure.\n\npub fn demo_lookup<P: AsRef<Path>>(repo: P) -> DemoLookup {\n\n let pat = r#\"^[_[[:alnum:]]]+_(?P<major>\\d+)_(?P<minor>\\d+)$\"#;\n\n let pat = Regex::new(pat).unwrap();\n\n \n\n let mut lookup = DemoLookup::new();\n\n \n\n for subdir in subdirs(&repo) {\n\n let mut demos: Vec<(PathBuf, (u32, u32))> = subdirs(&subdir)\n\n .filter_map(|p| p.file_stem()\n\n .and_then(OsStr::to_str)\n\n .and_then(|s| pat.captures(s))\n\n .and_then(|cap| {\n\n cap_parse::<u32>(&cap, \"major\")\n\n .and_then(move |major| \n\n cap_parse::<u32>(&cap, \"minor\")\n\n .map(move |minor| (major, minor)))\n\n })\n\n .map(move |num| (p, num)))\n\n .collect();\n\n demos.sort_by_key(|&(_, num)| num);\n", "file_path": "cs39/src/navigate.rs", "rank": 4, "score": 50349.26927516273 }, { "content": "\n\nuse std::{\n\n fmt::{self, Display, Formatter},\n\n path::{Path, PathBuf},\n\n fs::{\n\n File,\n\n create_dir_all,\n\n },\n\n marker::PhantomData,\n\n};\n\nuse 
csv::Writer as CsvWriter;\n\nuse serde::Serialize;\n\n\n\npub struct Indent<'a, I: Display>(pub &'a str, pub I);\n\n\n\nimpl<'a, I: Display> Display for Indent<'a, I> {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let string = format!(\"{}\", self.1);\n\n let mut first = true;\n\n for line in string.lines() {\n", "file_path": "cs39/src/output.rs", "rank": 5, "score": 46135.90062061337 }, { "content": " {\n\n println!(\"[INFO] writing csv file to {:?}\", path.as_ref());\n\n let csv = CsvWriter::from_path(&path).unwrap();\n\n Self::from(TableTarget::Csv(csv))\n\n }\n\n \n\n pub fn write(&mut self, row: T) {\n\n match &mut self.target {\n\n &mut TableTarget::None => (),\n\n &mut TableTarget::Csv(ref mut csv) => {\n\n csv.serialize(row).unwrap();\n\n },\n\n };\n\n }\n\n}\n\n\n\n/*\n\n\n\npub enum TableOutput<R: Serialize> {\n\n None,\n\n Csv(csv::Writer<File>),\n\n}\n\n\n\nimpl<R: Serialize> TableOutput<R> {\n\n \n\n}\n\n*/", "file_path": "cs39/src/output.rs", "rank": 6, "score": 46135.25125580984 }, { "content": " if first {\n\n first = false;\n\n } else {\n\n f.write_str(\"\\n\")?;\n\n }\n\n f.write_str(self.0)?;\n\n f.write_str(line)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub const INFO_INDENT: &'static str = \" \";\n\n\n\n/// Allocate a path for a CSV file.\n", "file_path": "cs39/src/output.rs", "rank": 7, "score": 46131.821839543656 }, { "content": "\n\nuse crate::{\n\n cap_parse,\n\n};\n\nuse std::{\n\n process::{\n\n Command,\n\n Stdio,\n\n ExitStatus,\n\n },\n\n sync::mpsc,\n\n thread,\n\n io::{Read, BufRead, BufReader},\n\n borrow::BorrowMut,\n\n time::Duration,\n\n};\n\nuse regex::Regex;\n\n\n", "file_path": "cs39/src/quant.rs", "rank": 8, "score": 46131.26186791135 }, { "content": "\n\nuse crate::cap_parse;\n\nuse std::{\n\n path::{Path, PathBuf},\n\n collections::BTreeMap,\n\n fs::{\n\n read_dir,\n\n FileType,\n\n },\n\n ffi::OsStr,\n\n};\n\nuse regex::Regex;\n\n\n\n#[derive(Debug)] \n\npub struct Subdir {\n\n pub subdir_path: PathBuf,\n\n pub 
demos: BTreeMap<u32, PathBuf>,\n\n}\n\n\n\npub type DemoLookup = BTreeMap::<u32, Subdir>;\n\n\n\n/// Read the demo directory structure.\n", "file_path": "cs39/src/navigate.rs", "rank": 9, "score": 46130.081759871005 }, { "content": " \n\n let mut majors: Vec<u32> = demos.iter()\n\n .map(|&(_, (n, _))| n)\n\n .collect();\n\n majors.dedup();\n\n\n\n if majors.len() == 0 { continue; }\n\n if majors.len() > 1 {\n\n eprintln!(\"[WARN] several major versions detected in {:?}\", subdir);\n\n continue;\n\n }\n\n let major = majors[0];\n\n if let Some(conflict) = lookup.get(&major) {\n\n eprintln!(\n\n \"[WARN] conflicting major version {} between {:?} and {:?}\",\n\n major, subdir, conflict.subdir_path);\n\n if !(demos.len() > conflict.demos.len()) {\n\n continue;\n\n }\n\n }\n", "file_path": "cs39/src/navigate.rs", "rank": 10, "score": 46123.59265967629 }, { "content": " for thread in threads {\n\n thread.join().unwrap();\n\n }\n\n \n\n let mut lines = Vec::new();\n\n while let Ok(line) = recv.try_recv() {\n\n lines.push(line);\n\n }\n\n \n\n (status, lines)\n\n}", "file_path": "cs39/src/quant.rs", "rank": 11, "score": 46123.59265967629 }, { "content": " \n\n lookup.insert(major, Subdir {\n\n subdir_path: subdir,\n\n demos: demos.into_iter()\n\n .map(|(path, (_, minor))| (minor, path))\n\n .collect(),\n\n });\n\n }\n\n \n\n lookup\n\n}\n\n\n", "file_path": "cs39/src/navigate.rs", "rank": 12, "score": 46123.59265967629 }, { "content": " let mut threads = Vec::new();\n\n \n\n \n\n for (read, send) in vec![\n\n (stdout, send_0),\n\n (stderr, send_1),\n\n ] {\n\n let thread = thread::spawn(move || {\n\n let read = BufReader::new(read);\n\n for line in read.lines() {\n\n let line = line.unwrap();\n\n if !quiet { println!(\"{}\", line); }\n\n let _ = send.send(line);\n\n }\n\n });\n\n threads.push(thread);\n\n }\n\n \n\n let status = child.wait().unwrap();\n\n \n", "file_path": "cs39/src/quant.rs", "rank": 13, "score": 46123.59265967629 }, { "content": "\n\nuse 
crate::navigate::{find_demo, DemoLookup};\n\nuse std::{\n\n path::{Path, PathBuf},\n\n ffi::{OsStr, OsString},\n\n fs::{\n\n self,\n\n read_to_string,\n\n read_dir,\n\n create_dir_all,\n\n FileType\n\n },\n\n env,\n\n collections::HashMap,\n\n process::{\n\n Command,\n\n ExitStatus,\n\n }\n\n};\n\nuse rand::prelude::*;\n\n\n\n/// Result of code compilation.\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Compiled {\n\n pub workdir: PathBuf,\n\n pub binary: PathBuf,\n\n}\n\n\n\n/// List names of C++ source code files directly in a directory.\n", "file_path": "cs39/src/compile.rs", "rank": 14, "score": 46110.47128291009 }, { "content": " \n\n let status = compiler.compile(&temp);\n\n if !status.success() {\n\n eprintln!();\n\n eprintln!(\"[ERROR] compile failure {}\", status.code().unwrap());\n\n return Err(());\n\n }\n\n \n\n // done\n\n Ok(Compiled {\n\n workdir: temp.clone(),\n\n binary: temp.join(\"a.out\")\n\n })\n\n}\n", "file_path": "cs39/src/compile.rs", "rank": 15, "score": 46103.90365818446 }, { "content": " // try to get MKL working\n\n GccMkl,\n\n}\n\n\n\n/// Try to compile code in a subdirectory.\n\nimpl Compiler {\n\n fn compile<P>(self, path: P) -> ExitStatus \n\n where\n\n P: AsRef<Path>,\n\n {\n\n match self {\n\n Compiler::ClangPp => Command::new(\"clang++\")\n\n .args(\"-std=c++11 -stdlib=libc++ -w -O3\".split_whitespace())\n\n .args(cpp_files(&path))\n\n .current_dir(&path)\n\n .status().unwrap(),\n\n Compiler::Gcc9 => Command::new(\"gcc-9\")\n\n .args(\"-x c++ -fopenmp -w -O3 \".split_whitespace())\n\n .args(cpp_files(&path))\n\n .arg(\"-lstdc++\")\n", "file_path": "cs39/src/compile.rs", "rank": 16, "score": 46103.54444569516 }, { "content": " .collect();\n\n \n\n // code modification callback\n\n edit(&mut code);\n\n \n\n // allocate temp directory\n\n let temp = repo.as_ref().join(\"tmp\").join(format!(\"rng-{}\", random::<u16>()));\n\n println!(\"[INFO] building code in {:?}\", temp);\n\n 
create_dir_all(&temp).unwrap();\n\n \n\n // save code\n\n for (file, content) in code {\n\n let path = temp.join(file);\n\n fs::write(path, content).unwrap();\n\n }\n\n \n\n let compiler = Compiler::Gcc9;\n\n \n\n println!(\"[INFO] compiling with {:?}\", compiler);\n\n println!();\n", "file_path": "cs39/src/compile.rs", "rank": 17, "score": 46103.43075048562 }, { "content": " .current_dir(&path)\n\n .status().unwrap(),\n\n Compiler::Gcc => Command::new(\"gcc\")\n\n .args(\"-x c++ -fopenmp -w -O3 \".split_whitespace())\n\n .args(cpp_files(&path))\n\n .arg(\"-lstdc++\")\n\n .current_dir(&path)\n\n .status().unwrap(),\n\n Compiler::GccMkl => {\n\n let mklroot = env::var(\"MKLROOT\")\n\n .expect(\"missing required env var MKLROOT\");\n\n Command::new(\"gcc\")\n\n .args(format!(\n\n \"-x c++ -fopenmp -w -O3 -m64 -I{}/include\",\n\n mklroot).split_whitespace())\n\n .args(cpp_files(&path))\n\n .args(format!(\n\n \" -lstdc++ -L${}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl\",\n\n mklroot).split_whitespace())\n\n .current_dir(&path)\n", "file_path": "cs39/src/compile.rs", "rank": 18, "score": 46102.07702409369 }, { "content": " .status().unwrap()\n\n },\n\n }\n\n\n\n /*\n\n gcc -x c++ -fopenmp -w -O3 -m64 -I${MKLROOT}/include *.cpp *.h -lstdc++ -L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl\n\n */\n\n }\n\n}\n\n\n", "file_path": "cs39/src/compile.rs", "rank": 19, "score": 46098.989584462666 }, { "content": "enum TableTarget {\n\n None,\n\n Csv(CsvWriter<File>),\n\n}\n\n\n\nimpl<T: Serialize> TableWriter<T> {\n\n fn from(target: TableTarget) -> Self {\n\n TableWriter {\n\n target,\n\n p: PhantomData,\n\n }\n\n }\n\n \n\n pub fn none() -> Self {\n\n Self::from(TableTarget::None)\n\n }\n\n \n\n pub fn csv_file<P>(path: P) -> Self \n\n where \n\n P: AsRef<Path>\n", "file_path": "cs39/src/output.rs", "rank": 20, "score": 44187.78303210496 }, { 
"content": "pub fn demo_min_time<I, L>(lines: I) -> Duration \n\nwhere\n\n I: IntoIterator<Item=L>,\n\n L: AsRef<str>,\n\n{\n\n lines.into_iter()\n\n .flat_map(|line| parse_elapsed_time_line(line.as_ref()))\n\n .min()\n\n .unwrap()\n\n}\n\n\n", "file_path": "cs39/src/quant.rs", "rank": 21, "score": 39969.06472135241 }, { "content": "/// Allocate a path for a CSV file.\n\npub fn csv_path<S>(name: S) -> PathBuf \n\nwhere\n\n S: AsRef<str> \n\n{\n\n // unpleasent redundancy, but whatevs.\n\n let repo = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n .canonicalize().unwrap()\n\n .parent().map(PathBuf::from).unwrap();\n\n \n\n let host = repo.join(\"output\");\n\n create_dir_all(&host).unwrap();\n\n \n\n host.join(name.as_ref())\n\n}\n\n\n\n/// Output data table writer.\n\npub struct TableWriter<T: Serialize> {\n\n target: TableTarget,\n\n \n\n p: PhantomData<fn(T)>,\n\n}\n\n\n", "file_path": "cs39/src/output.rs", "rank": 22, "score": 39228.4149836591 }, { "content": "pub fn parse_elapsed_time_line(line: &str) -> Option<Duration> {\n\n let pat = r##\"^Running test iteration\\s+\\d+\\s+\\[Elapsed time : (?P<ms>\\d+(?:\\.\\d+)?)ms\\]$\"##;\n\n let pat = Regex::new(pat).unwrap();\n\n \n\n pat\n\n .captures(line)\n\n .map(|caps| cap_parse::<f64>(&caps, \"ms\").unwrap())\n\n .map(|ms| Duration::from_secs_f64(ms / 1000.0))\n\n}\n\n\n", "file_path": "cs39/src/quant.rs", "rank": 23, "score": 38501.226670585216 }, { "content": "/// List direct sub-**directories** of a directory.\n\n///\n\n/// Filters to not being with '.'.\n\npub fn subdirs<P: AsRef<Path>>(path: P) -> impl Iterator<Item=PathBuf> {\n\n read_dir(path).unwrap()\n\n .filter_map(Result::ok)\n\n .filter_map(|f| f.file_type().ok()\n\n .filter(FileType::is_dir)\n\n .map(|_| f.path())\n\n .filter(|p| p.file_name()\n\n .and_then(OsStr::to_str)\n\n .and_then(|s| s.chars().next())\n\n .map(|c| c != '.')\n\n .unwrap_or(false)))\n\n}\n", "file_path": "cs39/src/navigate.rs", "rank": 24, "score": 36491.06076323111 }, { 
"content": "/// List names of C++ source code files directly in a directory.\n\npub fn cpp_files<P: AsRef<Path>>(path: P) -> impl Iterator<Item=OsString> {\n\n read_dir(path).unwrap()\n\n .filter_map(Result::ok)\n\n .filter_map(|f| f.file_type().ok()\n\n .filter(FileType::is_file)\n\n .map(|_| f.path()))\n\n .filter_map(|p| p.extension()\n\n .filter(|e| *e == \"cpp\" || *e == \"h\")\n\n .and_then(|_| p.file_name().map(OsStr::to_owned)))\n\n}\n\n\n\n/// Possible C++ compiler toolchain to invoke.\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\n#[allow(dead_code)]\n\npub enum Compiler {\n\n ClangPp,\n\n // https://stackoverflow.com/questions/3178342/compiling-a-c-program-with-gcc#3206195\n\n Gcc9,\n\n // this seems to be the default installed in my arch linux\n\n Gcc,\n", "file_path": "cs39/src/compile.rs", "rank": 25, "score": 35847.72076998361 }, { "content": "/// `run` task.\n\npub fn run_demo(lookup: &DemoLookup, major: u32, minor: u32) -> Result<(), ()> {\n\n let Compiled { workdir, binary } = compile(lookup, major, minor)?;\n\n \n\n println!(\"[INFO] running\");\n\n println!();\n\n let status = Command::new(&binary)\n\n .current_dir(&workdir)\n\n .status().unwrap();\n\n println!();\n\n println!(\"[INFO] exit {}\", status.code().unwrap());\n\n \n\n Ok(())\n\n}\n\n\n", "file_path": "cs39/src/main.rs", "rank": 26, "score": 11588.889202613223 }, { "content": "extern crate regex;\n\nextern crate num_cpus;\n\nextern crate rand;\n\nextern crate byte_unit;\n\nextern crate csv;\n\nextern crate serde;\n\n\n\nuse crate::{\n\n compile::{\n\n compile,\n\n Compiled,\n\n },\n\n navigate::{\n\n DemoLookup,\n\n demo_lookup,\n\n },\n\n quant::{\n\n subproc,\n\n demo_min_time,\n\n },\n", "file_path": "cs39/src/main.rs", "rank": 28, "score": 21.480290776275535 }, { "content": "use crate::{\n\n {cap_parse, cpu_stat},\n\n navigate::{\n\n DemoLookup,\n\n },\n\n compile::{\n\n compile,\n\n Compiled,\n\n },\n\n quant::{\n\n subproc,\n\n },\n\n};\n\nuse 
std::{\n\n process::Command,\n\n collections::HashMap,\n\n time::Duration,\n\n};\n\nuse regex::{self, Regex};\n\nuse num_cpus;\n\n\n\n/// `kernel_sum_test` task.\n", "file_path": "cs39/src/kernel_sum_test.rs", "rank": 29, "score": 21.31410488057488 }, { "content": " output::{\n\n Indent, \n\n INFO_INDENT,\n\n TableWriter,\n\n csv_path,\n\n },\n\n};\n\nuse std::{\n\n env::args,\n\n process::Command,\n\n path::PathBuf,\n\n str::FromStr,\n\n};\n\nuse serde::Serialize;\n\n\n\n/// C++ compilation.\n\npub mod compile;\n\n\n\n/// Code demo navigation.\n\npub mod navigate;\n", "file_path": "cs39/src/main.rs", "rank": 30, "score": 14.921954744827417 }, { "content": "#ifdef DO_NOT_USE_MKL \n\n // Just for reference -- implementation without MKL\n\n#pragma omp parallel for\n\n for (int i = 0; i < XDIM; i++)\n\n for (int j = 0; j < YDIM; j++)\n\n for (int k = 0; k < ZDIM; k++)\n\n y[i][j][k] += x[i][j][k] * scale;\n\n#else\n\n cblas_saxpy(\n\n XDIM * YDIM * ZDIM, // Length of vectors\n\n scale, // Scale factor\n\n &x[0][0][0], // Input vector x, in operation y := x * scale + y\n\n 1, // Use step 1 for x\n\n &y[0][0][0], // Input/output vector y, in operation y := x * scale + y\n\n 1 // Use step 2 for y\n\n );\n\n#endif\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/PointwiseOps.cpp", "rank": 31, "score": 6.983847195832011 }, { "content": "\n\n/// Program output helpers.\n\npub mod output;\n\n\n\n/// `size_test` task.\n\npub mod size_test;\n\n\n\n/// Extraction of quantitative data from demos.\n\npub mod quant;\n\n\n\n/// `kernel_sum_test` task.\n\npub mod kernel_sum_test;\n\n\n\n/// Extract and parse a regex capture group.\n", "file_path": "cs39/src/main.rs", "rank": 32, "score": 6.960036797957617 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in 
WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_1/Utilities.cpp", "rank": 33, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_4/Utilities.cpp", "rank": 34, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": 
"LaplaceSolver/LaplaceSolver_1_6/Utilities.cpp", "rank": 35, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_2/Utilities.cpp", "rank": 36, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/Utilities.cpp", "rank": 37, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int 
j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/Utilities.cpp", "rank": 38, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_7/Utilities.cpp", "rank": 39, "score": 4.918835583253515 }, { "content": " case 0: output << YDIM << \" \" << ZDIM << std::endl; break;\n\n case 1: output << XDIM << \" \" << ZDIM << std::endl; break;\n\n case 2: output << XDIM << \" \" << YDIM << std::endl; break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n output << \"255\" << std::endl;\n\n\n\n switch(axis){\n\n \n\n case 0:\n\n for (int j = 0; j < YDIM; j++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[slice][j][k]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n case 1:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int k = 0; k < ZDIM; k++)\n\n output << (int)(x[i][slice][k]*255.0) << \" \";\n", "file_path": "LaplaceSolver/LaplaceSolver_1_3/Utilities.cpp", "rank": 40, "score": 4.918835583253515 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n 
for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_6/Utilities.cpp", "rank": 41, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_1/Utilities.cpp", "rank": 42, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_4/Utilities.cpp", "rank": 43, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/Utilities.cpp", "rank": 44, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis 
in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_3/Utilities.cpp", "rank": 45, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/Utilities.cpp", "rank": 46, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_2/Utilities.cpp", "rank": 47, "score": 4.863277341859021 }, { "content": " output << std::endl;\n\n }\n\n break;\n\n case 2:\n\n for (int i = 0; i < XDIM; i++){\n\n for (int j = 0; j < YDIM; j++)\n\n output << (int)(x[i][j][slice]*255.0) << \" \";\n\n output << std::endl;\n\n }\n\n break;\n\n default: throw std::logic_error(\"Invalid axis in WriteAsImage()\");\n\n }\n\n \n\n output.close();\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_7/Utilities.cpp", "rank": 48, "score": 4.863277341859021 }, { "content": "#include \"MatVecMultiply.h\"\n\n\n\n #define DO_NOT_USE_MKL\n\n#ifndef DO_NOT_USE_MKL\n\n#include <mkl.h>\n\n#endif\n\n\n\nvoid MatVecMultiply(CSRMatrix& mat, const float *x, float *y)\n\n{\n\n int N = mat.mSize;\n\n const auto rowOffsets = mat.GetRowOffsets();\n\n const auto columnIndices = mat.GetColumnIndices();\n\n const auto values = mat.GetValues();\n\n\n\n#ifdef DO_NOT_USE_MKL \n\n#pragma omp parallel for\n\n for (int i = 0; i < N; i++)\n\n {\n\n y[i] = 0.;\n\n for 
(int k = rowOffsets[i]; k < rowOffsets[i+1]; k++) {\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/MatVecMultiply.cpp", "rank": 50, "score": 3.903857908817053 }, { "content": "#include \"PointwiseOps.h\"\n\n#include \"changes.h\"\n\n\n\n// #define DO_NOT_USE_MKL\n\n#ifndef DO_NOT_USE_MKL\n\n#include <mkl.h>\n\n#endif\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n if (CHANGES) {\n\n int n = XDIM * YDIM * ZDIM;\n\n const float *x_ptr = &x[0][0][0];\n\n float *y_ptr = &y[0][0][0];\n\n\n\n cblas_scopy(n, x_ptr, 1, y_ptr, 1);\n\n } else {\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/PointwiseOps.cpp", "rank": 51, "score": 3.6604121692909004 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_2/Utilities.cpp", "rank": 52, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int 
axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_7/Utilities.cpp", "rank": 53, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_4/Utilities.cpp", "rank": 54, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_3/Utilities.cpp", "rank": 55, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make 
some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_1/Utilities.cpp", "rank": 56, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_6/Utilities.cpp", "rank": 57, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename 
<< filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/Utilities.cpp", "rank": 58, "score": 3.4294162661836256 }, { "content": " Clear(x);\n\n Clear(b);\n\n\n\n // Make some of the boundary of values of x non-zero\n\n // (this operation is far too simple to be worth parallelizing)\n\n\n\n for(int i = XDIM/4; i < 3*(XDIM/4); i++)\n\n for(int j = XDIM/4; j < 3*(XDIM/4); j++)\n\n x[i][j][0] = 1.;\n\n}\n\n\n\nvoid WriteAsImage(const std::string& filenamePrefix, const float (&x)[XDIM][YDIM][ZDIM], const int count, const int axis, const int slice)\n\n{\n\n std::ostringstream filename;\n\n filename << filenamePrefix << \".\" << std::setfill('0') << std::setw(4) << count << \".pgm\";\n\n \n\n std::ofstream output(filename.str());\n\n\n\n output << \"P2\" << std::endl;\n\n switch(axis){\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/Utilities.cpp", "rank": 59, "score": 3.4294162661836256 }, { "content": " if (k > 1)\n\n matrixHelper( LinearIndex(i, j, k), LinearIndex(i, j, k-1) ) = 1.;\n\n }\n\n\n\n return matrixHelper.ConvertToCSRMatrix();\n\n}\n\n\n\nvoid ComputeLaplacian(CSRMatrix& laplacianMatrix,\n\n const float (&u)[XDIM][YDIM][ZDIM], float (&Lu)[XDIM][YDIM][ZDIM],\n\n bool usingSymmetricLowerTriangular)\n\n{ \n\n // Treat the arrays u & Lu as flattened vectors, and apply matrix-vector multiplication \n\n if(usingSymmetricLowerTriangular)\n\n SymmetricLowerTriangularMatVecMultiply(laplacianMatrix, &u[0][0][0], &Lu[0][0][0]);\n\n else\n\n MatVecMultiply(laplacianMatrix, &u[0][0][0], &Lu[0][0][0]);\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_6/Laplacian.cpp", "rank": 60, "score": 3.319981446282198 }, { "content": " if (j < YDIM-2)\n\n matrixHelper( LinearIndex(i, j, k), LinearIndex(i, j+1, k) ) = 1.;\n\n if (j > 1)\n\n matrixHelper( LinearIndex(i, j, k), LinearIndex(i, j-1, k) 
) = 1.;\n\n if (k < ZDIM-2)\n\n matrixHelper( LinearIndex(i, j, k), LinearIndex(i, j, k+1) ) = 1.;\n\n if (k > 1)\n\n matrixHelper( LinearIndex(i, j, k), LinearIndex(i, j, k-1) ) = 1.;\n\n }\n\n\n\n return matrixHelper.ConvertToCSRMatrix();\n\n}\n\n\n\nvoid ComputeLaplacian(CSRMatrix& laplacianMatrix,\n\n const float (&u)[XDIM][YDIM][ZDIM], float (&Lu)[XDIM][YDIM][ZDIM], bool usingTranspose)\n\n{ \n\n // Treat the arrays u & Lu as flattened vectors, and apply matrix-vector multiplication \n\n if(usingTranspose)\n\n MatTransposeVecMultiply(laplacianMatrix, &u[0][0][0], &Lu[0][0][0]);\n\n else\n\n MatVecMultiply(laplacianMatrix, &u[0][0][0], &Lu[0][0][0]);\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/Laplacian.cpp", "rank": 61, "score": 3.083790757123701 }, { "content": "\n\n### Changes made\n\n\n\n- I switched the `ConjugateGradients` function to use the \n\n `ComputeLaplacian` implementation from `LaplaceSolver_1_3`.\n\n- I switched `InnerProduct` to use `cblas_sdot`.\n\n- I switched `Copy` to use `cblas_scopy`\n\n- Created a bash script to automate testing.\n\n\n\n### Behavior effects\n\n\n\nThe results are very close, but the last few float digits \n\ndiffer. This difference began upon switching `InnerProduct`\n\nto use `cblas_dsot`. I suspect that this is because the \n\noriginal implementation summed into a `double` and casted\n\ninto a `float` at the end, whereas `cblas_dsot` may sum\n\ninto a `float`.\n\n\n\nIf this behavior doesn't occur on others' computers, I \n\nwould conjecture that it may be because I am running an AMD\n\nprocessor, whereas others usually don't.\n\n\n\n### Performance effects\n\n\n\nI ran these tests on my PC, which runs an AMD Threadripper \n\n1920x with 12 physical / 24 logical cores. This PC's\n\nRAM access speed is 3200 MHz across 2 channels. 
It's worth \n\nnoting that intel MKL does not advertise performance \n\nimprovements for non-intel processors, and this is a \n\nnon-intel processor.\n\n\n\nNevertheless, these optimizations did exhibit a slight \n\noverall performance increase.\n\n\n\n```\n\n>>>> with changes\n\n\n\nreal 0m28.735s\n\nuser 0m43.225s\n\nsys 0m7.466s\n\n>>>> without changes\n\n\n\nreal 0m29.435s\n\nuser 1m6.216s\n\nsys 0m11.880s\n\n```\n\n\n\nWhat I find particularly interesting is that, although the \n\nreal time spent is only marginally smaller (~1.3s shaved off),\n\nthe user time exhibits significant improvement (nearly cut \n\nin half). I suspect that intel MKL realized that parallelization\n\nof these routines would exhibit diminishing returns at a\n\ncertain point, and decided to stop parallelizing at this\n\npoint. This would seem correct, because I cheapskated on the\n\nRAM in my computer and so it has much more concurrency in CPU\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/WRITEUP.md", "rank": 62, "score": 3.045227813320674 }, { "content": "#include \"MatMatMultiply.h\"\n\n\n\nvoid MatMatMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n\n\n\n /*\n\n // phoenix note: this looks incorrect\n\n#pragma omp parallel for\n\n for (int i = 0; i < NBLOCKS; i++)\n\n for (int j = 0; j < NBLOCKS; j++)\n\n C[i][j] = 0.;\n", "file_path": "DenseAlgebra/GEMM_Test_2_2/MatMatMultiply.cpp", "rank": 64, "score": 2.89269746257469 }, { "content": " float 
(&z)[XDIM][YDIM][ZDIM],\n\n const float scale, int line, std::string nth)\n\n{\n\n Timer timer;\n\n timer.Start();\n\n \n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n \n\n timer.Stop(\"KERNEL \" + nth + \"Saxpy() on line \" + std::to_string(line) + \" : Time = \");\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_2/PointwiseOps.cpp", "rank": 65, "score": 2.8787562758524783 }, { "content": " const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n\n\n\n#pragma omp parallel for\n\n for (int i = 0; i < MATRIX_SIZE; i++)\n\n for (int j = 0; j < MATRIX_SIZE; j++)\n\n C[i][j] = 0.; \n\n\n\n#pragma omp parallel for\n\n for (int bi = 0; bi < NBLOCKS; bi++)\n\n for (int bj = 0; bj < NBLOCKS; bj++)\n\n for (int bk = 0; bk < NBLOCKS; bk++) { \n", "file_path": "DenseAlgebra/GEMM_Test_2_7/MatMatMultiply.cpp", "rank": 66, "score": 2.85191485380793 }, { "content": "void MatMatTransposeMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n 
auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n\n\n\n#pragma omp parallel for\n\n for (int i = 0; i < MATRIX_SIZE; i++)\n\n for (int j = 0; j < MATRIX_SIZE; j++)\n\n C[i][j] = 0.; \n\n\n\n#pragma omp parallel for\n\n for (int bi = 0; bi < NBLOCKS; bi++)\n\n for (int bj = 0; bj < NBLOCKS; bj++)\n", "file_path": "DenseAlgebra/GEMM_Test_2_6/MatMatMultiply.cpp", "rank": 67, "score": 2.786440604430804 }, { "content": " const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n\n\n\n for (int i = 0; i < MATRIX_SIZE; i++)\n\n for (int j = 0; j < MATRIX_SIZE; j++)\n\n C[i][j] = 0.; \n\n\n\n for (int bi = 0; bi < NBLOCKS; bi++)\n\n for (int bj = 0; bj < NBLOCKS; bj++)\n\n for (int bk = 0; bk < NBLOCKS; bk++) \n\n for (int ii = 0; ii < BLOCK_SIZE; ii++)\n\n for (int jj = 0; jj < BLOCK_SIZE; jj++)\n", "file_path": "DenseAlgebra/GEMM_Test_2_5/MatMatMultiply.cpp", "rank": 68, "score": 2.7737048634820387 }, { "content": "alignas(64) float localB[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localC[BLOCK_SIZE][BLOCK_SIZE];\n\n\n\n#pragma omp threadprivate(localA, localB, localC)\n\n\n\nvoid MatMatTransposeMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = 
const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n\n\n\n#pragma omp parallel for\n\n for (int i = 0; i < MATRIX_SIZE; i++)\n\n for (int j = 0; j < MATRIX_SIZE; j++)\n", "file_path": "DenseAlgebra/GEMM_Test_2_8/MatMatMultiply.cpp", "rank": 69, "score": 2.6641151168402706 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"MatMatMultiplyBlockHelper.h\"\n\n#include \"mkl.h\"\n\n\n\nalignas(64) float localA[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localB[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localC[BLOCK_SIZE][BLOCK_SIZE];\n\n\n\n#pragma omp threadprivate(localA, localB, localC)\n\n\n\nvoid MatMatMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n", "file_path": "DenseAlgebra/GEMM_Test_2_11/MatMatMultiply.cpp", "rank": 70, "score": 2.652470699545834 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nalignas(64) float localA[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localB[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localC[BLOCK_SIZE][BLOCK_SIZE];\n\n\n\n#pragma omp threadprivate(localA, localB, localC)\n\n\n\nvoid MatMatMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using 
blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n", "file_path": "DenseAlgebra/GEMM_Test_2_9/MatMatMultiply.cpp", "rank": 71, "score": 2.6294845938133085 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nalignas(64) float localA[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localB[BLOCK_SIZE][BLOCK_SIZE];\n\nalignas(64) float localC[BLOCK_SIZE][BLOCK_SIZE];\n\n\n\n#pragma omp threadprivate(localA, localB, localC)\n\n\n\nvoid MatMatMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n const float (&B)[MATRIX_SIZE][MATRIX_SIZE], float (&C)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n static constexpr int NBLOCKS = MATRIX_SIZE / BLOCK_SIZE;\n\n\n\n using blocked_matrix_t = float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n using const_blocked_matrix_t = const float (&) [NBLOCKS][BLOCK_SIZE][NBLOCKS][BLOCK_SIZE];\n\n\n\n auto blockA = reinterpret_cast<const_blocked_matrix_t>(A[0][0]);\n\n auto blockB = reinterpret_cast<const_blocked_matrix_t>(B[0][0]);\n\n auto blockC = reinterpret_cast<blocked_matrix_t>(C[0][0]);\n", "file_path": "DenseAlgebra/GEMM_Test_2_10/MatMatMultiply.cpp", "rank": 72, "score": 2.6294845938133085 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for 
(int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/PointwiseOps.cpp", "rank": 74, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_4/PointwiseOps.cpp", "rank": 75, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_6/PointwiseOps.cpp", "rank": 76, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for 
(int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_1/PointwiseOps.cpp", "rank": 77, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_0/PointwiseOps.cpp", "rank": 78, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < 
ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_7/PointwiseOps.cpp", "rank": 79, "score": 2.5626833014685766 }, { "content": "#include \"PointwiseOps.h\"\n\n\n\nvoid Copy(const float (&x)[XDIM][YDIM][ZDIM], float (&y)[XDIM][YDIM][ZDIM])\n\n{\n\n#pragma omp parallel for \n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n y[i][j][k] = x[i][j][k];\n\n}\n\n\n\nvoid Saxpy(const float (&x)[XDIM][YDIM][ZDIM], const float (&y)[XDIM][YDIM][ZDIM],\n\n float (&z)[XDIM][YDIM][ZDIM],\n\n const float scale)\n\n{\n\n // Should we use OpenMP parallel for here?\n\n for (int i = 1; i < XDIM-1; i++)\n\n for (int j = 1; j < YDIM-1; j++)\n\n for (int k = 1; k < ZDIM-1; k++)\n\n z[i][j][k] = x[i][j][k] * scale + y[i][j][k];\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_3/PointwiseOps.cpp", "rank": 80, "score": 2.5626833014685766 }, { "content": "\n\n using matrix_t = float (&) [MATRIX_SIZE][MATRIX_SIZE];\n\n\n\n matrix_t A = reinterpret_cast<matrix_t>(*Araw);\n\n matrix_t B = reinterpret_cast<matrix_t>(*Braw);\n\n matrix_t C = reinterpret_cast<matrix_t>(*Craw);\n\n\n\n InitializeMatrices(A, B);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n MatMatMultiply(A, B, C);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n", "file_path": "DenseAlgebra/GEMM_Test_2_2/main.cpp", "rank": 81, "score": 2.499356354687021 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n // this strangeness is simply to place it on the heap\n\n // since it is very large\n\n // while still keeping it flattened\n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = 
reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n", "file_path": "LaplacianStencil/LaplacianStencil_0_0/main.cpp", "rank": 82, "score": 2.4004913687362732 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nvoid MatTranspose(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n float (&AT)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n mkl_somatcopy(\n\n 'R', // Matrix A is in row-major format\n\n 'T', // We are performing a transposition operation\n\n MATRIX_SIZE, // Dimensions of matrix -- rows ...\n\n MATRIX_SIZE, // ... and columns\n\n 1., // No scaling\n\n &A[0][0], // Input matrix\n\n MATRIX_SIZE, // Leading dimension (here, just the matrix dimension)\n\n &AT[0][0], // Output matrix\n\n MATRIX_SIZE // Leading dimension\n\n );\n\n}\n\n\n\nalignas(64) float localA[BLOCK_SIZE][BLOCK_SIZE];\n", "file_path": "DenseAlgebra/GEMM_Test_2_8/MatMatMultiply.cpp", "rank": 83, "score": 2.364109040167911 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nvoid MatTranspose(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n float (&AT)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n mkl_somatcopy(\n\n 'R', // Matrix A is in row-major format\n\n 'T', // We are performing a transposition operation\n\n MATRIX_SIZE, // Dimensions of matrix -- rows ...\n\n MATRIX_SIZE, // ... 
and columns\n\n 1., // No scaling\n\n &A[0][0], // Input matrix\n\n MATRIX_SIZE, // Leading dimension (here, just the matrix dimension)\n\n &AT[0][0], // Output matrix\n\n MATRIX_SIZE // Leading dimension\n\n );\n\n}\n\n\n\n// Matrix A is presumed to be row-major, matrix B presumed to be column-major\n", "file_path": "DenseAlgebra/GEMM_Test_2_6/MatMatMultiply.cpp", "rank": 84, "score": 2.328398098602444 }, { "content": " const int j = columnIndices[k];\n\n y[i] += values[k] * x[j];\n\n }\n\n }\n\n#else\n\n // my intel mkl implementation said that this was called wrongly and panicked\n\n \n\n mkl_cspblas_scsrgemv( // (S)parse (CSR) (Ge)neral matrix (M)atrix-(V)ector product\n\n \"N\", // Use the normal matrix, not its transpose\n\n &N, // Size of the matrix\n\n values, // values array (MKL denotes this as \"a\")\n\n rowOffsets, // rowOffsets array (MKL denotes this as \"ia\")\n\n columnIndices, // columnIndices array (MKL denotes this as \"ja\")\n\n x, // Vector getting multiplied\n\n y // Vector where the product gets stored\n\n );\n\n#endif\n\n}\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/MatVecMultiply.cpp", "rank": 85, "score": 2.326858078064201 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nvoid MatTranspose(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n float (&AT)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n mkl_somatcopy(\n\n 'R', // Matrix A is in row-major format\n\n 'T', // We are performing a transposition operation\n\n MATRIX_SIZE, // Dimensions of matrix -- rows ...\n\n MATRIX_SIZE, // ... 
and columns\n\n 1., // No scaling\n\n &A[0][0], // Input matrix\n\n MATRIX_SIZE, // Leading dimension (here, just the matrix dimension)\n\n &AT[0][0], // Output matrix\n\n MATRIX_SIZE // Leading dimension\n\n );\n\n}\n\n\n\nvoid MatMatTransposeMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n", "file_path": "DenseAlgebra/GEMM_Test_2_7/MatMatMultiply.cpp", "rank": 86, "score": 2.310944167601779 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nvoid MatTranspose(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n float (&AT)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n mkl_somatcopy(\n\n 'R', // Matrix A is in row-major format\n\n 'T', // We are performing a transposition operation\n\n MATRIX_SIZE, // Dimensions of matrix -- rows ...\n\n MATRIX_SIZE, // ... and columns\n\n 1., // No scaling\n\n &A[0][0], // Input matrix\n\n MATRIX_SIZE, // Leading dimension (here, just the matrix dimension)\n\n &AT[0][0], // Output matrix\n\n MATRIX_SIZE // Leading dimension\n\n );\n\n}\n\n\n\nvoid MatMatTransposeMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n", "file_path": "DenseAlgebra/GEMM_Test_2_5/MatMatMultiply.cpp", "rank": 87, "score": 2.310944167601779 }, { "content": "#include \"MatMatMultiply.h\"\n\n#include \"mkl.h\"\n\n\n\nvoid MatTranspose(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n\n float (&AT)[MATRIX_SIZE][MATRIX_SIZE])\n\n{\n\n mkl_somatcopy(\n\n 'R', // Matrix A is in row-major format\n\n 'T', // We are performing a transposition operation\n\n MATRIX_SIZE, // Dimensions of matrix -- rows ...\n\n MATRIX_SIZE, // ... 
and columns\n\n 1., // No scaling\n\n &A[0][0], // Input matrix\n\n MATRIX_SIZE, // Leading dimension (here, just the matrix dimension)\n\n &AT[0][0], // Output matrix\n\n MATRIX_SIZE // Leading dimension\n\n );\n\n}\n\n\n\nvoid MatMatTransposeMultiply(const float (&A)[MATRIX_SIZE][MATRIX_SIZE],\n", "file_path": "DenseAlgebra/GEMM_Test_2_4/MatMatMultiply.cpp", "rank": 88, "score": 2.310944167601779 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_8/main.cpp", "rank": 89, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_3/main.cpp", "rank": 90, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint 
main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_6/main.cpp", "rank": 91, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_5/main.cpp", "rank": 92, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n 
}\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_2/main.cpp", "rank": 93, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_4/main.cpp", "rank": 94, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM];\n\n float *LuRaw = new float [XDIM*YDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_1/main.cpp", "rank": 95, "score": 2.2576076397068494 }, { "content": "#include \"Timer.h\"\n\n\n\n#include \"Laplacian.h\"\n\n#include \"Utilities.h\"\n\n#include \"PointwiseOps.h\"\n\n#include \"Reductions.h\"\n\n\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM][ZDIM];\n\n\n\n float *xRaw = new float [XDIM*YDIM*ZDIM];\n\n float *fRaw = new float [XDIM*YDIM*ZDIM];\n\n float *pRaw = new 
float [XDIM*YDIM*ZDIM];\n\n float *rRaw = new float [XDIM*YDIM*ZDIM];\n\n float *zRaw = new float [XDIM*YDIM*ZDIM];\n\n \n\n array_t x = reinterpret_cast<array_t>(*xRaw);\n\n array_t f = reinterpret_cast<array_t>(*fRaw);\n", "file_path": "LaplaceSolver/LaplaceSolver_1_1/main.cpp", "rank": 96, "score": 2.2083157947529877 }, { "content": "#include \"Timer.h\"\n\n#include \"Laplacian.h\"\n\n\n\n#include <iomanip>\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM][ZDIM];\n\n \n\n float *uRaw = new float [XDIM*YDIM*ZDIM];\n\n float *LuRaw = new float [XDIM*YDIM*ZDIM];\n\n array_t u = reinterpret_cast<array_t>(*uRaw);\n\n array_t Lu = reinterpret_cast<array_t>(*LuRaw);\n\n\n\n Timer timer;\n\n\n\n for(int test = 1; test <= 10; test++)\n\n {\n\n std::cout << \"Running test iteration \" << std::setw(2) << test << \" \";\n\n timer.Start();\n\n ComputeLaplacian(u, Lu);\n\n timer.Stop(\"Elapsed time : \");\n\n }\n\n \n\n return 0;\n\n}\n", "file_path": "LaplacianStencil/LaplacianStencil_0_10/main.cpp", "rank": 97, "score": 2.2083157947529877 }, { "content": "#include \"ConjugateGradients.h\"\n\n#include \"Laplacian.h\"\n\n#include \"Timer.h\"\n\n#include \"Utilities.h\"\n\n\n\nTimer timerLaplacian;\n\nTimer timerSaxpy;\n\n\n\nint main(int argc, char *argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM][ZDIM];\n\n\n\n float *xRaw = new float [XDIM*YDIM*ZDIM];\n\n float *fRaw = new float [XDIM*YDIM*ZDIM];\n\n float *pRaw = new float [XDIM*YDIM*ZDIM];\n\n float *rRaw = new float [XDIM*YDIM*ZDIM];\n\n float *zRaw = new float [XDIM*YDIM*ZDIM];\n\n \n\n array_t x = reinterpret_cast<array_t>(*xRaw);\n\n array_t f = reinterpret_cast<array_t>(*fRaw);\n", "file_path": "LaplaceSolver/LaplaceSolver_1_8/main.cpp", "rank": 98, "score": 2.145846864326646 }, { "content": "#include \"ConjugateGradients.h\"\n\n#include \"Laplacian.h\"\n\n#include \"Timer.h\"\n\n#include \"Utilities.h\"\n\n\n\nTimer timerLaplacian;\n\n\n\nint main(int argc, char 
*argv[])\n\n{\n\n using array_t = float (&) [XDIM][YDIM][ZDIM];\n\n\n\n float *xRaw = new float [XDIM*YDIM*ZDIM];\n\n float *fRaw = new float [XDIM*YDIM*ZDIM];\n\n float *pRaw = new float [XDIM*YDIM*ZDIM];\n\n float *rRaw = new float [XDIM*YDIM*ZDIM];\n\n float *zRaw = new float [XDIM*YDIM*ZDIM];\n\n \n\n array_t x = reinterpret_cast<array_t>(*xRaw);\n\n array_t f = reinterpret_cast<array_t>(*fRaw);\n\n array_t p = reinterpret_cast<array_t>(*pRaw);\n", "file_path": "LaplaceSolver/LaplaceSolver_1_5/main.cpp", "rank": 99, "score": 2.1159192576840447 } ]
Rust
rs/registry/canister/tests/tests/add_node_operator.rs
contropist/ic
9240bea7dc0239fcbc5d43ad11f3ca803ee9bb11
use candid::Encode; use dfn_candid::candid; use dfn_core::api::PrincipalId; use ic_nervous_system_common_test_keys::TEST_NEURON_1_OWNER_PRINCIPAL; use ic_nns_test_utils::registry::invariant_compliant_mutation_as_atomic_req; use ic_nns_test_utils::{ itest_helpers::{ forward_call_via_universal_canister, local_test_on_nns_subnet, set_up_registry_canister, set_up_universal_canister, }, registry::get_value, }; use ic_protobuf::registry::node_operator::v1::NodeOperatorRecord; use ic_registry_keys::make_node_operator_record_key; use registry_canister::{ init::{RegistryCanisterInitPayload, RegistryCanisterInitPayloadBuilder}, mutations::do_add_node_operator::AddNodeOperatorPayload, }; use assert_matches::assert_matches; use std::collections::BTreeMap; #[test] fn test_the_anonymous_user_cannot_add_a_node_operator() { local_test_on_nns_subnet(|runtime| async move { let registry = set_up_registry_canister(&runtime, RegistryCanisterInitPayload::default()).await; let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; let response: Result<(), String> = registry .update_("add_node_operator", candid, (payload.clone(),)) .await; assert_matches!(response, Err(s) if s.contains("is not authorized to call this method: add_node_operator")); let key = make_node_operator_record_key(PrincipalId::new_anonymous()).into_bytes(); assert_eq!( get_value::<NodeOperatorRecord>(&registry, &key).await, NodeOperatorRecord::default() ); Ok(()) }); } #[test] fn test_a_canister_other_than_the_governance_canister_cannot_add_a_node_operator() { local_test_on_nns_subnet(|runtime| async move { let attacker_canister = set_up_universal_canister(&runtime).await; assert_ne!( attacker_canister.canister_id(), ic_nns_constants::GOVERNANCE_CANISTER_ID ); let registry = set_up_registry_canister(&runtime, 
RegistryCanisterInitPayload::default()).await; let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( !forward_call_via_universal_canister( &attacker_canister, &registry, "add_node_operator", Encode!(&payload).unwrap() ) .await ); let key = make_node_operator_record_key(PrincipalId::new_anonymous()).into_bytes(); assert_eq!( get_value::<NodeOperatorRecord>(&registry, &key).await, NodeOperatorRecord::default() ); Ok(()) }); } #[test] fn test_accepted_proposal_mutates_the_registry() { local_test_on_nns_subnet(|runtime| async move { let registry = set_up_registry_canister( &runtime, RegistryCanisterInitPayloadBuilder::new() .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req()) .build(), ) .await; let fake_proposal_canister = set_up_universal_canister(&runtime).await; assert_eq!( fake_proposal_canister.canister_id(), ic_nns_constants::GOVERNANCE_CANISTER_ID ); let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload).unwrap() ) .await ); assert_eq!( get_value::<NodeOperatorRecord>( &registry, make_node_operator_record_key(PrincipalId::new_anonymous()).as_bytes() ) .await, NodeOperatorRecord { node_operator_principal_id: PrincipalId::new_anonymous().to_vec(), node_allowance: 5, node_provider_principal_id: PrincipalId::new_anonymous().to_vec(), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, } ); let payload2 = AddNodeOperatorPayload { node_operator_principal_id: Some(*TEST_NEURON_1_OWNER_PRINCIPAL), 
node_allowance: 120, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "BC1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload2).unwrap() ) .await ); assert_eq!( get_value::<NodeOperatorRecord>( &registry, make_node_operator_record_key(*TEST_NEURON_1_OWNER_PRINCIPAL).as_bytes() ) .await, NodeOperatorRecord { node_operator_principal_id: TEST_NEURON_1_OWNER_PRINCIPAL.to_vec(), node_allowance: 120, node_provider_principal_id: PrincipalId::new_anonymous().to_vec(), dc_id: "BC1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, } ); let payload3 = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 567, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "CA1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( !forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload3).unwrap() ) .await ); Ok(()) }); }
use candid::Encode; use dfn_candid::candid; use dfn_core::api::PrincipalId; use ic_nervous_system_common_test_keys::TEST_NEURON_1_OWNER_PRINCIPAL; use ic_nns_test_utils::registry::invariant_compliant_mutation_as_atomic_req; use ic_nns_test_utils::{ itest_helpers::{ forward_call_via_universal_canister, local_test_on_nns_subnet, set_up_registry_canister, set_up_universal_canister, }, registry::get_value, }; use ic_protobuf::registry::node_operator::v1::NodeOperatorRecord; use ic_registry_keys::make_node_operator_record_key; use registry_canister::{ init::{RegistryCanisterInitPayload, RegistryCanisterInitPayloadBuilder}, mutations::do_add_node_operator::AddNodeOperatorPayload, }; use assert_matches::assert_matches; use std::collections::BTreeMap; #[test] fn test_the_anonymous_user_cannot_add_a_node_operator() { local_test_on_nns_subnet(|runtime| async move { let registry = set_up_registry_canister(&runtime, RegistryCanisterInitPayload::default()).await; let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; let response: Result<(), String> = registry .update_("add_node_operator", candid, (payload.clone(),)) .await; assert_matches!(response, Err(s) if s.contain
#[test] fn test_a_canister_other_than_the_governance_canister_cannot_add_a_node_operator() { local_test_on_nns_subnet(|runtime| async move { let attacker_canister = set_up_universal_canister(&runtime).await; assert_ne!( attacker_canister.canister_id(), ic_nns_constants::GOVERNANCE_CANISTER_ID ); let registry = set_up_registry_canister(&runtime, RegistryCanisterInitPayload::default()).await; let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( !forward_call_via_universal_canister( &attacker_canister, &registry, "add_node_operator", Encode!(&payload).unwrap() ) .await ); let key = make_node_operator_record_key(PrincipalId::new_anonymous()).into_bytes(); assert_eq!( get_value::<NodeOperatorRecord>(&registry, &key).await, NodeOperatorRecord::default() ); Ok(()) }); } #[test] fn test_accepted_proposal_mutates_the_registry() { local_test_on_nns_subnet(|runtime| async move { let registry = set_up_registry_canister( &runtime, RegistryCanisterInitPayloadBuilder::new() .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req()) .build(), ) .await; let fake_proposal_canister = set_up_universal_canister(&runtime).await; assert_eq!( fake_proposal_canister.canister_id(), ic_nns_constants::GOVERNANCE_CANISTER_ID ); let payload = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 5, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload).unwrap() ) .await ); assert_eq!( get_value::<NodeOperatorRecord>( &registry, make_node_operator_record_key(PrincipalId::new_anonymous()).as_bytes() ) .await, NodeOperatorRecord 
{ node_operator_principal_id: PrincipalId::new_anonymous().to_vec(), node_allowance: 5, node_provider_principal_id: PrincipalId::new_anonymous().to_vec(), dc_id: "AN1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, } ); let payload2 = AddNodeOperatorPayload { node_operator_principal_id: Some(*TEST_NEURON_1_OWNER_PRINCIPAL), node_allowance: 120, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "BC1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload2).unwrap() ) .await ); assert_eq!( get_value::<NodeOperatorRecord>( &registry, make_node_operator_record_key(*TEST_NEURON_1_OWNER_PRINCIPAL).as_bytes() ) .await, NodeOperatorRecord { node_operator_principal_id: TEST_NEURON_1_OWNER_PRINCIPAL.to_vec(), node_allowance: 120, node_provider_principal_id: PrincipalId::new_anonymous().to_vec(), dc_id: "BC1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, } ); let payload3 = AddNodeOperatorPayload { node_operator_principal_id: Some(PrincipalId::new_anonymous()), node_allowance: 567, node_provider_principal_id: Some(PrincipalId::new_anonymous()), dc_id: "CA1".into(), rewardable_nodes: BTreeMap::new(), ipv6: None, }; assert!( !forward_call_via_universal_canister( &fake_proposal_canister, &registry, "add_node_operator", Encode!(&payload3).unwrap() ) .await ); Ok(()) }); }
s("is not authorized to call this method: add_node_operator")); let key = make_node_operator_record_key(PrincipalId::new_anonymous()).into_bytes(); assert_eq!( get_value::<NodeOperatorRecord>(&registry, &key).await, NodeOperatorRecord::default() ); Ok(()) }); }
function_block-function_prefixed
[ { "content": "fn test_dapp_method_validate_(payload: i64) -> Result<String, String> {\n\n if payload > 10 {\n\n Ok(format!(\"Value is {}. Valid!\", payload))\n\n } else {\n\n Err(\"Value < 10. Invalid!\".to_string())\n\n }\n\n}\n\n\n", "file_path": "rs/sns/integration_tests/test_canisters/test_dapp_canister.rs", "rank": 0, "score": 443760.0904625361 }, { "content": "#[candid_method(update, rename = \"update_node_directly\")]\n\nfn update_node_directly_(payload: UpdateNodeDirectlyPayload) -> Result<(), String> {\n\n let result = registry_mut().do_update_node_directly(payload);\n\n recertify_registry();\n\n result\n\n}\n\n\n", "file_path": "rs/registry/canister/canister/canister.rs", "rank": 1, "score": 423923.986512926 }, { "content": "#[candid_method(update, rename = \"reroute_canister_range\")]\n\nfn reroute_canister_range_(payload: RerouteCanisterRangePayload) -> Result<(), String> {\n\n if let Err(msg) = registry_mut().reroute_canister_range(payload) {\n\n println!(\"{} Reject: {}\", LOG_PREFIX, msg);\n\n return Err(msg);\n\n }\n\n recertify_registry();\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/registry/canister/canister/canister.rs", "rank": 2, "score": 423923.986512926 }, { "content": "#[candid_method(update, rename = \"add_node\")]\n\nfn add_node_(payload: AddNodePayload) -> Result<NodeId, String> {\n\n let result = registry_mut().do_add_node(payload);\n\n recertify_registry();\n\n result\n\n}\n\n\n", "file_path": "rs/registry/canister/canister/canister.rs", "rank": 3, "score": 413541.2142364456 }, { "content": "#[test]\n\nfn multiple_nodes_are_live() -> Result<(), String> {\n\n // allow settings to be customized when running from commandline\n\n ConsensusRunnerConfig::new_from_env(4, 0)\n\n .and_then(|config| config.parse_extra_config())\n\n .map(|config| {\n\n run_n_rounds_and_collect_hashes(config);\n\n })\n\n}\n\n\n", "file_path": "rs/consensus/tests/integration.rs", "rank": 4, "score": 362931.5137528084 }, { "content": "#[query]\n\nfn query(operation: 
Operation) -> Result<Vec<u8>, String> {\n\n Ok(vec![0; operation.response_size_bytes])\n\n}\n\n\n", "file_path": "rs/rust_canisters/response_payload_test/src/main.rs", "rank": 5, "score": 348453.50629301026 }, { "content": "fn check_error_message<T: std::fmt::Debug>(result: Result<T, String>, expected_substring: &str) {\n\n match result {\n\n Ok(value) => panic!(\n\n \"expected the call to fail with message '{}', got Ok({:?})\",\n\n expected_substring, value\n\n ),\n\n Err(e) => assert!(\n\n e.contains(expected_substring),\n\n \"expected the call to fail with message '{}', got: {}\",\n\n expected_substring,\n\n e\n\n ),\n\n }\n\n}\n\n\n", "file_path": "rs/registry/canister/tests/tests/reroute_canister_range.rs", "rank": 6, "score": 344687.86202384206 }, { "content": "#[query]\n\nfn nans_are_canonicalized(_: ()) -> Result<(), String> {\n\n let nan: Floats = vec![(f32::NAN, f64::NAN)];\n\n\n\n let inf = vec![(f32::INFINITY, f64::INFINITY)];\n\n let neg_inf = vec![(f32::NEG_INFINITY, f64::NEG_INFINITY)];\n\n let pm_inf = or(&inf, &neg_inf);\n\n\n\n let zero = vec![(0.0, 0.0)];\n\n let neg_zero = vec![(-0.0, -0.0)];\n\n let pm_zero = or(&zero, &neg_zero);\n\n\n\n // numbers which are not (±∞), (±0) or NaN\n\n let other = vec![(42.0, 42.0), (-42.0, -42.0)];\n\n\n\n is_canon(\"Regular NaN\", &nan)?;\n\n\n\n is_canon(\"(±0) / (±0)\", &divide(&pm_zero, &pm_zero))?;\n\n is_canon(\"(±∞) / (±∞)\", &divide(&pm_inf, &pm_inf))?;\n\n\n\n is_canon(\"(±0) × (±∞)\", &multiply(&pm_zero, &pm_inf))?;\n", "file_path": "rs/rust_canisters/tests/src/nan_canonicalized.rs", "rank": 7, "score": 341638.8365236069 }, { "content": "/// Checks whether the input string as the form [hostname:port{,hostname:port}]\n\nfn parse_journalbeat_hosts(s: Option<String>) -> Result<Vec<String>> {\n\n const HOST_START: &str = r#\"^(([[:alnum:]]|[[:alnum:]][[:alnum:]\\-]*[[:alnum:]])\\.)*\"#;\n\n const HOST_STOP: &str = r#\"([[:alnum:]]|[[:alnum:]][[:alnum:]\\-]*[[:alnum:]])\"#;\n\n const PORT: &str = 
r#\":[[:digit:]]{2,5}$\"#;\n\n let s = match s {\n\n Some(s) => s,\n\n None => return Ok(vec![]),\n\n };\n\n let rgx = format!(\"{}{}{}\", HOST_START, HOST_STOP, PORT);\n\n let rgx = Regex::new(&rgx).unwrap();\n\n let mut res = vec![];\n\n for target in s.trim().split(',') {\n\n if !rgx.is_match(target) {\n\n bail!(\"Invalid journalbeat host: '{}'\", s);\n\n }\n\n res.push(target.to_string());\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "rs/tests/src/driver/cli.rs", "rank": 8, "score": 341364.17444762634 }, { "content": "fn parse_log_debug_overrides(s: Option<String>) -> Result<Vec<String>> {\n\n let s = match s {\n\n Some(s) => s,\n\n None => return Ok(vec![]),\n\n };\n\n let rgx = r#\"^([\\w]+::)+[\\w]+$\"#.to_string();\n\n let rgx = Regex::new(&rgx).unwrap();\n\n let mut res = vec![];\n\n for target in s.trim().split(',') {\n\n if !rgx.is_match(target) {\n\n bail!(\"Invalid log_debug_overrides: '{}'\", s);\n\n }\n\n res.push(target.to_string());\n\n }\n\n Ok(res)\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(target_os = \"linux\")]\n\nmod tests {\n", "file_path": "rs/tests/src/driver/cli.rs", "rank": 9, "score": 336822.1455515493 }, { "content": "fn parse_u64(s: &str) -> Result<u64, String> {\n\n s.parse::<u64>()\n\n .map_err(|e| format!(\"Can't parse u64: {:?}\", e))\n\n}\n\n\n", "file_path": "rs/tests/src/cli.rs", "rank": 10, "score": 330476.0172822182 }, { "content": "#[test]\n\nfn test_set_ipv6_none() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let node_operator_key = make_node_operator_record_key(*TEST_NEURON_1_OWNER_PRINCIPAL);\n\n let node_operator_record = NodeOperatorRecord {\n\n node_operator_principal_id: (*TEST_NEURON_1_OWNER_PRINCIPAL).to_vec(),\n\n node_allowance: 5,\n\n ipv6: Some(\"0:0:0:0:0:0:0:0\".into()),\n\n ..Default::default()\n\n };\n\n let registry = set_up_registry_canister(\n\n &runtime,\n\n RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n 
.push_init_mutate_request(RegistryAtomicMutateRequest {\n\n mutations: vec![RegistryMutation {\n\n mutation_type: registry_mutation::Type::Insert as i32,\n\n key: node_operator_key.as_bytes().to_vec(),\n\n value: encode_or_panic(&node_operator_record),\n\n }],\n\n preconditions: vec![],\n", "file_path": "rs/registry/canister/tests/tests/update_node_operator_config.rs", "rank": 11, "score": 329954.9720744258 }, { "content": "fn passing(t: Test) -> Result<TestDescAndFn, Test> {\n\n match t {\n\n Test::Passing(t) => Ok(t),\n\n t => Err(t),\n\n }\n\n}\n\n\n", "file_path": "rs/scenario_tests/src/runner.rs", "rank": 12, "score": 329133.64417302515 }, { "content": "fn parse_duration(dur: &str) -> Result<Duration, String> {\n\n if let Some(dur) = dur.strip_suffix(\"ms\") {\n\n Ok(Duration::from_millis(parse_u64(dur)?))\n\n } else if let Some(dur) = dur.strip_suffix('s') {\n\n Ok(Duration::from_secs(parse_u64(dur)?))\n\n } else if let Some(dur) = dur.strip_suffix('m') {\n\n Ok(Duration::from_secs(60 * parse_u64(dur)?))\n\n } else {\n\n Err(\"Can't parse duration unit. 
Try 4000ms, 4s or 4m\".to_string())\n\n }\n\n}\n", "file_path": "rs/tests/src/cli.rs", "rank": 13, "score": 325097.18664114166 }, { "content": "/// Determine sha256 hash of the current replica binary\n\n///\n\n/// Returns tuple (path of the replica binary, hex encoded sha256 of binary)\n\nfn get_replica_binary_hash() -> std::result::Result<(PathBuf, String), String> {\n\n let mut hasher = Sha256::new();\n\n let replica_binary_path = env::current_exe()\n\n .map_err(|e| format!(\"Failed to determine replica binary path: {:?}\", e))?;\n\n\n\n let mut binary_file = std::fs::File::open(&replica_binary_path)\n\n .map_err(|e| format!(\"Failed to open replica binary to calculate hash: {:?}\", e))?;\n\n\n\n std::io::copy(&mut binary_file, &mut hasher)\n\n .map_err(|e| format!(\"Failed to calculate hash for replica binary: {:?}\", e))?;\n\n\n\n Ok((replica_binary_path, hex::encode(hasher.finish())))\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n run().await.unwrap();\n\n}\n\n\n\nasync fn run() -> io::Result<()> {\n", "file_path": "rs/replica/src/main.rs", "rank": 14, "score": 318529.57438152644 }, { "content": "fn transaction_notification(tn: TransactionNotification) -> Result<(), String> {\n\n let count = *COUNTER.read().unwrap();\n\n let res = match count {\n\n 0 => {\n\n println!(\"Rejecting\");\n\n Err(\"Rejected\".to_string())\n\n }\n\n // Succeeds\n\n 1 => Ok(()),\n\n _ => Err(\"This should not be called a third time\".to_string()),\n\n };\n\n let expected_tn = TransactionNotification {\n\n from_subaccount: None,\n\n from: PrincipalId::new_anonymous(),\n\n to_subaccount: None,\n\n amount: Tokens::from_tokens(1).unwrap(),\n\n memo: Memo(0),\n\n block_height: 3,\n\n to: id(),\n\n };\n\n\n\n // Cause the test to fail\n\n if tn != expected_tn {\n\n *COUNTER.write().unwrap() = 99;\n\n }\n\n\n\n *COUNTER.write().unwrap() = count.checked_add(1).unwrap();\n\n res\n\n}\n\n\n", "file_path": "rs/rosetta-api/ledger_canister/test/notified.rs", "rank": 15, "score": 
318142.76621955505 }, { "content": "fn main() -> Result<(), String> {\n\n // Check if `drun` is running in the canister sandbox mode where it waits\n\n // for commands from the parent process. This check has to be performed\n\n // before the arguments are parsed because the parent process does not pass\n\n // all the normally required arguments of `drun`.\n\n if std::env::args().any(|arg| arg == RUN_AS_CANISTER_SANDBOX_FLAG) {\n\n canister_sandbox_main();\n\n Ok(())\n\n } else if std::env::args().any(|arg| arg == RUN_AS_SANDBOX_LAUNCHER_FLAG) {\n\n sandbox_launcher_main();\n\n Ok(())\n\n } else {\n\n drun_main()\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn drun_main() -> Result<(), String> {\n\n let matches = get_arg_matches();\n\n Config::run_with_temp_config(|default_config| {\n", "file_path": "rs/drun/src/main.rs", "rank": 16, "score": 317687.4462733853 }, { "content": "#[candid_method(query, rename = \"get_node_providers_monthly_xdr_rewards\")]\n\nfn get_node_providers_monthly_xdr_rewards_() -> Result<NodeProvidersMonthlyXdrRewards, String> {\n\n registry().get_node_providers_monthly_xdr_rewards()\n\n}\n\n\n", "file_path": "rs/registry/canister/canister/canister.rs", "rank": 17, "score": 312052.8080184377 }, { "content": "#[candid_method(update, rename = \"add_node_operator\")]\n\nfn add_node_operator_(payload: AddNodeOperatorPayload) {\n\n registry_mut().do_add_node_operator(payload);\n\n recertify_registry();\n\n}\n\n\n", "file_path": "rs/registry/canister/canister/canister.rs", "rank": 18, "score": 309777.82352378464 }, { "content": "fn execute_ssh_command(session: &Session, ssh_command: String) -> Result<String, io::Error> {\n\n let mut channel = session.channel_session()?;\n\n channel.exec(\"bash\")?;\n\n channel.write_all(ssh_command.as_bytes())?;\n\n channel.flush()?;\n\n channel.send_eof()?;\n\n let mut stderr = String::new();\n\n let mut command_output = String::new();\n\n channel.stderr().read_to_string(&mut stderr)?;\n\n channel.read_to_string(&mut 
command_output)?;\n\n if !stderr.is_empty() {\n\n panic!(\"Channel exited with an stderr=\\n{}\", stderr);\n\n }\n\n channel.close()?;\n\n channel.wait_close()?;\n\n let exit_code = channel.exit_status()?;\n\n if exit_code != 0 {\n\n panic!(\"Channel exited with an exit code {}.\", exit_code);\n\n }\n\n Ok(command_output)\n\n}\n\n\n", "file_path": "rs/tests/src/networking/network_reliability.rs", "rank": 19, "score": 308874.41988663335 }, { "content": "/// Is this a canonicalized NaN?\n\nfn is_canon(msg: &str, inputs: &Floats) -> Result<(), String> {\n\n for (i, (float, double)) in inputs.iter().enumerate() {\n\n {\n\n let bits = float.to_bits();\n\n let target = CANON_32BIT_NAN;\n\n if bits != target {\n\n return Err(format!(\n\n \"In {} f32 element {} \\nExpected: \\t0x{:x}\\nFound: \\t0x{:x}\",\n\n msg, i, target, bits\n\n ));\n\n }\n\n }\n\n {\n\n let bits = double.to_bits();\n\n let target = CANON_64BIT_NAN;\n\n if bits != target {\n\n return Err(format!(\n\n \"In {} f64 element {} \\nExpected: \\t0x{:x}\\nFound: \\t0x{:x}\",\n\n msg, i, target, bits\n\n ));\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/rust_canisters/tests/src/nan_canonicalized.rs", "rank": 20, "score": 303669.5983742932 }, { "content": "pub fn filter_special_keys(keys: Vec<String>) -> Vec<String> {\n\n keys.iter()\n\n .filter(|k| !k.starts_with(SPECIAL_FIELD_PREFIX))\n\n .cloned()\n\n .collect()\n\n}\n\n\n", "file_path": "rs/registry/regedit/src/tests.rs", "rank": 21, "score": 300078.0022759518 }, { "content": "fn slice_to_string(slice: CertifiedStreamSlice) -> String {\n\n UnpackedStreamSlice::try_from(slice.clone())\n\n .map(|unpacked| format!(\"{:?}\", unpacked))\n\n .unwrap_or(format!(\"{:?}\", slice))\n\n}\n", "file_path": "rs/xnet/payload_builder/tests/common/mod.rs", "rank": 22, "score": 299771.3010309817 }, { "content": "#[candid_method(query, rename = \"get_account\")]\n\nfn get_account_(address: String) -> Result<AccountState, String> {\n\n 
gtc().get_account(&address)\n\n}\n\n\n\n/// Claim the caller's GTC neurons (on behalf of the caller) and return the IDs\n\n/// of these neurons\n", "file_path": "rs/nns/gtc/canister/canister.rs", "rank": 23, "score": 298764.00909710536 }, { "content": "/// Validates and renders a proposal with action Motion.\n\nfn validate_and_render_motion(motion: &Motion) -> Result<String, String> {\n\n validate_len(\n\n \"motion.motion_text\",\n\n &motion.motion_text,\n\n 0, // min\n\n PROPOSAL_MOTION_TEXT_BYTES_MAX,\n\n )?;\n\n\n\n Ok(format!(\n\n r\"# Motion Proposal:\n\n## Motion Text:\n\n\n\n{}\",\n\n &motion.motion_text\n\n ))\n\n}\n\n\n", "file_path": "rs/sns/governance/src/proposal.rs", "rank": 24, "score": 298756.6680350703 }, { "content": "fn validate_method_name(method_name: &str) -> Result<String, String> {\n\n fn is_ident_start(c: char) -> bool {\n\n c.is_ascii() && (c.is_alphabetic() || c == '_')\n\n }\n\n\n\n fn is_ident_tail(c: char) -> bool {\n\n c.is_ascii() && (c.is_alphanumeric() || c == '_')\n\n }\n\n\n\n let mut chars = method_name.chars();\n\n let is_legal_start = chars.next().map(is_ident_start).unwrap_or(false);\n\n let is_legal_tail = chars.all(is_ident_tail);\n\n\n\n if !(is_legal_start && is_legal_tail) {\n\n Err(format!(\"Illegal method name: {}.\", method_name))\n\n } else {\n\n Ok(String::from(method_name))\n\n }\n\n}\n\n\n", "file_path": "rs/drun/src/message.rs", "rank": 25, "score": 298756.6680350703 }, { "content": "/// Processes a command from the command line.\n\npub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Lib => clib::main(args),\n\n Command::Csp => csp::main(args),\n\n _ => unimplemented!(),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"rs/crypto/src/cli/mod.rs", "rank": 26, "score": 295520.4411886328 }, { "content": "fn core(signatures: &[String]) -> Result<(), (String, i32)> {\n\n let algorithm_id = AlgorithmId::MultiBls12_381;\n\n\n\n let signatures: Result<Vec<(CspPublicKey, CspSignature)>, (String, i32)> = signatures\n\n .iter()\n\n .map(|tuple| {\n\n let parts: Vec<&str> = tuple.split(';').collect();\n\n match parts[..] {\n\n [public_key, signature] => to_csp_key_signature_pair(public_key, signature)\n\n .map_err(|e| (format!(\"Parsing key;signature pairs failed: {:?}\", e), 2)),\n\n\n\n _ => Err((\n\n format!(\"Malformed <public_key>;<signature> pair: {:?}\", tuple),\n\n 2,\n\n )),\n\n }\n\n })\n\n .collect();\n\n let signatures = signatures?;\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/combine_signatures.rs", "rank": 27, "score": 295354.01802414097 }, { "content": "// Returns the subnet id for the given CUP.\n\nfn get_subnet_id(registry: &dyn RegistryClient, cup: &CatchUpPackage) -> Result<SubnetId, String> {\n\n let dkg_summary = &cup\n\n .content\n\n .block\n\n .get_value()\n\n .payload\n\n .as_ref()\n\n .as_summary()\n\n .dkg;\n\n // Note that although sometimes CUPs have no signatures (e.g. genesis and\n\n // recovery CUPs) they always have the signer id (the DKG id), which is taken\n\n // from the high-threshold transcript when we build a genesis/recovery CUP.\n\n let dkg_id = cup.signature.signer;\n\n use ic_types::crypto::threshold_sig::ni_dkg::NiDkgTargetSubnet;\n\n // If the DKG key material was signed by the subnet itself — use it, if not, get\n\n // the subnet id from the registry.\n\n match dkg_id.target_subnet {\n\n NiDkgTargetSubnet::Local => Ok(dkg_id.dealer_subnet),\n\n // If we hit this case, than the local CUP is a genesis or recovery CUP of an application\n\n // subnet. 
We cannot derive the subnet id from it, so we use the registry version of\n", "file_path": "rs/orchestrator/src/upgrade.rs", "rank": 28, "score": 294974.3524254777 }, { "content": "fn parse_pattern(p: Option<String>) -> Result<Option<Regex>, regex::Error> {\n\n match p.map(|p| Regex::new(&p)) {\n\n None => Ok(None),\n\n Some(Ok(r)) => Ok(Some(r)),\n\n Some(Err(e)) => Err(e),\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ValidatedCliArgs {\n\n pub log_base_dir: Option<PathBuf>,\n\n pub log_level: slog::Level,\n\n pub rand_seed: u64,\n\n pub job_id: Option<String>,\n\n pub initial_replica_version: ReplicaVersion,\n\n pub ic_os_img_sha256: String,\n\n pub ic_os_img_url: Url,\n\n pub boundary_node_img_sha256: String,\n\n pub boundary_node_img_url: Url,\n\n pub farm_base_url: Option<Url>,\n", "file_path": "rs/tests/src/driver/cli.rs", "rank": 29, "score": 292318.3637329213 }, { "content": "fn core(signatures: &[String]) -> Result<(), (String, i32)> {\n\n let signatures: CryptoResult<Vec<IndividualSignatureBytes>> = signatures\n\n .iter()\n\n .map(IndividualSignatureBytes::try_from)\n\n .collect();\n\n let signatures = signatures.map_err(|e| (format!(\"{:?}\", e), 2))?;\n\n let signature = combine(&signatures).map_err(|e| (format!(\"Error combining: {:?}\", e), 2))?;\n\n println!(\"Combined signature: {}\", Into::<String>::into(signature));\n\n Ok(())\n\n}\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/combine_signatures.rs", "rank": 30, "score": 292088.54343904497 }, { "content": "/// Processes a command from the command line.\n\npub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Multi => multi::main(args),\n\n _ => unimplemented!(),\n\n }\n\n }\n\n }\n\n}\n", 
"file_path": "rs/crypto/src/cli/csp/mod.rs", "rank": 31, "score": 291971.51268848235 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Hash => hash::main(args),\n\n Command::BasicSig => basic_sig::main(args),\n\n Command::MultiSig => multi_sig::main(args),\n\n Command::ThresholdSig => threshold_sig::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/clib/mod.rs", "rank": 32, "score": 291971.51268848235 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: .. bench <size:usize>[units:[KMG]]\n\n \\n\n\n \\nE.g.: cargo run --release lib hash bench 4G\n\n \\nor: cargo run --release lib hash bench 27\"\n\n .to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/hash/bench.rs", "rank": 33, "score": 289821.29122423346 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <domain_separator> <message>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/hash/simple.rs", "rank": 34, "score": 289821.29122423346 }, { "content": "fn core() -> Result<(), (String, i32)> {\n\n let csp = csp();\n\n let generated = csp\n\n .gen_key_pair_with_pop(AlgorithmId::MultiBls12_381)\n\n .expect(\"Failed to generate key pair with PoP\");\n\n match generated {\n\n (key_id, CspPublicKey::MultiBls12_381(public_key), CspPop::MultiBls12_381(pop)) => {\n\n println!(\"KeyId: {}\", base64::encode(&key_id.get()));\n\n println!(\"PublicKey: {}\", Into::<String>::into(public_key));\n\n println!(\"Pop: {}\", Into::<String>::into(pop));\n\n }\n\n _ => panic!(\"Unexpected types\"),\n\n }\n\n Ok(())\n\n}\n", "file_path": "rs/crypto/src/cli/csp/multi/keygen.rs", "rank": 35, "score": 289821.29122423346 
}, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <key_id>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/sign.rs", "rank": 36, "score": 289821.29122423346 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"This function takes no args.\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/keygen.rs", "rank": 37, "score": 289821.29122423346 }, { "content": "/// Given a `CandidType`, return the hex encoding of this object.\n\npub fn hex_encode_candid(candid: impl CandidType) -> String {\n\n let bytes = Encode!(&candid).unwrap();\n\n hex::encode(&bytes)\n\n}\n", "file_path": "rs/sns/cli/src/main.rs", "rank": 38, "score": 288679.550369484 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n [message_size] => core(message_size),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/hash/bench.rs", "rank": 39, "score": 288568.862677553 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::KeyGen => keygen::main(args),\n\n Command::Sign => sign::main(args),\n\n Command::VerifyIndividual => verify_individual::main(args),\n\n Command::CombineSignatures => combine_signatures::main(args),\n\n Command::VerifyCombined => verify_combined::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/csp/multi/mod.rs", "rank": 40, "score": 288568.862677553 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [domain_separator, message] => core(domain_separator, message),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": 
"rs/crypto/src/cli/clib/hash/simple.rs", "rank": 41, "score": 288568.862677553 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Simple => simple::main(args),\n\n Command::Bench => bench::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/clib/hash/mod.rs", "rank": 42, "score": 288568.862677553 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => core(),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/keygen.rs", "rank": 43, "score": 288568.862677553 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, key_id] => core(message, key_id),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/sign.rs", "rank": 44, "score": 288568.862677553 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: <message> <signature> [public_key1] [public_key2] ...\".to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/verify_combined.rs", "rank": 45, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: This function taks no arguments\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/bench.rs", "rank": 46, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: .. 
bench <num_signers:u32>\n\n \\nE.g.:\n\n \\n cargo run --release lib threshold bench 64\"\n\n .to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/bench.rs", "rank": 47, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <public_coefficients> <index>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/sign.rs", "rank": 48, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"This function takes no args.\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/keygen.rs", "rank": 49, "score": 285946.157156855 }, { "content": "fn core() -> Result<(), (String, i32)> {\n\n let mut keygen_time = Duration::new(0, 0);\n\n let mut signing_time = Duration::new(0, 0);\n\n let mut verification_time = Duration::new(0, 0);\n\n\n\n let mut rng = StdRng::from_entropy();\n\n\n\n let iterations = 10_000;\n\n\n\n for _ in 0..iterations {\n\n let time_start = Instant::now();\n\n let (secret_key, public_key): (SecretKeyBytes, PublicKeyBytes) = keypair_from_rng(&mut rng);\n\n let after_keygen = Instant::now();\n\n let message =\n\n \"Twas brillig, and the slithy toves, did gyre and gimble in the wabe\".as_bytes();\n\n let signature = sign(message, &secret_key).map_err(|e| (format!(\"{:?}\", e), 2))?;\n\n let after_signing = Instant::now();\n\n verify(&signature, message, &public_key)\n\n .map_err(|_| (\"Signature verification failed\".to_string(), 2))?;\n\n let after_verification = Instant::now();\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/bench.rs", "rank": 50, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: <threshold> <receiver_eligibility as string of 0s and 1s> [seed]\".to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/keygen.rs", "rank": 51, "score": 285946.157156855 }, { "content": "fn 
usage() -> Result<(), (String, i32)> {\n\n Err((\"This function takes no args.\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/keygen.rs", "rank": 52, "score": 285946.157156855 }, { "content": "fn core() -> Result<(), (String, i32)> {\n\n let mut rng = StdRng::from_entropy();\n\n let (secret_key, public_key) = keypair_from_rng(&mut rng);\n\n println!(\"SecretKey: {}\", Into::<String>::into(secret_key));\n\n println!(\"PublicKey: {}\", Into::<String>::into(public_key));\n\n Ok(())\n\n}\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/keygen.rs", "rank": 53, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: <pubkey;signature1> [pubkey;signature2] ...\".to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/combine_signatures.rs", "rank": 54, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: .. bench <num_signers:u32>\n\n \\nE.g.:\n\n \\n cargo run --release lib multi bench 64\"\n\n .to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/bench.rs", "rank": 55, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <signature> <public_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/verify.rs", "rank": 56, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <secret_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/sign.rs", "rank": 57, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <secret_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/sign.rs", "rank": 58, "score": 285946.157156855 }, { "content": "fn core() -> Result<(), (String, i32)> {\n\n let mut rng = StdRng::from_entropy();\n\n 
let (secret_key, public_key) = keypair_from_rng(&mut rng);\n\n println!(\"SecretKey: {}\", Into::<String>::into(secret_key));\n\n println!(\"PublicKey: {}\", Into::<String>::into(public_key));\n\n Ok(())\n\n}\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/keygen.rs", "rank": 59, "score": 285946.157156855 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <signature> <message> <public_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/verify_individual.rs", "rank": 60, "score": 285946.157156855 }, { "content": "fn workspace_path() -> String {\n\n match std::env::var(\"CI_PROJECT_DIR\") {\n\n Ok(dir) => format!(\"{}/rs/tests/rosetta_workspace\", dir),\n\n Err(_) => \"rosetta_workspace\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "rs/tests/src/rosetta_test.rs", "rank": 61, "score": 285739.3208636773 }, { "content": "fn opt_slice_to_string(slice: Option<CertifiedStreamSlice>) -> String {\n\n slice.map(slice_to_string).unwrap_or_else(|| \"None\".into())\n\n}\n\n\n", "file_path": "rs/xnet/payload_builder/tests/common/mod.rs", "rank": 62, "score": 285480.13588349987 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, secret_key] => core(message, secret_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/sign.rs", "rank": 63, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [signature, message, public_key] => core(signature, message, public_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/verify_individual.rs", "rank": 64, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [threshold, eligibility, seed] => core(threshold, eligibility, Some(seed)),\n\n [threshold, eligibility] => core(threshold, eligibility, None),\n\n _ => usage(),\n\n 
}\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/keygen.rs", "rank": 65, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::KeyGen => keygen::main(args),\n\n Command::Sign => sign::main(args),\n\n Command::VerifyIndividual => verify_individual::main(args),\n\n Command::CombineSignatures => combine_signatures::main(args),\n\n Command::VerifyCombined => verify_combined::main(args),\n\n Command::Bench => bench::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/mod.rs", "rank": 66, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, signature, public_key] => core(message, signature, public_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/verify.rs", "rank": 67, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n _ => core(args),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/combine_signatures.rs", "rank": 68, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n if args.len() >= 2 {\n\n core(&args[0], &args[1], &args[2..])\n\n } else {\n\n usage()\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/csp/multi/verify_combined.rs", "rank": 69, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n 
format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Keygen => keygen::main(args),\n\n Command::Sign => sign::main(args),\n\n Command::Verify => verify::main(args),\n\n Command::Bench => bench::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/mod.rs", "rank": 70, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, secret_key] => core(message, secret_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/sign.rs", "rank": 71, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => core(),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/keygen.rs", "rank": 72, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => core(),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/keygen.rs", "rank": 73, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, secret_key] => core(message, secret_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/sign.rs", "rank": 74, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [] => Err((help_str(), 1)),\n\n _ => {\n\n let command = Command::from_str(&args[0]).map_err(|_| {\n\n (\n\n format!(\"Unsupported subcommand '{}'.\\n{}\", args[0], help_str()),\n\n 1,\n\n )\n\n })?;\n\n let args = &args[1..];\n\n match command {\n\n Command::Keygen => keygen::main(args),\n\n Command::IndividualPublicKey => individual_public_key::main(args),\n\n Command::CombinedPublicKey => 
combined_public_key::main(args),\n\n Command::Sign => sign::main(args),\n\n Command::CombineSignatures => combine_signatures::main(args),\n\n Command::VerifyIndividual => verify_individual::main(args),\n\n Command::VerifyCombined => verify_combined::main(args),\n\n Command::Bench => bench::main(args),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/mod.rs", "rank": 75, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n [num_signers] => core(num_signers),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/bench.rs", "rank": 76, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n [] => core(),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/basic_sig/bench.rs", "rank": 77, "score": 285303.388092457 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n [num_signers] => core(num_signers),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/bench.rs", "rank": 78, "score": 285303.388092457 }, { "content": "/// Generates a `RegistryClient` with an own node record (the minimum necessary\n\n/// to boot up an `XNetPayloadBuilderImpl`), subnet records for `SUBNET_1`\n\n/// through `SUBNET_5` (each consisting of one node, with a corresponding node\n\n/// record) and a subnet list record covering `SUBNET_1` through `SUBNET_5` (so\n\n/// they're considered for payload building).\n\nfn get_registry_for_test() -> Arc<dyn RegistryClient> {\n\n let data_provider = ProtoRegistryDataProvider::new();\n\n\n\n for (i, node) in [OWN_NODE, NODE_1, NODE_2, NODE_3, NODE_4, NODE_5]\n\n .iter()\n\n .enumerate()\n\n {\n\n data_provider\n\n .add(\n\n 
&make_node_record_key(*node),\n\n REGISTRY_VERSION,\n\n Some(NodeRecord {\n\n xnet: Some(ConnectionEndpoint {\n\n ip_addr: \"127.0.0.1\".to_string(),\n\n port: i as u32,\n\n protocol: Protocol::Http1 as i32,\n\n }),\n\n ..Default::default()\n\n }),\n\n )\n", "file_path": "rs/xnet/payload_builder/tests/xnet_payload_builder.rs", "rank": 79, "score": 284654.55070783035 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <signature1> [signature2] ...\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/combine_signatures.rs", "rank": 80, "score": 282252.8934253537 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: <threshold> [signature1 or '-'] [signature2 or '-'] ...\".to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/combine_signatures.rs", "rank": 81, "score": 282252.8934253537 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <signature> <public_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/verify_individual.rs", "rank": 82, "score": 282252.8934253537 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <signature> <public_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/verify_combined.rs", "rank": 83, "score": 282252.8934253537 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <message> <signature> <public_key>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/verify_individual.rs", "rank": 84, "score": 282252.8934253537 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\n\n \"Args: <message> <signature> [public_key1] [public_key2] ...\".to_string(),\n\n 1,\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/verify_combined.rs", "rank": 85, "score": 282252.8934253537 }, { "content": "pub fn main(args: 
&[String]) -> Result<(), (String, i32)> {\n\n if args.len() >= 2 {\n\n core(&args[0], &args[1], &args[2..])\n\n } else {\n\n usage()\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/verify_combined.rs", "rank": 86, "score": 282166.74123163696 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, signature, public_key] => core(message, signature, public_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/verify_combined.rs", "rank": 87, "score": 282166.74123163696 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n args if !args.is_empty() => core(&args[0], &args[1..]),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/combine_signatures.rs", "rank": 88, "score": 282166.74123163696 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n _ => core(args),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/combine_signatures.rs", "rank": 89, "score": 282166.74123163696 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, signature, public_key] => core(message, signature, public_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/verify_individual.rs", "rank": 90, "score": 282166.74123163696 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [message, signature, public_key] => core(message, signature, public_key),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/verify_individual.rs", "rank": 91, "score": 282166.74123163696 }, { "content": "fn ensure_balance(cycles: Cycles) -> Result<(), String> {\n\n let now = dfn_core::api::now();\n\n\n\n {\n\n let mut 
state = STATE.write().unwrap();\n\n state.limiter.purge_old(now);\n\n let count = state.limiter.get_count();\n\n if count + cycles > state.cycles_limit {\n\n return Err(format!(\n\n \"More than {} cycles have been minted in the last {} seconds, please try again later.\",\n\n state.cycles_limit,\n\n state.limiter.get_max_age().as_secs(),\n\n ));\n\n }\n\n state.limiter.add(now, cycles);\n\n state.total_cycles_minted += cycles;\n\n }\n\n\n\n dfn_core::api::mint_cycles(\n\n cycles\n\n .get()\n\n .try_into()\n\n .map_err(|_| \"Cycles u64 overflow\".to_owned())?,\n\n );\n\n assert!(u128::from(dfn_core::api::canister_cycle_balance()) >= cycles.get());\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/nns/cmc/src/main.rs", "rank": 92, "score": 280981.78446504497 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [flag] if flag == \"--help\" => usage(),\n\n [public_coefficients] => core(public_coefficients),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/combined_public_key.rs", "rank": 93, "score": 279151.2518262478 }, { "content": "pub fn main(args: &[String]) -> Result<(), (String, i32)> {\n\n match args {\n\n [public_coefficients, index] => core(public_coefficients, index),\n\n _ => usage(),\n\n }\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/individual_public_key.rs", "rank": 94, "score": 279151.2518262478 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <public_coefficients> <index>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/individual_public_key.rs", "rank": 95, "score": 278728.99010474724 }, { "content": "fn usage() -> Result<(), (String, i32)> {\n\n Err((\"Args: <public_coefficients> <index>\".to_string(), 1))\n\n}\n\n\n", "file_path": "rs/crypto/src/cli/clib/threshold_sig/combined_public_key.rs", "rank": 96, "score": 278728.99010474724 }, { "content": "fn data_payload() -> Payload {\n\n 
Payload::Data(vec![1])\n\n}\n\n\n", "file_path": "rs/canonical_state/src/encoding/tests/compatibility.rs", "rank": 97, "score": 278323.40042005375 }, { "content": "fn reject_payload() -> Payload {\n\n Payload::Reject(reject_context())\n\n}\n\n\n", "file_path": "rs/canonical_state/src/encoding/tests/compatibility.rs", "rank": 98, "score": 278323.40042005375 }, { "content": "#[test]\n\nfn test_canister_installation_traps_on_bad_init_payload() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n assert_matches!(\n\n Project::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .cargo_bin(\"registry-canister\", &[])\n\n .install(&runtime)\n\n .bytes(b\"This is not legal candid\".to_vec())\n\n .await,\n\n Err(msg) if msg.contains(\"must be a Candid-encoded RegistryCanisterInitPayload\"));\n\n Ok(())\n\n });\n\n}\n\n\n", "file_path": "rs/registry/canister/tests/integration_tests.rs", "rank": 99, "score": 278139.1355052339 } ]
Rust
src/banner.rs
nirsarkar/feroxbuster
6921ac03a9a3e08a931bfe160cd4854d76ba6a02
use crate::{config::Configuration, utils::status_colorizer, VERSION}; macro_rules! format_banner_entry_helper { ($rune:expr, $name:expr, $value:expr, $indent:expr, $col_width:expr) => { format!( "\u{0020}{:\u{0020}<indent$}{:\u{0020}<col_w$}\u{2502}\u{0020}{}", $rune, $name, $value, indent = $indent, col_w = $col_width ) }; ($rune:expr, $name:expr, $value:expr, $value2:expr, $indent:expr, $col_width:expr) => { format!( "\u{0020}{:\u{0020}<indent$}{:\u{0020}<col_w$}\u{2502}\u{0020}{}:\u{0020}{}", $rune, $name, $value, $value2, indent = $indent, col_w = $col_width ) }; } macro_rules! format_banner_entry { ($rune:expr, $name:expr, $value:expr) => { format_banner_entry_helper!($rune, $name, $value, 3, 22) }; ($rune:expr, $name:expr, $value1:expr, $value2:expr) => { format_banner_entry_helper!($rune, $name, $value1, $value2, 3, 22) }; } pub fn initialize(targets: &[String], config: &Configuration) { let artwork = format!( r#" ___ ___ __ __ __ __ __ ___ |__ |__ |__) |__) | / ` / \ \_/ | | \ |__ | |___ | \ | \ | \__, \__/ / \ | |__/ |___ by Ben "epi" Risher {} ver: {}"#, '\u{1F913}', VERSION ); let top = "───────────────────────────┬──────────────────────"; let bottom = "───────────────────────────┴──────────────────────"; eprintln!("{}", artwork); eprintln!("{}", top); for target in targets { eprintln!( "{}", format_banner_entry!("\u{1F3af}", "Target Url", target) ); } let mut codes = vec![]; for code in &config.statuscodes { codes.push(status_colorizer(&code.to_string())) } eprintln!( "{}", format_banner_entry!("\u{1F680}", "Threads", config.threads) ); eprintln!( "{}", format_banner_entry!("\u{1f4d6}", "Wordlist", config.wordlist) ); eprintln!( "{}", format_banner_entry!( "\u{1F197}", "Status Codes", format!("[{}]", codes.join(", ")) ) ); eprintln!( "{}", format_banner_entry!("\u{1f4a5}", "Timeout (secs)", config.timeout) ); eprintln!( "{}", format_banner_entry!("\u{1F9a1}", "User-Agent", config.useragent) ); if !config.config.is_empty() { eprintln!( "{}", 
format_banner_entry!("\u{1f489}", "Config File", config.config) ); } if !config.proxy.is_empty() { eprintln!( "{}", format_banner_entry!("\u{1f48e}", "Proxy", config.proxy) ); } if !config.headers.is_empty() { for (name, value) in &config.headers { eprintln!( "{}", format_banner_entry!("\u{1f92f}", "Header", name, value) ); } } if !config.sizefilters.is_empty() { for filter in &config.sizefilters { eprintln!( "{}", format_banner_entry!("\u{1f4a2}", "Size Filter", filter) ); } } if config.extract_links { eprintln!( "{}", format_banner_entry!("\u{1F50E}", "Extract Links", config.extract_links) ); } if !config.queries.is_empty() { for query in &config.queries { eprintln!( "{}", format_banner_entry!( "\u{1f914}", "Query Parameter", format!("{}={}", query.0, query.1) ) ); } } if !config.output.is_empty() { eprintln!( "{}", format_banner_entry!("\u{1f4be}", "Output File", config.output) ); } if !config.extensions.is_empty() { eprintln!( "{}", format_banner_entry!( "\u{1f4b2}", "Extensions", format!("[{}]", config.extensions.join(", ")) ) ); } if config.insecure { eprintln!( "{}", format_banner_entry!("\u{1f513}", "Insecure", config.insecure) ); } if config.redirects { eprintln!( "{}", format_banner_entry!("\u{1f4cd}", "Follow Redirects", config.redirects) ); } if config.dontfilter { eprintln!( "{}", format_banner_entry!("\u{1f92a}", "Filter Wildcards", !config.dontfilter) ); } match config.verbosity { 1 => { eprintln!( "{}", format_banner_entry!("\u{1f508}", "Verbosity", config.verbosity) ); } 2 => { eprintln!( "{}", format_banner_entry!("\u{1f509}", "Verbosity", config.verbosity) ); } 3 => { eprintln!( "{}", format_banner_entry!("\u{1f50a}", "Verbosity", config.verbosity) ); } 4 => { eprintln!( "{}", format_banner_entry!("\u{1f4e2}", "Verbosity", config.verbosity) ); } _ => {} } if config.addslash { eprintln!( "{}", format_banner_entry!("\u{1fa93}", "Add Slash", config.addslash) ); } if !config.norecursion { if config.depth == 0 { eprintln!( "{}", 
format_banner_entry!("\u{1f503}", "Recursion Depth", "INFINITE") ); } else { eprintln!( "{}", format_banner_entry!("\u{1f503}", "Recursion Depth", config.depth) ); } } else { eprintln!( "{}", format_banner_entry!("\u{1f6ab}", "Do Not Recurse", config.norecursion) ); } eprintln!("{}", bottom); } #[cfg(test)] mod tests { use super::*; #[test] fn banner_without_targets() { let config = Configuration::default(); initialize(&[], &config); } #[test] fn banner_without_status_codes() { let mut config = Configuration::default(); config.statuscodes = vec![]; initialize(&[String::from("http://localhost")], &config); } #[test] fn banner_without_config_file() { let mut config = Configuration::default(); config.config = String::new(); initialize(&[String::from("http://localhost")], &config); } #[test] fn banner_without_queries() { let mut config = Configuration::default(); config.queries = vec![(String::new(), String::new())]; initialize(&[String::from("http://localhost")], &config); } }
use crate::{config::Configuration, utils::status_colorizer, VERSION}; macro_rules! format_banner_entry_helper { ($rune:expr, $name:expr, $value:expr, $indent:expr, $col_width:expr) => { format!( "\u{0020}{:\u{0020}<indent$}{:\u{0020}<col_w$}\u{2502}\u{0020}{}", $rune, $name, $value, indent = $indent, col_w = $col_width ) }; ($rune:expr, $name:expr, $value:expr, $value2:expr, $indent:expr, $col_width:expr) => { format!( "\u{0020}{:\u{0020}<indent$}{:\u{0020}<col_w$}\u{2502}\u{0020}{}:\u{0020}{}", $rune, $name, $value, $value2, indent = $indent, col_w = $col_width ) }; } macro_rules! format_banner_entry { ($rune:expr, $name:expr, $value:expr) => { format_banner_entry_helper!($rune, $name, $value, 3, 22) }; ($rune:expr, $name:expr, $value1:expr, $value2:expr) => { format_banner_entry_helper!($rune, $name, $value1, $value2, 3, 22) }; } pub fn initialize(targets: &[String], config: &Configuration) { let artwork = format!( r#" ___ ___ __ __ __ __ __ ___ |__ |__ |__) |__) | / ` / \ \_/ | | \ |__ | |___ | \ | \ | \__, \__/ / \ | |__/ |___ by Ben "epi" Risher {} ver: {}"#, '\u{1F913}', VERSION ); let top = "───────────────────────────┬──────────────────────"; let bottom = "───────────────────────────┴──────────────────────"; eprintln!("{}", artwork); eprintln!("{}", top); for target in targets { eprintln!( "{}", format_banner_entry!("\u{1F3af}", "Target Url", target) ); } let mut codes = vec![]; for code in &config.statuscodes { codes.push(status_colorizer(&code.to_string())) } eprintln!( "{}", format_banner_entry!("\u{1F680}", "Threads", config.threads) ); eprintln!( "{}", format_banner_entry!("\u{1f4d6}", "Wordlist", config.wordlist) ); eprintln!( "{}", format_banner_entry!( "\u{1F197}", "Status Codes", format!("[{}]", codes.join(", ")) ) ); eprintln!( "{}", format_banner_entry!("\u{1f4a5}", "Timeout (secs)", config.timeout) ); eprintln!( "{}", format_banner_entry!("\u{1F9a1}", "User-Agent", config.useragent) ); if !config.config.is_empty() { eprintln!( "{}", 
format_banner_entry!("\u{1f489}", "Config File", config.config) ); } if !config.proxy.is_empty() { eprintln!( "{}", format_banner_entry!("\u{1f48e}", "Proxy", config.proxy) ); } if !config.headers.is_empty() { for (name, value) in &config.headers { eprintln!( "{}", format_banner_entry!("\u{1f92f}", "Header", name, value) ); } } if !config.sizefilters.is_empty() { for filter in &config.sizefilters { eprintln!( "{}", format_banner_entry!("\u{1f4a2}", "Size Filter", filter) ); } } if config.extract_links { eprintln!( "{}", format_banner_entry!("\u{1F50E}", "Extract Links", config.extract_links) ); } if !config.queries.is_empty() { for query in &config.queries { eprintln!( "{}", format_banner_entry!( "\u{1f914}", "Query Parameter", format!("{}={}", query.0, query.1) ) ); } } if !config.output.is_empty() { eprintln!( "{}", format_banner_entry!("\u{1f4be}", "Output File", config.output) ); } if !config.extensions.is_empty() { eprintln!( "{}", format_banner_entry!( "\u{1f4b2}", "Extensions", format!("[{}]", config.extensions.join(", ")) ) ); } if config.insecure { eprintln!( "{}", format_banner_entry!("\u{1f513}", "Insecure", config.insecure) ); } if config.redirects { eprintln!( "{}", format_banner_entry!("\u{1f4cd}", "Follow Redirects", config.redirects) ); } if config.dontfilter { eprintln!( "{}", format_banner_entry!("\u{1f92a}", "Filter Wildcards", !config.dontfilter) ); } match config.verbosity { 1 => { eprintln!( "{}", format_banner_entry!("\u{1f508}", "Verbosity", config.verbosity) ); } 2 => { eprintln!( "{}", format_banner_entry!("\u{1f509}", "Verbosity", config.verbosity) ); } 3 => { eprintln!( "{}", format_banner_entry!("\u{1f50a}", "Verbosity", config.verbosity) ); } 4 => { eprintln!( "{}", format_banner_entry!("\u{1f4e2}", "Verbosity", config.verbosity) ); } _ => {} } if config.addslash { eprintln!( "{}", format_banner_entry!("\u{1fa93}", "Add Slash", config.addslash) ); }
eprintln!("{}", bottom); } #[cfg(test)] mod tests { use super::*; #[test] fn banner_without_targets() { let config = Configuration::default(); initialize(&[], &config); } #[test] fn banner_without_status_codes() { let mut config = Configuration::default(); config.statuscodes = vec![]; initialize(&[String::from("http://localhost")], &config); } #[test] fn banner_without_config_file() { let mut config = Configuration::default(); config.config = String::new(); initialize(&[String::from("http://localhost")], &config); } #[test] fn banner_without_queries() { let mut config = Configuration::default(); config.queries = vec![(String::new(), String::new())]; initialize(&[String::from("http://localhost")], &config); } }
if !config.norecursion { if config.depth == 0 { eprintln!( "{}", format_banner_entry!("\u{1f503}", "Recursion Depth", "INFINITE") ); } else { eprintln!( "{}", format_banner_entry!("\u{1f503}", "Recursion Depth", config.depth) ); } } else { eprintln!( "{}", format_banner_entry!("\u{1f6ab}", "Do Not Recurse", config.norecursion) ); }
if_condition
[ { "content": "/// simple helper to stay DRY, trys to join a url + fragment and add it to the `links` HashSet\n\nfn add_link_to_set_of_links(link: &str, url: &Url, links: &mut HashSet<String>) {\n\n log::trace!(\n\n \"enter: add_link_to_set_of_links({}, {}, {:?})\",\n\n link,\n\n url.to_string(),\n\n links\n\n );\n\n match url.join(&link) {\n\n Ok(new_url) => {\n\n links.insert(new_url.to_string());\n\n }\n\n Err(e) => {\n\n log::error!(\"Could not join given url to the base url: {}\", e);\n\n }\n\n }\n\n log::trace!(\"exit: add_link_to_set_of_links\");\n\n}\n\n\n\n/// Given a `reqwest::Response`, perform the following actions\n\n/// - parse the response's text for links using the linkfinder regex\n", "file_path": "src/extractor.rs", "rank": 0, "score": 215452.78988465772 }, { "content": "/// default wordlist\n\nfn wordlist() -> String {\n\n String::from(DEFAULT_WORDLIST)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 2, "score": 169527.845572893 }, { "content": "/// Creates a vector of formatted Urls\n\n///\n\n/// At least one value will be returned (base_url + word)\n\n///\n\n/// If any extensions were passed to the program, each extension will add a\n\n/// (base_url + word + ext) Url to the vector\n\nfn create_urls(target_url: &str, word: &str, extensions: &[String]) -> Vec<Url> {\n\n log::trace!(\n\n \"enter: create_urls({}, {}, {:?})\",\n\n target_url,\n\n word,\n\n extensions\n\n );\n\n\n\n let mut urls = vec![];\n\n\n\n if let Ok(url) = format_url(\n\n &target_url,\n\n &word,\n\n CONFIGURATION.addslash,\n\n &CONFIGURATION.queries,\n\n None,\n\n ) {\n\n urls.push(url); // default request, i.e. 
no extension\n\n }\n\n\n", "file_path": "src/scanner.rs", "rank": 3, "score": 165719.85783386964 }, { "content": "/// default useragent\n\nfn useragent() -> String {\n\n format!(\"feroxbuster/{}\", VERSION)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 4, "score": 143263.743828696 }, { "content": "/// Simple helper to generate a `Url`\n\n///\n\n/// Errors during parsing `url` or joining `word` are propagated up the call stack\n\npub fn format_url(\n\n url: &str,\n\n word: &str,\n\n addslash: bool,\n\n queries: &[(String, String)],\n\n extension: Option<&str>,\n\n) -> FeroxResult<Url> {\n\n log::trace!(\n\n \"enter: format_url({}, {}, {}, {:?} {:?})\",\n\n url,\n\n word,\n\n addslash,\n\n queries,\n\n extension\n\n );\n\n\n\n // from reqwest::Url::join\n\n // Note: a trailing slash is significant. Without it, the last path component\n\n // is considered to be a “file” name to be removed to get at the “directory”\n\n // that is used as the base\n", "file_path": "src/utils.rs", "rank": 5, "score": 137654.41687179994 }, { "content": "/// Gets the length of a url's path\n\n///\n\n/// example: http://localhost/stuff -> 5\n\npub fn get_url_path_length(url: &Url) -> u64 {\n\n log::trace!(\"enter: get_url_path_length({})\", url);\n\n\n\n let path = url.path();\n\n\n\n let segments = if path.starts_with('/') {\n\n path[1..].split_terminator('/')\n\n } else {\n\n log::trace!(\"exit: get_url_path_length -> 0\");\n\n return 0;\n\n };\n\n\n\n if let Some(last) = segments.last() {\n\n // failure on conversion should be very unlikely. While a usize can absolutely overflow a\n\n // u64, the generally accepted maximum for the length of a url is ~2000. 
so the value we're\n\n // putting into the u64 should never realistically be anywhere close to producing an\n\n // overflow.\n\n // usize max: 18,446,744,073,709,551,615\n\n // u64 max: 9,223,372,036,854,775,807\n\n let url_len: u64 = last\n", "file_path": "src/utils.rs", "rank": 6, "score": 131954.50117501157 }, { "content": "/// default timeout value\n\nfn timeout() -> u64 {\n\n 7\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 7, "score": 115337.69590842529 }, { "content": "/// Takes in a string and examines the first character to return a color version of the same string\n\npub fn status_colorizer(status: &str) -> String {\n\n match status.chars().next() {\n\n Some('1') => style(status).blue().to_string(), // informational\n\n Some('2') => style(status).green().to_string(), // success\n\n Some('3') => style(status).yellow().to_string(), // redirects\n\n Some('4') => style(status).red().to_string(), // client error\n\n Some('5') => style(status).red().to_string(), // server error\n\n Some('W') => style(status).cyan().to_string(), // wildcard\n\n Some('E') => style(status).red().to_string(), // error\n\n _ => status.to_string(), // ¯\\_(ツ)_/¯\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 8, "score": 114860.77328831708 }, { "content": "/// Takes in a string and colors it using console::style\n\n///\n\n/// mainly putting this here in case i want to change the color later, making any changes easy\n\npub fn module_colorizer(modname: &str) -> String {\n\n style(modname).cyan().to_string()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 9, "score": 114859.17591586206 }, { "content": "/// Helper function that determines the current depth of a given url\n\n///\n\n/// Essentially looks at the Url path and determines how many directories are present in the\n\n/// given Url\n\n///\n\n/// http://localhost -> 1\n\n/// http://localhost/ -> 1\n\n/// http://localhost/stuff -> 2\n\n/// ...\n\n///\n\n/// returns 0 on error and relative urls\n\npub fn 
get_current_depth(target: &str) -> usize {\n\n log::trace!(\"enter: get_current_depth({})\", target);\n\n\n\n let target = if !target.ends_with('/') {\n\n // target url doesn't end with a /, for the purposes of determining depth, we'll normalize\n\n // all urls to end in a / and then calculate accordingly\n\n format!(\"{}/\", target)\n\n } else {\n\n String::from(target)\n\n };\n\n\n\n match Url::parse(&target) {\n\n Ok(url) => {\n\n if let Some(parts) = url.path_segments() {\n\n // at least an empty string returned by the Split, meaning top-level urls\n\n let mut depth = 0;\n\n\n\n for _ in parts {\n\n depth += 1;\n\n }\n", "file_path": "src/utils.rs", "rank": 10, "score": 112714.73916746912 }, { "content": "/// Iterate over a given path, return a list of every sub-path found\n\n///\n\n/// example: `path` contains a link fragment `homepage/assets/img/icons/handshake.svg`\n\n/// the following fragments would be returned:\n\n/// - homepage/assets/img/icons/handshake.svg\n\n/// - homepage/assets/img/icons/\n\n/// - homepage/assets/img/\n\n/// - homepage/assets/\n\n/// - homepage/\n\nfn get_sub_paths_from_path(path: &str) -> Vec<String> {\n\n log::trace!(\"enter: get_sub_paths_from_path({})\", path);\n\n let mut paths = vec![];\n\n\n\n let mut parts: Vec<&str> = path.split('/').collect();\n\n\n\n let length = parts.len();\n\n\n\n for _ in 0..length {\n\n // iterate over all parts of the path, using .pop() to remove the last part of the path\n\n parts.pop();\n\n\n\n if parts.is_empty() {\n\n // pop left us with an empty vector, ignore\n\n continue;\n\n }\n\n\n\n let possible_path = parts.join(\"/\");\n\n\n\n if possible_path.is_empty() {\n", "file_path": "src/extractor.rs", "rank": 11, "score": 110240.06413655571 }, { "content": "/// default status codes\n\nfn statuscodes() -> Vec<u16> {\n\n DEFAULT_STATUS_CODES\n\n .iter()\n\n .map(|code| code.as_u16())\n\n .collect()\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 12, "score": 108703.17426254196 }, { "content": 
"/// Adds the given url to `SCANNED_URLS`\n\n///\n\n/// If `SCANNED_URLS` did not already contain the url, return true; otherwise return false\n\nfn add_url_to_list_of_scanned_urls(resp: &str, scanned_urls: &RwLock<HashSet<String>>) -> bool {\n\n log::trace!(\n\n \"enter: add_url_to_list_of_scanned_urls({}, {:?})\",\n\n resp,\n\n scanned_urls\n\n );\n\n\n\n match scanned_urls.write() {\n\n // check new url against what's already been scanned\n\n Ok(mut urls) => {\n\n let normalized_url = if resp.ends_with('/') {\n\n // append a / to the list of 'seen' urls, this is to prevent the case where\n\n // 3xx and 2xx duplicate eachother\n\n resp.to_string()\n\n } else {\n\n format!(\"{}/\", resp)\n\n };\n\n\n\n // If the set did not contain resp, true is returned.\n\n // If the set did contain resp, false is returned.\n", "file_path": "src/scanner.rs", "rank": 13, "score": 102322.52524776178 }, { "content": "/// simple helper to keep DRY; sends a message using the transmitter side of the given mpsc channel\n\n/// the receiver is expected to be the side that saves the message to CONFIGURATION.output.\n\nfn try_send_message_to_file(msg: &str, tx_file: UnboundedSender<String>, save_output: bool) {\n\n log::trace!(\"enter: try_send_message_to_file({}, {:?})\", msg, tx_file);\n\n\n\n if save_output {\n\n match tx_file.send(msg.to_string()) {\n\n Ok(_) => {\n\n log::trace!(\n\n \"sent message from heuristics::try_send_message_to_file to file handler\"\n\n );\n\n }\n\n Err(e) => {\n\n log::error!(\n\n \"{} {} {}\",\n\n status_colorizer(\"ERROR\"),\n\n module_colorizer(\"heuristics::try_send_message_to_file\"),\n\n e\n\n );\n\n }\n\n }\n\n }\n", "file_path": "src/heuristics.rs", "rank": 14, "score": 98848.22647807293 }, { "content": "/// test finds a static wildcard and reports as much to stdout and a file\n\nfn heuristics_wildcard_test_with_two_static_wildcards_and_output_to_file(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let 
(tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n let outfile = tmp_dir.path().join(\"outfile\");\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let mock2 = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{96}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n", "file_path": "tests/test_heuristics.rs", "rank": 15, "score": 96766.07501848326 }, { "content": "/// Given a string and a reference to a locked buffered file, write the contents and flush\n\n/// the buffer to disk.\n\npub fn safe_file_write(contents: &str, locked_file: Arc<RwLock<io::BufWriter<fs::File>>>) {\n\n // note to future self: adding logging of anything other than error to this function\n\n // is a bad idea. we call this function while processing records generated by the logger.\n\n // If we then call log::... 
while already processing some logging output, it results in\n\n // the second log entry being injected into the first.\n\n\n\n let contents = strip_ansi_codes(&contents);\n\n\n\n if let Ok(mut handle) = locked_file.write() {\n\n // write lock acquired\n\n match handle.write(contents.as_bytes()) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n log::error!(\"could not write report to disk: {}\", e);\n\n }\n\n }\n\n\n\n match handle.flush() {\n\n // this function is used within async functions/loops, so i'm flushing so that in\n\n // the event of a ctrl+c or w/e results seen so far are saved instead of left lying\n", "file_path": "src/reporter.rs", "rank": 16, "score": 96605.6518341064 }, { "content": "/// test finds a static wildcard that returns 3xx, expect redirects to => in response as well as\n\n/// in the output file\n\nfn heuristics_wildcard_test_with_redirect_as_response_code(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n let outfile = tmp_dir.path().join(\"outfile\");\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(301)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let mock2 = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{96}/\").unwrap())\n\n .return_status(301)\n\n .return_header(\"Location\", &srv.url(\"/some-redirect\"))\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n", "file_path": "tests/test_heuristics.rs", "rank": 17, "score": 94517.30201047016 }, { "content": "// Accessing a `static mut` is unsafe much of the time, but if we do so\n\n// in a synchronized fashion (e.g., write once or read all) then we're\n\n// good to go!\n\n//\n\n// This function will only call `open_file` once, and will\n\n// 
otherwise always return the value returned from the first invocation.\n\npub fn get_cached_file_handle(filename: &str) -> Option<Arc<RwLock<io::BufWriter<fs::File>>>> {\n\n unsafe {\n\n INIT.call_once(|| {\n\n LOCKED_FILE = open_file(&filename);\n\n });\n\n LOCKED_FILE.clone()\n\n }\n\n}\n\n\n", "file_path": "src/reporter.rs", "rank": 18, "score": 90951.74160244643 }, { "content": "/// default threads value\n\nfn threads() -> usize {\n\n 50\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 19, "score": 89013.75879420097 }, { "content": "/// Creates all required output handlers (terminal, file) and returns\n\n/// the transmitter sides of each mpsc along with each receiver's future's JoinHandle to be awaited\n\n///\n\n/// Any other module that needs to write a Response to stdout or output results to a file should\n\n/// be passed a clone of the appropriate returned transmitter\n\npub fn initialize(\n\n output_file: &str,\n\n save_output: bool,\n\n) -> (\n\n UnboundedSender<Response>,\n\n UnboundedSender<String>,\n\n JoinHandle<()>,\n\n Option<JoinHandle<()>>,\n\n) {\n\n log::trace!(\"enter: initialize({}, {})\", output_file, save_output);\n\n\n\n let (tx_rpt, rx_rpt): FeroxChannel<Response> = mpsc::unbounded_channel();\n\n let (tx_file, rx_file): FeroxChannel<String> = mpsc::unbounded_channel();\n\n\n\n let file_clone = tx_file.clone();\n\n\n\n let term_reporter =\n\n tokio::spawn(async move { spawn_terminal_reporter(rx_rpt, file_clone, save_output).await });\n\n\n\n let file_reporter = if save_output {\n", "file_path": "src/reporter.rs", "rank": 20, "score": 85502.75343409914 }, { "content": "/// Create and return an instance of [reqwest::Client](https://docs.rs/reqwest/latest/reqwest/struct.Client.html)\n\npub fn initialize(\n\n timeout: u64,\n\n useragent: &str,\n\n redirects: bool,\n\n insecure: bool,\n\n headers: &HashMap<String, String>,\n\n proxy: Option<&str>,\n\n) -> Client {\n\n let policy = if redirects {\n\n Policy::limited(10)\n\n } else {\n\n 
Policy::none()\n\n };\n\n\n\n // try_into returns infallible as its error, unwrap is safe here\n\n let header_map: HeaderMap = headers.try_into().unwrap();\n\n\n\n let client = Client::builder()\n\n .timeout(Duration::new(timeout, 0))\n\n .user_agent(useragent)\n", "file_path": "src/client.rs", "rank": 21, "score": 85495.51527693405 }, { "content": "/// Simple helper to return a uuid, formatted as lowercase without hyphens\n\n///\n\n/// `length` determines the number of uuids to string together. Each uuid\n\n/// is 32 characters long. So, a length of 1 return a 32 character string,\n\n/// a length of 2 returns a 64 character string, and so on...\n\nfn unique_string(length: usize) -> String {\n\n log::trace!(\"enter: unique_string({})\", length);\n\n let mut ids = vec![];\n\n\n\n for _ in 0..length {\n\n ids.push(Uuid::new_v4().to_simple().to_string());\n\n }\n\n\n\n let unique_id = ids.join(\"\");\n\n\n\n log::trace!(\"exit: unique_string -> {}\", unique_id);\n\n unique_id\n\n}\n\n\n\n/// Tests the given url to see if it issues a wildcard response\n\n///\n\n/// In the event that url returns a wildcard response, a\n\n/// [WildcardFilter](struct.WildcardFilter.html) is created and returned to the caller.\n\npub async fn wildcard_test(\n\n target_url: &str,\n", "file_path": "src/heuristics.rs", "rank": 22, "score": 83888.43819033063 }, { "content": "/// integration test helper: creates a temp directory, and writes `words` to\n\n/// a file named `filename` in the temp directory\n\npub fn setup_tmp_directory(\n\n words: &[String],\n\n filename: &str,\n\n) -> Result<(TempDir, PathBuf), Box<dyn std::error::Error>> {\n\n let tmp_dir = TempDir::new()?;\n\n let file = tmp_dir.path().join(&filename);\n\n write(&file, words.join(\"\\n\"))?;\n\n Ok((tmp_dir, file))\n\n}\n\n\n", "file_path": "tests/utils/mod.rs", "rank": 23, "score": 77554.17285677593 }, { "content": "/// Create a HashSet of Strings from the given wordlist then stores it inside an Arc\n\nfn 
get_unique_words_from_wordlist(path: &str) -> FeroxResult<Arc<HashSet<String>>> {\n\n log::trace!(\"enter: get_unique_words_from_wordlist({})\", path);\n\n\n\n let file = match File::open(&path) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n eprintln!(\n\n \"{} {} {}\",\n\n status_colorizer(\"ERROR\"),\n\n module_colorizer(\"main::get_unique_words_from_wordlist\"),\n\n e\n\n );\n\n log::error!(\"Could not open wordlist: {}\", e);\n\n log::trace!(\"exit: get_unique_words_from_wordlist -> {}\", e);\n\n\n\n return Err(Box::new(e));\n\n }\n\n };\n\n\n\n let reader = BufReader::new(file);\n", "file_path": "src/main.rs", "rank": 24, "score": 77011.88666690387 }, { "content": "/// Add an [indicatif::ProgressBar](https://docs.rs/indicatif/latest/indicatif/struct.ProgressBar.html)\n\n/// to the global [PROGRESS_BAR](../config/struct.PROGRESS_BAR.html)\n\npub fn add_bar(prefix: &str, length: u64, hidden: bool) -> ProgressBar {\n\n let style = if hidden || CONFIGURATION.quiet {\n\n ProgressStyle::default_bar().template(\"\")\n\n } else {\n\n ProgressStyle::default_bar()\n\n .template(\"[{bar:.cyan/blue}] - {elapsed:<4} {pos:>7}/{len:7} {per_sec:7} {prefix}\")\n\n .progress_chars(\"#>-\")\n\n };\n\n\n\n let progress_bar = PROGRESS_BAR.add(ProgressBar::new(length));\n\n\n\n progress_bar.set_style(style);\n\n\n\n progress_bar.set_prefix(&prefix);\n\n\n\n progress_bar\n\n}\n", "file_path": "src/progress.rs", "rank": 25, "score": 76659.45610169502 }, { "content": "/// Create a customized instance of\n\n/// [env_logger::Logger](https://docs.rs/env_logger/latest/env_logger/struct.Logger.html)\n\n/// with timer offset/color and set the log level based on `verbosity`\n\npub fn initialize(verbosity: u8) {\n\n // use occurrences of -v on commandline to or verbosity = N in feroxconfig.toml to set\n\n // log level for the application; respects already specified RUST_LOG environment variable\n\n match env::var(\"RUST_LOG\") {\n\n Ok(_) => {} // RUST_LOG found, don't override\n\n Err(_) => {\n\n // 
only set log level based on verbosity when RUST_LOG variable doesn't exist\n\n match verbosity {\n\n 0 => (),\n\n 1 => env::set_var(\"RUST_LOG\", \"warn\"),\n\n 2 => env::set_var(\"RUST_LOG\", \"info\"),\n\n 3 => env::set_var(\"RUST_LOG\", \"debug,hyper=info,reqwest=info\"),\n\n _ => env::set_var(\"RUST_LOG\", \"trace,hyper=info,reqwest=info\"),\n\n }\n\n }\n\n }\n\n\n\n let start = Instant::now();\n\n let mut builder = Builder::from_default_env();\n\n\n", "file_path": "src/logger.rs", "rank": 26, "score": 76304.10668343143 }, { "content": "/// send the function a file to which we dont have permission in order to execute error branch\n\nfn main_use_root_owned_file_as_wordlist() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(\"/etc/shadow\")\n\n .arg(\"-vvvv\")\n\n .assert()\n\n .success()\n\n .stderr(predicate::str::contains(\n\n \"ERROR main::get_unique_words_from_wordlist Permission denied (os error 13)\",\n\n ));\n\n\n\n // connectivity test hits it once\n\n assert_eq!(mock.times_called(), 1);\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_main.rs", "rank": 27, "score": 74528.00320890336 }, { "content": "/// Helper function that determines if the configured maximum recursion depth has been reached\n\n///\n\n/// Essentially looks at the Url path and determines how many directories are present in the\n\n/// given Url\n\nfn reached_max_depth(url: &Url, base_depth: usize, max_depth: usize) -> bool {\n\n log::trace!(\n\n \"enter: reached_max_depth({}, {}, {})\",\n\n url,\n\n base_depth,\n\n max_depth\n\n );\n\n\n\n if max_depth == 0 {\n\n // early return, as 0 means recurse forever; no additional processing needed\n\n 
log::trace!(\"exit: reached_max_depth -> false\");\n\n return false;\n\n }\n\n\n\n let depth = get_current_depth(url.as_str());\n\n\n\n if depth - base_depth >= max_depth {\n\n return true;\n\n }\n\n\n", "file_path": "src/scanner.rs", "rank": 28, "score": 74477.11021938863 }, { "content": "/// Create and return an instance of [clap::App](https://docs.rs/clap/latest/clap/struct.App.html), i.e. the Command Line Interface's configuration\n\npub fn initialize() -> App<'static, 'static> {\n\n App::new(\"feroxbuster\")\n\n .version(VERSION)\n\n .author(\"Ben 'epi' Risher (@epi052)\")\n\n .about(\"A fast, simple, recursive content discovery tool written in Rust\")\n\n .arg(\n\n Arg::with_name(\"wordlist\")\n\n .short(\"w\")\n\n .long(\"wordlist\")\n\n .value_name(\"FILE\")\n\n .help(\"Path to the wordlist\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"url\")\n\n .short(\"u\")\n\n .long(\"url\")\n\n .required_unless(\"stdin\")\n\n .value_name(\"URL\")\n\n .multiple(true)\n", "file_path": "src/parser.rs", "rank": 29, "score": 72422.83256976667 }, { "content": "/// send a single valid request, expect a 200 response\n\nfn read_in_config_file_for_settings() -> Result<(), Box<dyn std::error::Error>> {\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"threads = 37\".to_string()], \"ferox-config.toml\")?;\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .current_dir(&tmp_dir)\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"-vvvv\")\n\n .assert()\n\n .failure()\n\n .stderr(predicate::str::contains(\"│ 37\"));\n\n\n\n teardown_tmp_directory(tmp_dir);\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/test_config.rs", "rank": 30, "score": 68949.03543180441 }, { "content": "/// send a single valid request with -q, get a response, and write only the url to disk\n\nfn scanner_single_request_scan_with_file_output_and_tack_q(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = 
MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/LICENSE\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let outfile = tmp_dir.path().join(\"output\");\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n", "file_path": "tests/test_scanner.rs", "rank": 31, "score": 68799.52679197307 }, { "content": "/// integration test helper: removes a temporary directory, presumably created with\n\n/// [setup_tmp_directory](fn.setup_tmp_directory.html)\n\npub fn teardown_tmp_directory(directory: TempDir) {\n\n remove_dir_all(directory).unwrap();\n\n}\n", "file_path": "tests/utils/mod.rs", "rank": 32, "score": 67938.52243111422 }, { "content": "/// Simple helper to abstract away the check for an attached terminal.\n\n///\n\n/// If a terminal is attached, progress bars are visible and the progress bar is used to print\n\n/// to stderr. 
The progress bar must be used when bars are visible in order to not jack up any\n\n/// progress bar output (the bar knows how to print above itself)\n\n///\n\n/// If a terminal is not attached, `msg` is printed to stdout, with its ansi\n\n/// color codes stripped.\n\n///\n\n/// additionally, provides a location for future printing options (no color, etc) to be handled\n\npub fn ferox_print(msg: &str, bar: &ProgressBar) {\n\n if user_attended() {\n\n bar.println(msg);\n\n } else {\n\n let stripped = strip_ansi_codes(msg);\n\n println!(\"{}\", stripped);\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 33, "score": 65114.71126176923 }, { "content": "/// Given the path to a file, open the file in append mode (create it if it doesn't exist) and\n\n/// return a reference to the file that is buffered and locked\n\nfn open_file(filename: &str) -> Option<Arc<RwLock<io::BufWriter<fs::File>>>> {\n\n log::trace!(\"enter: open_file({})\", filename);\n\n\n\n match fs::OpenOptions::new() // std fs\n\n .create(true)\n\n .append(true)\n\n .open(filename)\n\n {\n\n Ok(file) => {\n\n let writer = io::BufWriter::new(file); // std io\n\n\n\n let locked_file = Some(Arc::new(RwLock::new(writer)));\n\n\n\n log::trace!(\"exit: open_file -> {:?}\", locked_file);\n\n locked_file\n\n }\n\n Err(e) => {\n\n log::error!(\"{}\", e);\n\n log::trace!(\"exit: open_file -> None\");\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/reporter.rs", "rank": 34, "score": 63797.38662882944 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + extract-links\n\nfn banner_prints_extract_links() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-e\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n 
.and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Extract Links\"))\n\n .and(predicate::str::contains(\"true\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n", "file_path": "tests/test_banner.rs", "rank": 35, "score": 60485.10450042694 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + multiple size filters\n\nfn banner_prints_size_filters() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-S\")\n\n .arg(\"789456123\")\n\n .arg(\"--sizefilter\")\n\n .arg(\"44444444\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Size Filter\"))\n\n .and(predicate::str::contains(\"789456123\"))\n\n .and(predicate::str::contains(\"44444444\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 36, "score": 60484.94398484833 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + add slash\n\nfn banner_prints_add_slash() -> Result<(), Box<dyn std::error::Error>> {\n\n 
Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-f\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Add Slash\"))\n\n .and(predicate::str::contains(\"true\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 37, "score": 60459.12755925399 }, { "content": "/// send the function an empty file\n\nfn main_use_empty_wordlist() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"-vvvv\")\n\n .assert()\n\n .failure()\n", "file_path": "tests/test_main.rs", "rank": 38, "score": 60413.033377311935 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + output file\n\nfn banner_prints_output_file() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"--output\")\n\n .arg(\"/super/cool/path\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n 
.and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Output File\"))\n\n .and(predicate::str::contains(\"/super/cool/path\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 39, "score": 60304.18008363011 }, { "content": "/// default recursion depth\n\nfn depth() -> usize {\n\n 4\n\n}\n\n\n\nimpl Default for Configuration {\n\n /// Builds the default Configuration for feroxbuster\n\n fn default() -> Self {\n\n let timeout = timeout();\n\n let useragent = useragent();\n\n let client = client::initialize(timeout, &useragent, false, false, &HashMap::new(), None);\n\n\n\n Configuration {\n\n client,\n\n timeout,\n\n useragent,\n\n dontfilter: false,\n\n quiet: false,\n\n stdin: false,\n\n verbosity: 0,\n\n addslash: false,\n", "file_path": "src/config.rs", "rank": 40, "score": 59907.80657677636 }, { "content": "/// send nothing over stdin, expect heuristics to be upset during connectivity test\n\nfn main_use_empty_stdin_targets() -> Result<(), Box<dyn std::error::Error>> {\n\n let (tmp_dir, file) = setup_tmp_directory(&[], \"wordlist\")?;\n\n\n\n // get_targets is called before scan, so the empty wordlist shouldn't trigger\n\n // the 'Did not find any words' error\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--stdin\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"-vvv\")\n\n .pipe_stdin(file)\n\n .unwrap()\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"Could not connect to any target provided\")\n\n .and(predicate::str::contains(\"ERROR\"))\n\n 
.and(predicate::str::contains(\"heuristics::connectivity_test\"))\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .not(), // no target url found\n\n );\n\n\n\n teardown_tmp_directory(tmp_dir);\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/test_main.rs", "rank": 41, "score": 58343.89423660483 }, { "content": "/// send a single valid request, get a response, and write it to disk\n\nfn scanner_single_request_scan_with_file_output() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/LICENSE\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let outfile = tmp_dir.path().join(\"output\");\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"-vvvv\")\n", "file_path": "tests/test_scanner.rs", "rank": 42, "score": 56384.04067171151 }, { "content": "/// test finds a static wildcard and reports nothing to stdout\n\nfn heuristics_wildcard_test_with_two_static_wildcards_with_quiet_enabled(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let mock2 = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{96}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n", "file_path": "tests/test_heuristics.rs", 
"rank": 43, "score": 54912.37814618432 }, { "content": "/// send an invalid output file, expect nothing to be written to disk\n\nfn scanner_single_request_scan_with_invalid_file_output() -> Result<(), Box<dyn std::error::Error>>\n\n{\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/LICENSE\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let outfile = tmp_dir.path(); // outfile is a directory\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n", "file_path": "tests/test_scanner.rs", "rank": 44, "score": 54630.05571197528 }, { "content": "/// test finds a static wildcard and reports as much to stdout\n\nfn heuristics_wildcard_test_with_two_static_wildcards() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let mock2 = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{96}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n", "file_path": "tests/test_heuristics.rs", "rank": 45, "score": 47613.280340320096 }, { "content": "/// send single valid request, get back a 301 without a Location header, expect false\n\nfn scanner_single_request_returns_301_without_location_header(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = 
MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/LICENSE\")\n\n .return_status(301)\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"-T\")\n\n .arg(\"5\")\n\n .arg(\"-a\")\n", "file_path": "tests/test_scanner.rs", "rank": 46, "score": 47105.25042000323 }, { "content": "/// send a valid request, follow 200s into new directories, expect 200 responses\n\nfn scanner_recursive_request_scan_using_only_success_responses(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let urls = [\n\n \"js/\".to_string(),\n\n \"prod/\".to_string(),\n\n \"dev/\".to_string(),\n\n \"file.js\".to_string(),\n\n ];\n\n let (tmp_dir, file) = setup_tmp_directory(&urls, \"wordlist\")?;\n\n\n\n let js_mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/js/\")\n\n .return_status(200)\n\n .return_header(\"Location\", &srv.url(\"/js/\"))\n\n .create_on(&srv);\n\n\n\n let js_prod_mock = Mock::new()\n\n .expect_method(GET)\n", "file_path": "tests/test_scanner.rs", "rank": 47, "score": 47076.25943362289 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + insecure\n\nfn banner_prints_insecure() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-k\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status 
Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Insecure\"))\n\n .and(predicate::str::contains(\"true\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 48, "score": 42452.99487323563 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + proxy\n\nfn banner_prints_proxy() -> Result<(), Box<dyn std::error::Error>> {\n\n let urls = vec![\n\n String::from(\"http://localhost\"),\n\n String::from(\"http://schmocalhost\"),\n\n ];\n\n let (tmp_dir, file) = setup_tmp_directory(&urls, \"wordlist\")?;\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--stdin\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"--proxy\")\n\n .arg(\"http://127.0.0.1:8080\")\n\n .pipe_stdin(file)\n\n .unwrap()\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n", "file_path": "tests/test_banner.rs", "rank": 49, "score": 42452.97126897058 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + queries\n\nfn banner_prints_queries() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-Q\")\n\n .arg(\"token=supersecret\")\n\n .arg(\"--query\")\n\n .arg(\"stuff=things\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n 
.and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Query Parameter\"))\n\n .and(predicate::str::contains(\"token=supersecret\"))\n\n .and(predicate::str::contains(\"stuff=things\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 50, "score": 42452.96885854697 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + follow redirects\n\nfn banner_prints_redirects() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-r\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Follow Redirects\"))\n\n .and(predicate::str::contains(\"true\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 51, "score": 42441.669088973315 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + multiple headers\n\nfn banner_prints_headers() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"--headers\")\n\n .arg(\"stuff:things\")\n\n .arg(\"-H\")\n\n .arg(\"mostuff:mothings\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target 
Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Header\"))\n\n .and(predicate::str::contains(\"stuff: things\"))\n\n .and(predicate::str::contains(\"mostuff: mothings\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 52, "score": 42438.331924460086 }, { "content": "/// test allows non-existent wordlist to trigger the banner printing to stderr\n\n/// expect to see all mandatory prints + status codes\n\nfn banner_prints_status_codes() -> Result<(), Box<dyn std::error::Error>> {\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://localhost\")\n\n .arg(\"-s\")\n\n .arg(\"201,301,401\")\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"─┬─\")\n\n .and(predicate::str::contains(\"Target Url\"))\n\n .and(predicate::str::contains(\"http://localhost\"))\n\n .and(predicate::str::contains(\"Threads\"))\n\n .and(predicate::str::contains(\"Wordlist\"))\n\n .and(predicate::str::contains(\"Timeout (secs)\"))\n\n .and(predicate::str::contains(\"User-Agent\"))\n\n .and(predicate::str::contains(\"Status Codes\"))\n\n .and(predicate::str::contains(\"[201, 301, 401]\"))\n\n .and(predicate::str::contains(\"─┴─\")),\n\n );\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_banner.rs", "rank": 53, "score": 40995.217238671044 }, { "content": "/// test pipes two bad targets to the scanner, expected result is that the\n\n/// scanner dies\n\nfn test_two_targets_cannot_connect() -> Result<(), Box<dyn std::error::Error>> {\n\n let not_real =\n\n 
String::from(\"http://fjdksafjkdsajfkdsajkfdsajkfsdjkdsfdsafdsafdsajkr3l2ajfdskafdsjk\");\n\n let urls = vec![not_real.clone(), not_real];\n\n let (tmp_dir, file) = setup_tmp_directory(&urls, \"wordlist\")?;\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--stdin\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .pipe_stdin(file)\n\n .unwrap()\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"Could not connect to any target provided\")\n\n .and(predicate::str::contains(\"ERROR\"))\n\n .and(predicate::str::contains(\"heuristics::connectivity_test\")),\n\n );\n\n\n\n teardown_tmp_directory(tmp_dir);\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_heuristics.rs", "rank": 54, "score": 40928.02984857332 }, { "content": "/// test passes one bad target via -u to the scanner, expected result is that the\n\n/// scanner dies\n\nfn test_single_target_cannot_connect() -> Result<(), Box<dyn std::error::Error>> {\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(\"http://fjdksafjkdsajfkdsajkfdsajkfsdjkdsfdsafdsafdsajkr3l2ajfdskafdsjk\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .assert()\n\n .failure()\n\n .stderr(\n\n predicate::str::contains(\"Could not connect to any target provided\")\n\n .and(predicate::str::contains(\"ERROR\"))\n\n .and(predicate::str::contains(\"heuristics::connectivity_test\")),\n\n );\n\n\n\n teardown_tmp_directory(tmp_dir);\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_heuristics.rs", "rank": 55, "score": 40927.97520017276 }, { "content": "/// uses dontfilter, so the normal wildcard test should never happen\n\nfn heuristics_static_wildcard_request_with_dontfilter() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], 
\"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"--dontfilter\")\n\n .unwrap();\n\n\n\n teardown_tmp_directory(tmp_dir);\n\n\n\n assert_eq!(mock.times_called(), 0);\n\n Ok(())\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test_heuristics.rs", "rank": 56, "score": 39593.1853259802 }, { "content": "/// test finds a dynamic wildcard and reports as much to stdout and a file\n\nfn test_dynamic_wildcard_request_found() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n let outfile = tmp_dir.path().join(\"outfile\");\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let mock2 = Mock::new()\n\n .expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{96}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a testAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\")\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n", "file_path": "tests/test_heuristics.rs", "rank": 57, "score": 39593.0889505548 }, { "content": "/// test finds a static wildcard and reports as much to stdout\n\nfn test_static_wildcard_request_found() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) = setup_tmp_directory(&[\"LICENSE\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n 
.expect_method(GET)\n\n .expect_path_matches(Regex::new(\"/[a-zA-Z0-9]{32}/\").unwrap())\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n\n .unwrap()\n\n .arg(\"--url\")\n\n .arg(srv.url(\"/\"))\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n\n .arg(\"--addslash\")\n\n .unwrap();\n\n\n", "file_path": "tests/test_heuristics.rs", "rank": 58, "score": 39589.56110071167 }, { "content": "/// send a single valid request, filter the size of the response, expect one out of 2 urls\n\nfn scanner_single_request_scan_with_filtered_result() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n let (tmp_dir, file) =\n\n setup_tmp_directory(&[\"LICENSE\".to_string(), \"ignored\".to_string()], \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/LICENSE\")\n\n .return_status(200)\n\n .return_body(\"this is a not a test\")\n\n .create_on(&srv);\n\n\n\n let filtered_mock = Mock::new()\n\n .expect_method(GET)\n\n .expect_path(\"/ignored\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let cmd = Command::cargo_bin(\"feroxbuster\")\n", "file_path": "tests/test_scanner.rs", "rank": 59, "score": 38409.057602080764 }, { "content": "/// test pipes one good target and one bad to the scanner, expected result is that the\n\n/// good target is scanned successfully while the bad target is ignored and handled properly\n\nfn test_one_good_and_one_bad_target_scan_succeeds() -> Result<(), Box<dyn std::error::Error>> {\n\n let srv = MockServer::start();\n\n\n\n let not_real =\n\n String::from(\"http://fjdksafjkdsajfkdsajkfdsajkfsdjkdsfdsafdsafdsajkr3l2ajfdskafdsjk\");\n\n let urls = vec![not_real, srv.url(\"/\"), String::from(\"LICENSE\")];\n\n let (tmp_dir, file) = setup_tmp_directory(&urls, \"wordlist\")?;\n\n\n\n let mock = Mock::new()\n\n .expect_method(GET)\n\n 
.expect_path(\"/LICENSE\")\n\n .return_status(200)\n\n .return_body(\"this is a test\")\n\n .create_on(&srv);\n\n\n\n let mut cmd = Command::cargo_bin(\"feroxbuster\").unwrap();\n\n\n\n cmd.arg(\"--stdin\")\n\n .arg(\"--wordlist\")\n\n .arg(file.as_os_str())\n", "file_path": "tests/test_heuristics.rs", "rank": 60, "score": 36088.312805270994 }, { "content": " insecure: false,\n\n redirects: false,\n\n norecursion: false,\n\n extract_links: false,\n\n proxy: String::new(),\n\n config: String::new(),\n\n output: String::new(),\n\n target_url: String::new(),\n\n queries: Vec::new(),\n\n extensions: Vec::new(),\n\n sizefilters: Vec::new(),\n\n headers: HashMap::new(),\n\n threads: threads(),\n\n depth: depth(),\n\n wordlist: wordlist(),\n\n statuscodes: statuscodes(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 61, "score": 31722.270920222665 }, { "content": "impl Configuration {\n\n /// Creates a [Configuration](struct.Configuration.html) object with the following\n\n /// built-in default values\n\n ///\n\n /// - **timeout**: `5` seconds\n\n /// - **redirects**: `false`\n\n /// - **extract-links**: `false`\n\n /// - **wordlist**: [`DEFAULT_WORDLIST`](constant.DEFAULT_WORDLIST.html)\n\n /// - **config**: `None`\n\n /// - **threads**: `50`\n\n /// - **timeout**: `7` seconds\n\n /// - **verbosity**: `0` (no logging enabled)\n\n /// - **proxy**: `None`\n\n /// - **statuscodes**: [`DEFAULT_RESPONSE_CODES`](constant.DEFAULT_RESPONSE_CODES.html)\n\n /// - **output**: `None` (print to stdout)\n\n /// - **quiet**: `false`\n\n /// - **useragent**: `feroxer/VERSION`\n\n /// - **insecure**: `false` (don't be insecure, i.e. 
don't allow invalid certs)\n\n /// - **extensions**: `None`\n\n /// - **sizefilters**: `None`\n", "file_path": "src/config.rs", "rank": 62, "score": 31719.320674685714 }, { "content": " for val in args.values_of(\"queries\").unwrap() {\n\n // same basic logic used as reading in the headers HashMap above\n\n let mut split_val = val.split('=');\n\n\n\n let name = split_val.next().unwrap().trim();\n\n\n\n let value = split_val.collect::<Vec<&str>>().join(\"=\");\n\n\n\n config.queries.push((name.to_string(), value.to_string()));\n\n }\n\n }\n\n\n\n // this if statement determines if we've gotten a Client configuration change from\n\n // either the config file or command line arguments; if we have, we need to rebuild\n\n // the client and store it in the config struct\n\n if !config.proxy.is_empty()\n\n || config.timeout != timeout()\n\n || config.useragent != useragent()\n\n || config.redirects\n\n || config.insecure\n", "file_path": "src/config.rs", "rank": 63, "score": 31718.59578860968 }, { "content": " // update the settings\n\n Self::merge_config(&mut config, settings);\n\n }\n\n }\n\n }\n\n\n\n /// Given two Configurations, overwrite `settings` with the fields found in `settings_to_merge`\n\n fn merge_config(settings: &mut Self, settings_to_merge: Self) {\n\n settings.threads = settings_to_merge.threads;\n\n settings.wordlist = settings_to_merge.wordlist;\n\n settings.statuscodes = settings_to_merge.statuscodes;\n\n settings.proxy = settings_to_merge.proxy;\n\n settings.timeout = settings_to_merge.timeout;\n\n settings.verbosity = settings_to_merge.verbosity;\n\n settings.quiet = settings_to_merge.quiet;\n\n settings.output = settings_to_merge.output;\n\n settings.useragent = settings_to_merge.useragent;\n\n settings.redirects = settings_to_merge.redirects;\n\n settings.insecure = settings_to_merge.insecure;\n\n settings.extract_links = settings_to_merge.extract_links;\n", "file_path": "src/config.rs", "rank": 64, "score": 31714.325972170198 }, { "content": 
"\n\n /// Sets the User-Agent (default: feroxbuster/VERSION)\n\n #[serde(default = \"useragent\")]\n\n pub useragent: String,\n\n\n\n /// Follow redirects\n\n #[serde(default)]\n\n pub redirects: bool,\n\n\n\n /// Disables TLS certificate validation\n\n #[serde(default)]\n\n pub insecure: bool,\n\n\n\n /// File extension(s) to search for\n\n #[serde(default)]\n\n pub extensions: Vec<String>,\n\n\n\n /// HTTP headers to be used in each request\n\n #[serde(default)]\n\n pub headers: HashMap<String, String>,\n", "file_path": "src/config.rs", "rank": 65, "score": 31713.050898432673 }, { "content": " /// creates a dummy configuration file for testing\n\n fn setup_config_test() -> Configuration {\n\n let data = r#\"\n\n wordlist = \"/some/path\"\n\n statuscodes = [201, 301, 401]\n\n threads = 40\n\n timeout = 5\n\n proxy = \"http://127.0.0.1:8080\"\n\n quiet = true\n\n verbosity = 1\n\n output = \"/some/otherpath\"\n\n redirects = true\n\n insecure = true\n\n extensions = [\"html\", \"php\", \"js\"]\n\n headers = {stuff = \"things\", mostuff = \"mothings\"}\n\n queries = [[\"name\",\"value\"], [\"rick\", \"astley\"]]\n\n norecursion = true\n\n addslash = true\n\n stdin = true\n\n dontfilter = true\n", "file_path": "src/config.rs", "rank": 66, "score": 31712.01366718229 }, { "content": "\n\n ////\n\n // organizational breakpoint; all options below alter the Client configuration\n\n ////\n\n if args.value_of(\"proxy\").is_some() {\n\n config.proxy = String::from(args.value_of(\"proxy\").unwrap());\n\n }\n\n\n\n if args.value_of(\"useragent\").is_some() {\n\n config.useragent = String::from(args.value_of(\"useragent\").unwrap());\n\n }\n\n\n\n if args.value_of(\"timeout\").is_some() {\n\n let timeout = value_t!(args.value_of(\"timeout\"), u64).unwrap_or_else(|e| e.exit());\n\n config.timeout = timeout;\n\n }\n\n\n\n if args.is_present(\"redirects\") {\n\n config.redirects = args.is_present(\"redirects\");\n\n }\n", "file_path": "src/config.rs", "rank": 67, "score": 
31711.120949673415 }, { "content": " extract_links = true\n\n depth = 1\n\n sizefilters = [4120]\n\n \"#;\n\n let tmp_dir = TempDir::new().unwrap();\n\n let file = tmp_dir.path().join(DEFAULT_CONFIG_NAME);\n\n write(&file, data).unwrap();\n\n Configuration::parse_config(file).unwrap()\n\n }\n\n\n\n #[test]\n\n /// test that all default config values meet expectations\n\n fn default_configuration() {\n\n let config = Configuration::default();\n\n assert_eq!(config.wordlist, wordlist());\n\n assert_eq!(config.proxy, String::new());\n\n assert_eq!(config.target_url, String::new());\n\n assert_eq!(config.config, String::new());\n\n assert_eq!(config.statuscodes, statuscodes());\n\n assert_eq!(config.threads, threads());\n", "file_path": "src/config.rs", "rank": 68, "score": 31710.65850949512 }, { "content": "\n\n /// Maximum recursion depth, a depth of 0 is infinite recursion\n\n #[serde(default = \"depth\")]\n\n pub depth: usize,\n\n\n\n /// Filter out messages of a particular size\n\n #[serde(default)]\n\n pub sizefilters: Vec<u64>,\n\n\n\n /// Don't auto-filter wildcard responses\n\n #[serde(default)]\n\n pub dontfilter: bool,\n\n}\n\n\n\n// functions timeout, threads, statuscodes, useragent, wordlist, and depth are used to provide\n\n// defaults in the event that a ferox-config.toml is found but one or more of the values below\n\n// aren't listed in the config. 
This way, we get the correct defaults upon Deserialization\n\n\n\n/// default timeout value\n", "file_path": "src/config.rs", "rank": 69, "score": 31710.11213488495 }, { "content": " assert_eq!(config.sizefilters, vec![4120]);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the values parsed are correct\n\n fn config_reads_headers() {\n\n let config = setup_config_test();\n\n let mut headers = HashMap::new();\n\n headers.insert(\"stuff\".to_string(), \"things\".to_string());\n\n headers.insert(\"mostuff\".to_string(), \"mothings\".to_string());\n\n assert_eq!(config.headers, headers);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the values parsed are correct\n\n fn config_reads_queries() {\n\n let config = setup_config_test();\n\n let mut queries = vec![];\n\n queries.push((\"name\".to_string(), \"value\".to_string()));\n\n queries.push((\"rick\".to_string(), \"astley\".to_string()));\n\n assert_eq!(config.queries, queries);\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 70, "score": 31709.11249351998 }, { "content": "\n\n if args.is_present(\"insecure\") {\n\n config.insecure = args.is_present(\"insecure\");\n\n }\n\n\n\n if args.values_of(\"headers\").is_some() {\n\n for val in args.values_of(\"headers\").unwrap() {\n\n let mut split_val = val.split(':');\n\n\n\n // explicitly take first split value as header's name\n\n let name = split_val.next().unwrap().trim();\n\n\n\n // all other items in the iterator returned by split, when combined with the\n\n // original split deliminator (:), make up the header's final value\n\n let value = split_val.collect::<Vec<&str>>().join(\":\");\n\n config.headers.insert(name.to_string(), value.to_string());\n\n }\n\n }\n\n\n\n if args.values_of(\"queries\").is_some() {\n", "file_path": "src/config.rs", "rank": 71, "score": 31708.94261236028 }, { "content": " || !config.headers.is_empty()\n\n {\n\n if config.proxy.is_empty() {\n\n config.client = client::initialize(\n\n 
config.timeout,\n\n &config.useragent,\n\n config.redirects,\n\n config.insecure,\n\n &config.headers,\n\n None,\n\n )\n\n } else {\n\n config.client = client::initialize(\n\n config.timeout,\n\n &config.useragent,\n\n config.redirects,\n\n config.insecure,\n\n &config.headers,\n\n Some(&config.proxy),\n\n )\n", "file_path": "src/config.rs", "rank": 72, "score": 31708.381610901015 }, { "content": "\n\n /// Path to the config file used\n\n #[serde(default)]\n\n pub config: String,\n\n\n\n /// Proxy to use for requests (ex: http(s)://host:port, socks5://host:port)\n\n #[serde(default)]\n\n pub proxy: String,\n\n\n\n /// The target URL\n\n #[serde(default)]\n\n pub target_url: String,\n\n\n\n /// Status Codes of interest (default: 200 204 301 302 307 308 401 403 405)\n\n #[serde(default = \"statuscodes\")]\n\n pub statuscodes: Vec<u16>,\n\n\n\n /// Instance of [reqwest::Client](https://docs.rs/reqwest/latest/reqwest/struct.Client.html)\n\n #[serde(skip)]\n\n pub client: Client,\n", "file_path": "src/config.rs", "rank": 73, "score": 31708.064824910653 }, { "content": "\n\n /// URL query parameters\n\n #[serde(default)]\n\n pub queries: Vec<(String, String)>,\n\n\n\n /// Do not scan recursively\n\n #[serde(default)]\n\n pub norecursion: bool,\n\n\n\n /// Extract links from html/javscript\n\n #[serde(default)]\n\n pub extract_links: bool,\n\n\n\n /// Append / to each request\n\n #[serde(default)]\n\n pub addslash: bool,\n\n\n\n /// Read url(s) from STDIN\n\n #[serde(default)]\n\n pub stdin: bool,\n", "file_path": "src/config.rs", "rank": 74, "score": 31707.677378121272 }, { "content": " assert_eq!(config.depth, depth());\n\n assert_eq!(config.timeout, timeout());\n\n assert_eq!(config.verbosity, 0);\n\n assert_eq!(config.quiet, false);\n\n assert_eq!(config.dontfilter, false);\n\n assert_eq!(config.norecursion, false);\n\n assert_eq!(config.stdin, false);\n\n assert_eq!(config.addslash, false);\n\n assert_eq!(config.redirects, false);\n\n assert_eq!(config.extract_links, 
false);\n\n assert_eq!(config.insecure, false);\n\n assert_eq!(config.queries, Vec::new());\n\n assert_eq!(config.extensions, Vec::<String>::new());\n\n assert_eq!(config.sizefilters, Vec::<u64>::new());\n\n assert_eq!(config.headers, HashMap::new());\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_wordlist() {\n", "file_path": "src/config.rs", "rank": 75, "score": 31706.13576570524 }, { "content": "use crate::utils::{module_colorizer, status_colorizer};\n\nuse crate::{client, parser, progress};\n\nuse crate::{DEFAULT_CONFIG_NAME, DEFAULT_STATUS_CODES, DEFAULT_WORDLIST, VERSION};\n\nuse clap::value_t;\n\nuse indicatif::{MultiProgress, ProgressBar, ProgressDrawTarget};\n\nuse lazy_static::lazy_static;\n\nuse reqwest::{Client, StatusCode};\n\nuse serde::Deserialize;\n\nuse std::collections::HashMap;\n\nuse std::env::{current_dir, current_exe};\n\nuse std::fs::read_to_string;\n\nuse std::path::PathBuf;\n\nuse std::process::exit;\n\n\n\nlazy_static! 
{\n\n /// Global configuration state\n\n pub static ref CONFIGURATION: Configuration = Configuration::new();\n\n\n\n /// Global progress bar that houses other progress bars\n\n pub static ref PROGRESS_BAR: MultiProgress = MultiProgress::with_draw_target(ProgressDrawTarget::stdout());\n", "file_path": "src/config.rs", "rank": 76, "score": 31705.363420062135 }, { "content": " }\n\n }\n\n\n\n config\n\n }\n\n\n\n /// Given a configuration file's location and an instance of `Configuration`, read in\n\n /// the config file if found and update the current settings with the settings found therein\n\n fn parse_and_merge_config(config_file: PathBuf, mut config: &mut Self) {\n\n if config_file.exists() {\n\n // save off a string version of the path before it goes out of scope\n\n let conf_str = match config_file.to_str() {\n\n Some(cs) => String::from(cs),\n\n None => String::new(),\n\n };\n\n\n\n if let Some(settings) = Self::parse_config(config_file) {\n\n // set the config used for viewing in the banner\n\n config.config = conf_str;\n\n\n", "file_path": "src/config.rs", "rank": 77, "score": 31705.141630136735 }, { "content": " // overwritten from Struct defaults, to file config, to command line args, soooo ¯\\_(ツ)_/¯\n\n if args.value_of(\"threads\").is_some() {\n\n let threads = value_t!(args.value_of(\"threads\"), usize).unwrap_or_else(|e| e.exit());\n\n config.threads = threads;\n\n }\n\n\n\n if args.value_of(\"depth\").is_some() {\n\n let depth = value_t!(args.value_of(\"depth\"), usize).unwrap_or_else(|e| e.exit());\n\n config.depth = depth;\n\n }\n\n\n\n if args.value_of(\"wordlist\").is_some() {\n\n config.wordlist = String::from(args.value_of(\"wordlist\").unwrap());\n\n }\n\n\n\n if args.value_of(\"output\").is_some() {\n\n config.output = String::from(args.value_of(\"output\").unwrap());\n\n }\n\n\n\n if args.values_of(\"statuscodes\").is_some() {\n", "file_path": "src/config.rs", "rank": 78, "score": 31704.378213548793 }, { "content": "\n\n /// Global progress 
bar that is only used for printing messages that don't jack up other bars\n\n pub static ref PROGRESS_PRINTER: ProgressBar = progress::add_bar(\"\", 0, true);\n\n}\n\n\n\n/// Represents the final, global configuration of the program.\n\n///\n\n/// This struct is the combination of the following:\n\n/// - default configuration values\n\n/// - plus overrides read from a configuration file\n\n/// - plus command-line options\n\n///\n\n/// In that order.\n\n///\n\n/// Inspired by and derived from https://github.com/PhilipDaniels/rust-config-example\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct Configuration {\n\n /// Path to the wordlist\n\n #[serde(default = \"wordlist\")]\n\n pub wordlist: String,\n", "file_path": "src/config.rs", "rank": 79, "score": 31704.120096135342 }, { "content": " settings.extensions = settings_to_merge.extensions;\n\n settings.headers = settings_to_merge.headers;\n\n settings.queries = settings_to_merge.queries;\n\n settings.norecursion = settings_to_merge.norecursion;\n\n settings.addslash = settings_to_merge.addslash;\n\n settings.stdin = settings_to_merge.stdin;\n\n settings.depth = settings_to_merge.depth;\n\n settings.sizefilters = settings_to_merge.sizefilters;\n\n settings.dontfilter = settings_to_merge.dontfilter;\n\n }\n\n\n\n /// If present, read in `DEFAULT_CONFIG_NAME` and deserialize the specified values\n\n ///\n\n /// uses serde to deserialize the toml into a `Configuration` struct\n\n fn parse_config(config_file: PathBuf) -> Option<Self> {\n\n if let Ok(content) = read_to_string(config_file) {\n\n match toml::from_str(content.as_str()) {\n\n Ok(config) => {\n\n return Some(config);\n\n }\n", "file_path": "src/config.rs", "rank": 80, "score": 31703.78355579453 }, { "content": " config.verbosity = args.occurrences_of(\"verbosity\") as u8;\n\n }\n\n\n\n if args.is_present(\"norecursion\") {\n\n config.norecursion = args.is_present(\"norecursion\");\n\n }\n\n\n\n if args.is_present(\"addslash\") {\n\n config.addslash = 
args.is_present(\"addslash\");\n\n }\n\n\n\n if args.is_present(\"extract_links\") {\n\n config.extract_links = args.is_present(\"extract_links\");\n\n }\n\n\n\n if args.is_present(\"stdin\") {\n\n config.stdin = args.is_present(\"stdin\");\n\n } else {\n\n config.target_url = String::from(args.value_of(\"url\").unwrap());\n\n }\n", "file_path": "src/config.rs", "rank": 81, "score": 31703.322306397044 }, { "content": " config.extensions = args\n\n .values_of(\"extensions\")\n\n .unwrap()\n\n .map(|val| val.to_string())\n\n .collect();\n\n }\n\n\n\n if args.values_of(\"sizefilters\").is_some() {\n\n config.sizefilters = args\n\n .values_of(\"sizefilters\")\n\n .unwrap() // already known good\n\n .map(|size| {\n\n size.parse::<u64>().unwrap_or_else(|e| {\n\n eprintln!(\n\n \"{} {}: {}\",\n\n status_colorizer(\"ERROR\"),\n\n module_colorizer(\"Configuration::new\"),\n\n e\n\n );\n\n exit(1)\n", "file_path": "src/config.rs", "rank": 82, "score": 31700.055107903467 }, { "content": " Self::parse_and_merge_config(config_file, &mut config);\n\n };\n\n\n\n // merge a config found in same the directory as feroxbuster executable\n\n if let Ok(exe_path) = current_exe() {\n\n if let Some(bin_dir) = exe_path.parent() {\n\n let config_file = bin_dir.join(DEFAULT_CONFIG_NAME);\n\n Self::parse_and_merge_config(config_file, &mut config);\n\n };\n\n };\n\n\n\n // merge a config found in the user's current working directory\n\n if let Ok(cwd) = current_dir() {\n\n let config_file = cwd.join(DEFAULT_CONFIG_NAME);\n\n Self::parse_and_merge_config(config_file, &mut config);\n\n }\n\n\n\n let args = parser::initialize().get_matches();\n\n\n\n // the .is_some appears clunky, but it allows default values to be incrementally\n", "file_path": "src/config.rs", "rank": 83, "score": 31699.812279742364 }, { "content": " /// - **headers**: `None`\n\n /// - **queries**: `None`\n\n /// - **norecursion**: `false` (recursively scan enumerated sub-directories)\n\n /// - **addslash**: `false`\n\n /// - 
**stdin**: `false`\n\n /// - **dontfilter**: `false` (auto filter wildcard responses)\n\n /// - **depth**: `4` (maximum recursion depth)\n\n ///\n\n /// After which, any values defined in a\n\n /// [ferox-config.toml](constant.DEFAULT_CONFIG_NAME.html) config file will override the\n\n /// built-in defaults.\n\n ///\n\n /// `ferox-config.toml` can be placed in any of the following locations (in the order shown):\n\n /// - `/etc/feroxbuster/`\n\n /// - `CONFIG_DIR/ferxobuster/`\n\n /// - The same directory as the `feroxbuster` executable\n\n /// - The user's current working directory\n\n ///\n\n /// If more than one valid configuration file is found, each one overwrites the values found previously.\n\n ///\n", "file_path": "src/config.rs", "rank": 84, "score": 31699.40523118698 }, { "content": "\n\n /// Number of concurrent threads (default: 50)\n\n #[serde(default = \"threads\")]\n\n pub threads: usize,\n\n\n\n /// Number of seconds before a request times out (default: 7)\n\n #[serde(default = \"timeout\")]\n\n pub timeout: u64,\n\n\n\n /// Level of verbosity, equates to log level\n\n #[serde(default)]\n\n pub verbosity: u8,\n\n\n\n /// Only print URLs\n\n #[serde(default)]\n\n pub quiet: bool,\n\n\n\n /// Output file to write results to (default: stdout)\n\n #[serde(default)]\n\n pub output: String,\n", "file_path": "src/config.rs", "rank": 85, "score": 31698.445716036877 }, { "content": " }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_extract_links() {\n\n let config = setup_config_test();\n\n assert_eq!(config.extract_links, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_extensions() {\n\n let config = setup_config_test();\n\n assert_eq!(config.extensions, vec![\"html\", \"php\", \"js\"]);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_sizefilters() {\n\n let 
config = setup_config_test();\n", "file_path": "src/config.rs", "rank": 86, "score": 31698.216984024322 }, { "content": " // search for a config using the following order of precedence\n\n // - /etc/feroxbuster/\n\n // - CONFIG_DIR/ferxobuster/\n\n // - same directory as feroxbuster executable\n\n // - current directory\n\n\n\n // merge a config found at /etc/feroxbuster/ferox-config.toml\n\n let config_file = PathBuf::new()\n\n .join(\"/etc/feroxbuster\")\n\n .join(DEFAULT_CONFIG_NAME);\n\n Self::parse_and_merge_config(config_file, &mut config);\n\n\n\n // merge a config found at ~/.config/feroxbuster/ferox-config.toml\n\n if let Some(config_dir) = dirs::config_dir() {\n\n // config_dir() resolves to one of the following\n\n // - linux: $XDG_CONFIG_HOME or $HOME/.config\n\n // - macOS: $HOME/Library/Application Support\n\n // - windows: {FOLDERID_RoamingAppData}\n\n\n\n let config_file = config_dir.join(\"feroxbuster\").join(DEFAULT_CONFIG_NAME);\n", "file_path": "src/config.rs", "rank": 87, "score": 31697.890986410403 }, { "content": " config.statuscodes = args\n\n .values_of(\"statuscodes\")\n\n .unwrap() // already known good\n\n .map(|code| {\n\n StatusCode::from_bytes(code.as_bytes())\n\n .unwrap_or_else(|e| {\n\n eprintln!(\n\n \"{} {}: {}\",\n\n status_colorizer(\"ERROR\"),\n\n module_colorizer(\"Configuration::new\"),\n\n e\n\n );\n\n exit(1)\n\n })\n\n .as_u16()\n\n })\n\n .collect();\n\n }\n\n\n\n if args.values_of(\"extensions\").is_some() {\n", "file_path": "src/config.rs", "rank": 88, "score": 31697.662768706603 }, { "content": " let config = setup_config_test();\n\n assert_eq!(config.wordlist, \"/some/path\");\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_statuscodes() {\n\n let config = setup_config_test();\n\n assert_eq!(config.statuscodes, vec![201, 301, 401]);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_threads() 
{\n\n let config = setup_config_test();\n\n assert_eq!(config.threads, 40);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n", "file_path": "src/config.rs", "rank": 89, "score": 31697.517584231835 }, { "content": " /// Finally, any options/arguments given on the commandline will override both built-in and\n\n /// config-file specified values.\n\n ///\n\n /// The resulting [Configuration](struct.Configuration.html) is a singleton with a `static`\n\n /// lifetime.\n\n pub fn new() -> Self {\n\n // when compiling for test, we want to eliminate the runtime dependency of the parser\n\n if cfg!(test) {\n\n return Configuration::default();\n\n }\n\n\n\n // Get the default configuration, this is what will apply if nothing\n\n // else is specified.\n\n let mut config = Configuration::default();\n\n\n\n // Next, we parse the ferox-config.toml file, if present and set the values\n\n // therein to overwrite our default values. Deserialized defaults are specified\n\n // in the Configuration struct so that we don't change anything that isn't\n\n // actually specified in the config file\n\n //\n", "file_path": "src/config.rs", "rank": 90, "score": 31696.80465694598 }, { "content": " fn config_reads_depth() {\n\n let config = setup_config_test();\n\n assert_eq!(config.depth, 1);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_timeout() {\n\n let config = setup_config_test();\n\n assert_eq!(config.timeout, 5);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_proxy() {\n\n let config = setup_config_test();\n\n assert_eq!(config.proxy, \"http://127.0.0.1:8080\");\n\n }\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 91, "score": 31696.244005396526 }, { "content": " #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_redirects() {\n\n let config = 
setup_config_test();\n\n assert_eq!(config.redirects, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_insecure() {\n\n let config = setup_config_test();\n\n assert_eq!(config.insecure, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_norecursion() {\n\n let config = setup_config_test();\n\n assert_eq!(config.norecursion, true);\n\n }\n", "file_path": "src/config.rs", "rank": 92, "score": 31696.1970521799 }, { "content": " /// parse the test config and see that the value parsed is correct\n\n fn config_reads_quiet() {\n\n let config = setup_config_test();\n\n assert_eq!(config.quiet, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_verbosity() {\n\n let config = setup_config_test();\n\n assert_eq!(config.verbosity, 1);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_output() {\n\n let config = setup_config_test();\n\n assert_eq!(config.output, \"/some/otherpath\");\n\n }\n\n\n", "file_path": "src/config.rs", "rank": 93, "score": 31693.294725023563 }, { "content": "\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_stdin() {\n\n let config = setup_config_test();\n\n assert_eq!(config.stdin, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_dontfilter() {\n\n let config = setup_config_test();\n\n assert_eq!(config.dontfilter, true);\n\n }\n\n\n\n #[test]\n\n /// parse the test config and see that the value parsed is correct\n\n fn config_reads_addslash() {\n\n let config = setup_config_test();\n\n assert_eq!(config.addslash, true);\n", "file_path": "src/config.rs", "rank": 94, "score": 31690.27255381616 }, { "content": " Err(e) => {\n\n println!(\n\n \"{} {} {}\",\n\n 
status_colorizer(\"ERROR\"),\n\n module_colorizer(\"config::parse_config\"),\n\n e\n\n );\n\n }\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::write;\n\n use tempfile::TempDir;\n\n\n", "file_path": "src/config.rs", "rank": 95, "score": 31689.87166643056 }, { "content": " })\n\n })\n\n .collect();\n\n }\n\n\n\n if args.is_present(\"quiet\") {\n\n // the reason this is protected by an if statement:\n\n // consider a user specifying quiet = true in ferox-config.toml\n\n // if the line below is outside of the if, we'd overwrite true with\n\n // false if no -q is used on the command line\n\n config.quiet = args.is_present(\"quiet\");\n\n }\n\n\n\n if args.is_present(\"dontfilter\") {\n\n config.dontfilter = args.is_present(\"dontfilter\");\n\n }\n\n\n\n if args.occurrences_of(\"verbosity\") > 0 {\n\n // occurrences_of returns 0 if none are found; this is protected in\n\n // an if block for the same reason as the quiet option\n", "file_path": "src/config.rs", "rank": 96, "score": 31687.929252161794 }, { "content": "/// Spawn a single consumer task (sc side of mpsc)\n\n///\n\n/// The consumer simply receives Urls and scans them\n\nfn spawn_recursion_handler(\n\n mut recursion_channel: UnboundedReceiver<String>,\n\n wordlist: Arc<HashSet<String>>,\n\n base_depth: usize,\n\n tx_term: UnboundedSender<Response>,\n\n tx_file: UnboundedSender<String>,\n\n) -> BoxFuture<'static, Vec<JoinHandle<()>>> {\n\n log::trace!(\n\n \"enter: spawn_recursion_handler({:?}, wordlist[{} words...], {}, {:?}, {:?})\",\n\n recursion_channel,\n\n wordlist.len(),\n\n base_depth,\n\n tx_term,\n\n tx_file\n\n );\n\n\n\n let boxed_future = async move {\n\n let mut scans = vec![];\n\n while let Some(resp) = recursion_channel.recv().await {\n\n let unknown = add_url_to_list_of_scanned_urls(&resp, &SCANNED_URLS);\n", "file_path": "src/scanner.rs", "rank": 97, "score": 30595.939737166613 }, { "content": "mod utils;\n\nuse 
assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::process::Command;\n\nuse utils::{setup_tmp_directory, teardown_tmp_directory};\n\n\n\n#[test]\n\n/// send a single valid request, expect a 200 response\n", "file_path": "tests/test_config.rs", "rank": 98, "score": 29849.310573649313 }, { "content": "/// Helper function to determine suitability for recursion\n\n///\n\n/// handles 2xx and 3xx responses by either checking if the url ends with a / (2xx)\n\n/// or if the Location header is present and matches the base url + / (3xx)\n\nfn response_is_directory(response: &Response) -> bool {\n\n log::trace!(\"enter: is_directory({:?})\", response);\n\n\n\n if response.status().is_redirection() {\n\n // status code is 3xx\n\n match response.headers().get(\"Location\") {\n\n // and has a Location header\n\n Some(loc) => {\n\n // get absolute redirect Url based on the already known base url\n\n log::debug!(\"Location header: {:?}\", loc);\n\n\n\n if let Ok(loc_str) = loc.to_str() {\n\n if let Ok(abs_url) = response.url().join(loc_str) {\n\n if format!(\"{}/\", response.url()) == abs_url.as_str() {\n\n // if current response's Url + / == the absolute redirection\n\n // location, we've found a directory suitable for recursion\n\n log::debug!(\n\n \"found directory suitable for recursion: {}\",\n\n response.url()\n\n );\n", "file_path": "src/scanner.rs", "rank": 99, "score": 27303.602558919934 } ]
Rust
src/cheat.rs
ruabmbua/navi
dc4fe98f5611f40af9f232aaae4b3cfe6058bef5
use crate::display; use crate::filesystem; use crate::option::Config; use regex::Regex; use std::collections::HashMap; use std::fs; use std::io::Write; pub struct SuggestionOpts { pub header_lines: u8, pub column: Option<u8>, pub multi: bool, } pub type Value = (String, Option<SuggestionOpts>); fn gen_snippet(snippet: &str, line: &str) -> String { if snippet.is_empty() { line.to_string() } else { format!("{}{}", &snippet[..snippet.len() - 2], line) } } fn parse_opts(text: &str) -> SuggestionOpts { let mut header_lines: u8 = 0; let mut column: Option<u8> = None; let mut multi = false; let mut parts = text.split(' '); while let Some(p) = parts.next() { match p { "--multi" => multi = true, "--header" | "--header-lines" => { header_lines = parts.next().unwrap().parse::<u8>().unwrap() } "--column" => column = Some(parts.next().unwrap().parse::<u8>().unwrap()), _ => (), } } SuggestionOpts { header_lines, column, multi, } } fn parse_variable_line(line: &str) -> (&str, &str, Option<SuggestionOpts>) { let re = Regex::new(r"^\$\s*([^:]+):(.*)").unwrap(); let caps = re.captures(line).unwrap(); let variable = caps.get(1).unwrap().as_str().trim(); let mut command_plus_opts = caps.get(2).unwrap().as_str().split("---"); let command = command_plus_opts.next().unwrap(); let opts = match command_plus_opts.next() { Some(o) => Some(parse_opts(o)), None => None, }; (variable, command, opts) } fn read_file( path: &str, variables: &mut HashMap<String, Value>, stdin: &mut std::process::ChildStdin, ) { let mut tags = String::from(""); let mut comment = String::from(""); let mut snippet = String::from(""); let (tag_width, comment_width) = display::widths(); if let Ok(lines) = filesystem::read_lines(path) { for l in lines { let line = l.unwrap(); if line.starts_with('%') { tags = String::from(&line[2..]); } else if line.starts_with('#') { comment = String::from(&line[2..]); } else if line.starts_with('$') { let (variable, command, opts) = parse_variable_line(&line[..]); variables.insert( 
format!("{};{}", tags, variable), (String::from(command), opts), ); } else if line.ends_with('\\') { snippet = if !snippet.is_empty() { format!("{}{}", &snippet[..snippet.len() - 2], line) } else { line } } else if line.is_empty() { } else { let full_snippet = gen_snippet(&snippet, &line); match stdin.write( display::format_line( &tags[..], &comment[..], &full_snippet[..], tag_width, comment_width, ) .as_bytes(), ) { Ok(_) => snippet = String::from(""), Err(_) => break, } } } } } pub fn read_all(config: &Config, stdin: &mut std::process::ChildStdin) -> HashMap<String, Value> { let mut variables: HashMap<String, Value> = HashMap::new(); let fallback = format!("{}/cheats", filesystem::exe_path_string()); let folders_str = config.path.as_ref().unwrap_or(&fallback); let folders = folders_str.split(':'); for folder in folders { if let Ok(paths) = fs::read_dir(folder) { for path in paths { read_file( path.unwrap().path().into_os_string().to_str().unwrap(), &mut variables, stdin, ); } } } variables }
use crate::display; use crate::filesystem; use crate::option::Config; use regex::Regex; use std::collections::HashMap; use std::fs; use std::io::Write; pub struct SuggestionOpts { pub header_lines: u8, pub column: Option<u8>, pub multi: bool, } pub type Value = (String, Option<SuggestionOpts>); fn gen_snippet(snippet: &str, line: &str) -> String { if snippet.is_empty() { line.to_string() } else { format!("{}{}", &snippet[..snippet.len() - 2], line) } } fn parse_opts(text: &str) -> SuggestionOpts {
fn parse_variable_line(line: &str) -> (&str, &str, Option<SuggestionOpts>) { let re = Regex::new(r"^\$\s*([^:]+):(.*)").unwrap(); let caps = re.captures(line).unwrap(); let variable = caps.get(1).unwrap().as_str().trim(); let mut command_plus_opts = caps.get(2).unwrap().as_str().split("---"); let command = command_plus_opts.next().unwrap(); let opts = match command_plus_opts.next() { Some(o) => Some(parse_opts(o)), None => None, }; (variable, command, opts) } fn read_file( path: &str, variables: &mut HashMap<String, Value>, stdin: &mut std::process::ChildStdin, ) { let mut tags = String::from(""); let mut comment = String::from(""); let mut snippet = String::from(""); let (tag_width, comment_width) = display::widths(); if let Ok(lines) = filesystem::read_lines(path) { for l in lines { let line = l.unwrap(); if line.starts_with('%') { tags = String::from(&line[2..]); } else if line.starts_with('#') { comment = String::from(&line[2..]); } else if line.starts_with('$') { let (variable, command, opts) = parse_variable_line(&line[..]); variables.insert( format!("{};{}", tags, variable), (String::from(command), opts), ); } else if line.ends_with('\\') { snippet = if !snippet.is_empty() { format!("{}{}", &snippet[..snippet.len() - 2], line) } else { line } } else if line.is_empty() { } else { let full_snippet = gen_snippet(&snippet, &line); match stdin.write( display::format_line( &tags[..], &comment[..], &full_snippet[..], tag_width, comment_width, ) .as_bytes(), ) { Ok(_) => snippet = String::from(""), Err(_) => break, } } } } } pub fn read_all(config: &Config, stdin: &mut std::process::ChildStdin) -> HashMap<String, Value> { let mut variables: HashMap<String, Value> = HashMap::new(); let fallback = format!("{}/cheats", filesystem::exe_path_string()); let folders_str = config.path.as_ref().unwrap_or(&fallback); let folders = folders_str.split(':'); for folder in folders { if let Ok(paths) = fs::read_dir(folder) { for path in paths { read_file( 
path.unwrap().path().into_os_string().to_str().unwrap(), &mut variables, stdin, ); } } } variables }
let mut header_lines: u8 = 0; let mut column: Option<u8> = None; let mut multi = false; let mut parts = text.split(' '); while let Some(p) = parts.next() { match p { "--multi" => multi = true, "--header" | "--header-lines" => { header_lines = parts.next().unwrap().parse::<u8>().unwrap() } "--column" => column = Some(parts.next().unwrap().parse::<u8>().unwrap()), _ => (), } } SuggestionOpts { header_lines, column, multi, } }
function_block-function_prefix_line
[ { "content": "pub fn variable_prompt(varname: &str) -> String {\n\n format!(\"{}: \", varname)\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 1, "score": 147276.47230093126 }, { "content": "fn gen_replacement(value: &str) -> String {\n\n if value.contains(' ') {\n\n format!(\"\\\"{}\\\"\", &value[..value.len() - 1])\n\n } else {\n\n value[..value.len() - 1].to_string()\n\n }\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 2, "score": 134239.33661455236 }, { "content": "pub fn main(line: String) -> Result<(), Box<dyn Error>> {\n\n let (tags, comment, snippet) = extract_elements(&line[..]);\n\n display::preview(comment, tags, snippet);\n\n process::exit(0)\n\n}\n", "file_path": "src/cmds/preview.rs", "rank": 3, "score": 133654.71514686322 }, { "content": "pub fn format_line(\n\n tags: &str,\n\n comment: &str,\n\n full_snippet: &str,\n\n tag_width: usize,\n\n comment_width: usize,\n\n) -> String {\n\n format!(\n\n \"{tag_color}{tags_short}\\t{comment_color}{comment_short}\\t{snippet_color}{snippet_short}\\t{tags}\\t{comment}\\t{snippet}\\t\\n\",\n\n tags_short = limit_str(tags, tag_width),\n\n comment_short = limit_str(comment, comment_width),\n\n snippet_short = full_snippet,\n\n comment_color = color::Fg(COMMENT_COLOR),\n\n tag_color = color::Fg(TAG_COLOR),\n\n snippet_color = color::Fg(SNIPPET_COLOR),\n\n tags = tags,\n\n comment = comment,\n\n snippet = &full_snippet)\n\n}\n", "file_path": "src/display.rs", "rank": 5, "score": 125993.8204858413 }, { "content": "pub fn exe_string() -> String {\n\n exe_pathbuf().as_os_str().to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "src/filesystem.rs", "rank": 6, "score": 122536.80843395216 }, { "content": "pub fn exe_path_string() -> String {\n\n exe_pathbuf()\n\n .parent()\n\n .unwrap()\n\n .as_os_str()\n\n .to_str()\n\n .unwrap()\n\n .to_string()\n\n}\n", "file_path": "src/filesystem.rs", "rank": 7, "score": 120580.27662176068 }, { "content": "fn extract_from_selections(raw_output: &str, contains_key: 
bool) -> (&str, &str, &str) {\n\n let mut lines = raw_output.split('\\n');\n\n let key = if contains_key {\n\n lines.next().unwrap()\n\n } else {\n\n \"enter\"\n\n };\n\n let mut parts = lines.next().unwrap().split('\\t');\n\n parts.next();\n\n parts.next();\n\n parts.next();\n\n let tags = parts.next().unwrap();\n\n parts.next();\n\n let snippet = parts.next().unwrap();\n\n (key, tags, snippet)\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 8, "score": 119959.74162443625 }, { "content": "pub fn preview(comment: &str, tags: &str, snippet: &str) {\n\n println!(\n\n \"{comment_color}{comment} {tag_color}{tags} \\n{snippet_color}{snippet}\",\n\n comment = format!(\"# {}\", comment),\n\n tags = format!(\"[{}]\", tags),\n\n snippet = snippet,\n\n comment_color = color::Fg(COMMENT_COLOR),\n\n tag_color = color::Fg(TAG_COLOR),\n\n snippet_color = color::Fg(SNIPPET_COLOR),\n\n );\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 9, "score": 117333.61804050588 }, { "content": "fn limit_str(text: &str, length: usize) -> String {\n\n if text.len() > length {\n\n format!(\"{}…\", &text[..length - 1])\n\n } else {\n\n format!(\"{:width$}\", text, width = length)\n\n }\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 10, "score": 112141.13702451534 }, { "content": "fn prompt_without_suggestions(varname: &str) -> String {\n\n let opts = fzf::Opts {\n\n preview: false,\n\n autoselect: false,\n\n suggestions: false,\n\n prompt: Some(display::variable_prompt(varname)),\n\n ..Default::default()\n\n };\n\n\n\n let (output, _) = fzf::call(opts, |_stdin| None);\n\n\n\n String::from_utf8(output.stdout).unwrap()\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 11, "score": 110174.58248132936 }, { "content": "pub fn call<F>(opts: Opts, stdin_fn: F) -> (process::Output, Option<HashMap<String, cheat::Value>>)\n\nwhere\n\n F: Fn(&mut process::ChildStdin) -> Option<HashMap<String, cheat::Value>>,\n\n{\n\n let mut c = Command::new(\"fzf\");\n\n\n\n c.args(&[\n\n 
\"--preview-window\",\n\n \"up:2\",\n\n \"--with-nth\",\n\n \"1,2,3\",\n\n \"--delimiter\",\n\n \"\\t\",\n\n \"--ansi\",\n\n \"--bind\",\n\n \"ctrl-j:down,ctrl-k:up\",\n\n \"--exact\",\n\n ]);\n\n\n\n if opts.autoselect {\n", "file_path": "src/fzf.rs", "rank": 12, "score": 101511.62496496917 }, { "content": "pub fn main(_func: String, _args: Vec<String>) -> Result<(), Box<dyn Error>> {\n\n aux::abort(\"calling `navi fn`\", 201)\n\n}\n", "file_path": "src/cmds/func.rs", "rank": 14, "score": 99571.9515480056 }, { "content": "pub fn main(shell: &str) -> Result<(), Box<dyn Error>> {\n\n let file = match shell {\n\n \"zsh\" => \"navi.plugin.zsh\",\n\n \"fish\" => \"navi.plugin.fish\",\n\n _ => \"navi.plugin.bash\",\n\n };\n\n\n\n println!(\"{}/shell/{}\", filesystem::exe_path_string(), file);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cmds/shell.rs", "rank": 15, "score": 96724.62393137952 }, { "content": "pub fn main(query: String, args: Vec<String>, config: Config) -> Result<(), Box<dyn Error>> {\n\n if !args.is_empty() {\n\n cmds::aux::abort(\"passing arguments to 'navi best'\", 201)\n\n } else {\n\n cmds::core::main(Variant::Filter(query), config, false)\n\n }\n\n}\n", "file_path": "src/cmds/best.rs", "rank": 16, "score": 94501.77617471901 }, { "content": "pub fn main(query: String, config: Config) -> Result<(), Box<dyn Error>> {\n\n cmds::core::main(Variant::Query(query), config, true)\n\n}\n", "file_path": "src/cmds/query.rs", "rank": 17, "score": 89798.94926918016 }, { "content": "pub fn main(_query: String, _config: Config) -> Result<(), Box<dyn Error>> {\n\n aux::abort(\"searching for cheats online\", 201)\n\n}\n", "file_path": "src/cmds/search.rs", "rank": 18, "score": 89798.94926918016 }, { "content": "pub fn abort(operation: &str, issue_number: u32) -> Result<(), Box<dyn Error>> {\n\n eprintln!(\"This version of navi doesn't support {}.\", operation);\n\n eprintln!(\n\n \"Please check https://github.com/denisidoro/navi/issues/{} for more info.\",\n\n 
issue_number\n\n );\n\n eprintln!(\"\");\n\n eprintln!(\"You were probably using the bash implementation of navi and are now using the Rust one, which isn't feature complete yet.\");\n\n eprintln!(\"In the near future, the Rust version will have all previous features.\");\n\n eprintln!(\"\");\n\n eprintln!(\"I'm sorry for the inconvenience.\");\n\n process::exit(42)\n\n}\n", "file_path": "src/cmds/aux.rs", "rank": 19, "score": 89099.81317993999 }, { "content": "fn extract_elements(argstr: &str) -> (&str, &str, &str) {\n\n let mut parts = argstr.split('\\t').skip(3);\n\n let tags = parts.next().unwrap();\n\n let comment = parts.next().unwrap();\n\n let snippet = parts.next().unwrap();\n\n (tags, comment, snippet)\n\n}\n\n\n", "file_path": "src/cmds/preview.rs", "rank": 20, "score": 88778.70533737958 }, { "content": "pub fn main(variant: Variant, config: Config, contains_key: bool) -> Result<(), Box<dyn Error>> {\n\n let (output, variables) = fzf::call(gen_core_fzf_opts(variant, &config), |stdin| {\n\n Some(cheat::read_all(&config, stdin))\n\n });\n\n\n\n match output.status.code() {\n\n Some(0) => {\n\n let raw_output = String::from_utf8(output.stdout)?;\n\n let (key, tags, snippet) = extract_from_selections(&raw_output[..], contains_key);\n\n let interpolated_snippet =\n\n replace_variables_from_snippet(snippet, tags, variables.unwrap(), &config);\n\n\n\n if key == \"ctrl-y\" {\n\n cmds::aux::abort(\"copying snippets to the clipboard\", 201)?\n\n } else if config.print {\n\n println!(\"{}\", interpolated_snippet);\n\n } else if let Some(s) = config.save {\n\n fs::write(s, interpolated_snippet)?;\n\n } else {\n\n Command::new(\"bash\")\n", "file_path": "src/cmds/core.rs", "rank": 21, "score": 85603.85503807789 }, { "content": "fn prompt_with_suggestions(config: &Config, suggestion: &cheat::Value) -> String {\n\n let child = Command::new(\"bash\")\n\n .stdout(Stdio::piped())\n\n .arg(\"-c\")\n\n .arg(&suggestion.0)\n\n .spawn()\n\n .unwrap();\n\n\n\n let suggestions 
= String::from_utf8(child.wait_with_output().unwrap().stdout).unwrap();\n\n\n\n let mut opts = fzf::Opts {\n\n preview: false,\n\n autoselect: !config.no_autoselect,\n\n ..Default::default()\n\n };\n\n\n\n if let Some(o) = &suggestion.1 {\n\n opts.multi = o.multi;\n\n opts.header_lines = o.header_lines;\n\n opts.nth = o.column;\n\n };\n\n\n\n let (output, _) = fzf::call(opts, |stdin| {\n\n stdin.write_all(suggestions.as_bytes()).unwrap();\n\n None\n\n });\n\n\n\n String::from_utf8(output.stdout).unwrap()\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 22, "score": 83672.2087798499 }, { "content": "pub fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let file = File::open(filename)?;\n\n Ok(io::BufReader::new(file).lines())\n\n}\n\n\n", "file_path": "src/filesystem.rs", "rank": 23, "score": 81807.10191883684 }, { "content": "pub fn parse() -> Config {\n\n Config::from_args()\n\n}\n\n\n", "file_path": "src/option.rs", "rank": 24, "score": 75008.77697739363 }, { "content": "pub fn widths() -> (usize, usize) {\n\n let full_width = terminal_size().unwrap().0;\n\n let tag_width = full_width * 10 / 100;\n\n let comment_width = full_width * 50 / 100;\n\n (usize::from(tag_width), usize::from(comment_width))\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 25, "score": 71935.54241608016 }, { "content": "pub fn internal_command() -> Option<InternalCommand> {\n\n let mut args = env::args();\n\n args.next();\n\n if args.next() == Some(String::from(\"preview\")) {\n\n Some(InternalCommand::Preview {\n\n line: args.next().unwrap(),\n\n })\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/option.rs", "rank": 27, "score": 69308.90073403451 }, { "content": "pub fn main() -> Result<(), Box<dyn Error>> {\n\n println!(\"{}\", filesystem::exe_path_string());\n\n Ok(())\n\n}\n", "file_path": "src/cmds/home.rs", "rank": 28, "score": 65322.72067974003 }, { "content": "fn replace_variables_from_snippet(\n\n snippet: 
&str,\n\n tags: &str,\n\n variables: HashMap<String, cheat::Value>,\n\n config: &Config,\n\n) -> String {\n\n let mut interpolated_snippet = String::from(snippet);\n\n\n\n let re = Regex::new(r\"<(\\w[\\w\\d\\-_]*)>\").unwrap();\n\n for cap in re.captures_iter(snippet) {\n\n let bracketed_varname = &cap[0];\n\n let varname = &bracketed_varname[1..bracketed_varname.len() - 1];\n\n let k = format!(\"{};{}\", tags, varname);\n\n\n\n let value = match variables.get(&k[..]) {\n\n Some(suggestion) => prompt_with_suggestions(&config, suggestion),\n\n None => prompt_without_suggestions(varname),\n\n };\n\n\n\n interpolated_snippet =\n\n interpolated_snippet.replace(bracketed_varname, gen_replacement(&value[..]).as_str());\n\n }\n\n\n\n interpolated_snippet\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 30, "score": 34134.618201010104 }, { "content": "fn exe_pathbuf() -> PathBuf {\n\n let pathbuf = std::env::current_exe().unwrap().to_path_buf();\n\n follow_symlink(pathbuf)\n\n}\n\n\n", "file_path": "src/filesystem.rs", "rank": 31, "score": 33323.04837048632 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n match option::internal_command() {\n\n Some(InternalCommand::Preview { line }) => cmds::preview::main(line),\n\n _ => {\n\n let mut config = option::parse();\n\n match config.cmd.as_mut() {\n\n None => cmds::core::main(Variant::Core, config, true),\n\n Some(c) => match c {\n\n Command::Query { query } => cmds::query::main(query.clone(), config),\n\n Command::Best { query, args } => {\n\n cmds::best::main(query.clone(), args.to_vec(), config)\n\n }\n\n Command::Search { query } => cmds::search::main(query.clone(), config),\n\n Command::Widget { shell } => cmds::shell::main(&shell[..]),\n\n Command::Func { func, args } => cmds::func::main(func.clone(), args.to_vec()),\n\n Command::Home => cmds::home::main(),\n\n },\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 32, "score": 30722.542207839506 }, { "content": "fn follow_symlink(pathbuf: 
PathBuf) -> PathBuf {\n\n let other = fs::read_link(pathbuf.clone());\n\n match other {\n\n Ok(o) => follow_symlink(o),\n\n Err(_) => pathbuf,\n\n }\n\n}\n\n\n", "file_path": "src/filesystem.rs", "rank": 33, "score": 30132.871854984187 }, { "content": "fn gen_core_fzf_opts(variant: Variant, config: &Config) -> fzf::Opts {\n\n let mut opts = fzf::Opts {\n\n preview: !config.no_preview,\n\n autoselect: !config.no_autoselect,\n\n overrides: config.fzf_overrides.as_ref(),\n\n copyable: true,\n\n ..Default::default()\n\n };\n\n\n\n match variant {\n\n Variant::Core => (),\n\n Variant::Filter(f) => opts.filter = Some(f),\n\n Variant::Query(q) => opts.query = Some(q),\n\n }\n\n\n\n opts\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 34, "score": 26569.084541002918 }, { "content": "use crate::cheat;\n\nuse crate::filesystem;\n\n\n\nuse std::collections::HashMap;\n\nuse std::process;\n\nuse std::process::{Command, Stdio};\n\n\n\npub struct Opts<'a> {\n\n pub query: Option<String>,\n\n pub filter: Option<String>,\n\n pub prompt: Option<String>,\n\n pub preview: bool,\n\n pub autoselect: bool,\n\n pub overrides: Option<&'a String>, // TODO: remove &'a\n\n pub header_lines: u8,\n\n pub multi: bool,\n\n pub copyable: bool,\n\n pub suggestions: bool,\n\n pub nth: Option<u8>,\n\n}\n", "file_path": "src/fzf.rs", "rank": 36, "score": 14.506447750653306 }, { "content": " #[structopt(long)]\n\n pub fzf_overrides: Option<String>,\n\n\n\n #[structopt(subcommand)]\n\n pub cmd: Option<Command>,\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub enum Command {\n\n /// Filters results\n\n Query { query: String },\n\n /// Shows navi's home directory\n\n Home,\n\n /// Uses online repositories for cheatsheets\n\n Search { query: String },\n\n /// Autoselects the snippet that best matches the query\n\n Best { query: String, args: Vec<String> },\n\n /// Performs ad-hoc functions provided by navi\n\n Func { func: String, args: Vec<String> },\n\n /// Shows the path for shell widget files\n\n 
Widget { shell: String },\n\n}\n\n\n\npub enum InternalCommand {\n\n Preview { line: String },\n\n}\n\n\n", "file_path": "src/option.rs", "rank": 37, "score": 8.37027095972645 }, { "content": "### Variables\n\n\n\nThe interface prompts for variable names inside brackets (eg `<branch>`).\n\n\n\nVariable names should only include alphanumeric characters and `_`.\n\n\n\nThe command for generating possible inputs can refer other variables:\n\n```sh\n\n# If you select 2 for x, the possible values of y will be 12 and 22\n\necho <x> <y>\n\n\n\n$ x: echo -e '1\\n2\\n3'\n\n$ y: echo -e \"$((x+10))\\n$((x+20))\"\n\n```\n\n\n\n### Variable options\n\n\n\nFor lines starting with `$` you can add extra options using `---`.\n\n\n\n#### Table formatting\n\n\n\nYou can pick a specific column of a selection and set the number of lines considered as headers via `--column` and `--headers`:\n\n\n\n```sh\n\n# This will pick the 3rd column and use the first line as header\n\ndocker rmi <image_id>\n\n\n\n$ image_id: docker images --- --column 3 --headers 1\n\n```\n\n\n\n#### Multiple choice\n\n\n\nYou can select multiple values via `--multi` and hitting `<TAB>`:\n\n\n\n```sh\n\n# The resulting command will be something like: cat \"a.txt\" \"b.txt\"\n\ncat <files>\n\n\n\n$ files: ls --- --multi true\n\n```\n\n\n\nList customization\n\n------------------\n\n\n\nLists can be stylized with the [$FZF_DEFAULT_OPTS](https://github.com/junegunn/fzf) environment variable. 
This way, you can change the [color scheme](https://github.com/junegunn/fzf/wiki/Color-schemes), for example.\n\n\n\nIn addition:\n\n- the `--fzf-overrides` option allows you to hide columns, for example\n\n- the `--col-widths` option allows you to limit column widths\n\n\n\nPlease refer to `navi --help` for more details.\n\n\n", "file_path": "README.md", "rank": 38, "score": 8.213822208121625 }, { "content": "use std::env;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(after_help = \"EXAMPLES:\n\n navi # default behavior\n\n navi --print # doesn't execute the snippet\n\n navi --path '/some/dir:/other/dir' # uses custom cheats\n\n navi search docker # uses online data\n\n navi query git # filters results by \\\"git\\\"\n\n navi best 'sql create db' root mydb # uses a snippet as a CLI\n\n source \\\"$(navi widget zsh)\\\" # loads the zsh widget\n\n navi --fzf-overrides ' --with-nth 1,2' # shows only the comment and tag columns\n\n navi --fzf-overrides ' --nth 1,2' # search will consider only the first two columns\n\n navi --fzf-overrides ' --no-exact' # looser search algorithm\")]\n\npub struct Config {\n\n /// List of :-separated paths containing .cheat files\n\n #[structopt(short, long, env = \"NAVI_PATH\")]\n\n pub path: Option<String>,\n\n\n", "file_path": "src/option.rs", "rank": 39, "score": 7.574225954585852 }, { "content": " /// [alpha] Instead of executing a snippet, saves it to a file\n\n #[structopt(short, long)]\n\n pub save: Option<String>,\n\n\n\n /// Instead of executing a snippet, prints it to stdout\n\n #[structopt(long)]\n\n pub print: bool,\n\n\n\n /// Prevents autoselection in case of single entry\n\n #[structopt(long)]\n\n pub no_autoselect: bool,\n\n\n\n /// Hides preview window\n\n #[structopt(long)]\n\n pub no_preview: bool,\n\n\n\n // #[structopt(long)]\n\n // pub col_widths: Option<String>,\n\n /// Overrides for fzf commands (must start with an empty space)\n\n #[structopt(long)]\n", "file_path": 
"src/option.rs", "rank": 40, "score": 6.760916357904146 }, { "content": " }\n\n\n\n if let Some(f) = opts.filter {\n\n c.args(&[\"--filter\", &f]);\n\n }\n\n\n\n if let Some(p) = opts.prompt {\n\n c.args(&[\"--prompt\", &p]);\n\n }\n\n\n\n if let Some(n) = opts.nth {\n\n c.args(&[\"--nth\", &n.to_string()]);\n\n }\n\n\n\n if opts.header_lines > 0 {\n\n c.args(&[\"--header-lines\", format!(\"{}\", opts.header_lines).as_str()]);\n\n }\n\n\n\n if let Some(o) = opts.overrides {\n\n o.as_str()\n", "file_path": "src/fzf.rs", "rank": 41, "score": 6.62513671098619 }, { "content": " c.arg(\"--select-1\");\n\n }\n\n\n\n if opts.multi {\n\n c.arg(\"--multi\");\n\n }\n\n\n\n if opts.copyable {\n\n c.args(&[\"--expect\", \"ctrl-y,enter\"]);\n\n }\n\n\n\n if opts.preview {\n\n c.args(&[\n\n \"--preview\",\n\n format!(\"{} preview {{}}\", filesystem::exe_string()).as_str(),\n\n ]);\n\n }\n\n\n\n if let Some(q) = opts.query {\n\n c.args(&[\"--query\", &q]);\n", "file_path": "src/fzf.rs", "rank": 42, "score": 6.065486847812874 }, { "content": "use crate::cheat;\n\nuse crate::cmds;\n\nuse crate::display;\n\nuse crate::fzf;\n\nuse crate::option::Config;\n\n\n\nuse regex::Regex;\n\nuse std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::io::Write;\n\nuse std::process;\n\nuse std::process::{Command, Stdio};\n\n\n\npub enum Variant {\n\n Core,\n\n Filter(String),\n\n Query(String),\n\n}\n\n\n", "file_path": "src/cmds/core.rs", "rank": 43, "score": 5.79871322236515 }, { "content": "use std::fs;\n\nuse std::fs::File;\n\nuse std::io::{self, BufRead, BufReader, Lines};\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "src/filesystem.rs", "rank": 45, "score": 4.121578616564591 }, { "content": "\n\nimpl Default for Opts<'_> {\n\n fn default() -> Self {\n\n Self {\n\n query: None,\n\n filter: None,\n\n preview: true,\n\n autoselect: true,\n\n overrides: None,\n\n header_lines: 0,\n\n prompt: None,\n\n multi: false,\n\n copyable: false,\n\n suggestions: 
true,\n\n nth: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/fzf.rs", "rank": 47, "score": 3.5189846020903897 }, { "content": "pub mod aux;\n\npub mod best;\n\npub mod core;\n\npub mod func;\n\npub mod home;\n\npub mod preview;\n\npub mod query;\n\npub mod search;\n\npub mod shell;\n", "file_path": "src/cmds/mod.rs", "rank": 48, "score": 3.056451525109359 }, { "content": "Motivation\n\n----------\n\n\n\nThe main objectives are:\n\n- to increase discoverability, by finding snippets given keywords or descriptions;\n\n- to prevent you from running auxiliar commands, copying the result into the clipboard and then pasting into the original command;\n\n- to easily share one-liners with others so that they don't need to figure out how to write the commands;\n\n- to improve terminal usage as a whole.\n\n\n\nSure, you can find autocompleters out there for all your favorite commands. However, they are very specific and each one may offer a different learning curve.\n\n\n\nOr you can launch a browser and search for instructions on Google, but that takes some time.\n\n\n\n**navi**, on the other hand, intends to be a general purpose platform for bookmarking any snippet at a very low cost.\n\n\n\nCheatsheets\n\n-----------\n\n\n\n### Using your own custom cheatsheets\n\n\n\nIn this case, you need to pass a `:`-separated list of separated directories which contain `.cheat` files:\n\n```sh\n\nnavi --path \"/folder/with/cheats\"\n\n```\n\n\n\nAlternatively, you can set an environment variable in your `.bashrc`-like file:\n\n```sh\n\nexport NAVI_PATH=\"/folder/with/cheats:/another/folder\"\n\n```\n\n\n\n### Submitting cheatsheets\n\n\n\nFeel free to fork this project and open a PR for me to include your contributions.\n\n\n\nCheatsheet syntax\n\n-----------------\n\n\n\nCheatsheets are described in `.cheat` files.\n\n\n\n### Syntax overview\n\n\n\n- lines starting with `%` should contain tags which will be added to any command in a given file;\n\n- lines starting with `#` should 
be descriptions of commands;\n\n- lines starting with `$` should contain commands that generate a list of possible values for a given argument;\n\n- all the other non-empty lines are considered as executable commands.\n\n\n\nFor example, this is a valid `.cheat` file:\n\n```sh\n\n% git, code\n\n\n\n# Change branch\n\ngit checkout <branch>\n\n\n\n$ branch: git branch | awk '{print $NF}'\n\n```\n\n\n", "file_path": "README.md", "rank": 49, "score": 2.980691425648495 }, { "content": "Table of contents\n\n-----------------\n\n\n\n * [Installation](#installation)\n\n * [Using Homebrew or Linuxbrew](#using-homebrew-or-linuxbrew)\n\n * [Downloading pre-compiled binaries](#downloading-pre-compiled-binaries)\n\n * [Building from source](#building-from-source)\n\n * [Upgrading](#upgrading)\n\n * [Usage](#usage)\n\n * [Preventing execution](#preventing-execution)\n\n * [Pre-filtering](#pre-filtering)\n\n * [Searching online repositories](#searching-online-repositories)\n\n * [Shell widget](#shell-widget)\n\n * [More options](#more-options)\n\n * [Trying out online](#trying-out-online)\n\n * [Motivation](#motivation)\n\n * [Cheatsheets](#cheatsheets)\n\n * [Using your own custom cheatsheets](#using-your-own-custom-cheatsheets)\n\n * [Submitting cheatsheets](#submitting-cheatsheets)\n\n * [Cheatsheet syntax](#cheatsheet-syntax)\n\n * [Syntax overview](#syntax-overview)\n\n * [Variables](#variables)\n\n * [Variable options](#variable-options)\n\n * [Table formatting](#table-formatting)\n\n * [Multiple choice](#multiple-choice)\n\n * [List customization](#list-customization)\n\n * [Related projects](#related-projects)\n\n * [Etymology](#etymology)\n\n\n\nInstallation\n\n------------\n\n\n\n### Using Homebrew or Linuxbrew\n\n\n\nYou can use [Homebrew](http://brew.sh/) or [Linuxbrew](http://linuxbrew.sh/)\n\nto install **navi**:\n\n```sh\n\nbrew install navi\n\n```\n\n\n\n### Downloading pre-compiled binaries\n\n\n\n```bash\n\nbash <(curl -sL 
https://github.com/denisidoro/navi/blob/master/scripts/install)\n\n```\n\n\n\nAlternatively, you can download the binaries manually [here](https://github.com/denisidoro/navi/releases/latest).\n\n\n\n### Building from source\n\n\n\n```bash\n\ngit clone https://github.com/denisidoro/navi ~/.navi\n\ncd ~/.navi\n\nmake install\n\n```\n\n\n\nUsage\n\n-----\n\n\n\nBy simply running `navi` you will be prompted with the default cheatsheets.\n\n\n\n### Preventing execution\n\n\n\nIf you run `navi --print`, the selected snippet won't be executed. It will be printed to stdout instead.\n\n\n", "file_path": "README.md", "rank": 50, "score": 2.8247990587096545 }, { "content": "use std::error::Error;\n\n\n\nuse crate::cmds;\n\nuse crate::cmds::core::Variant;\n\nuse crate::option::Config;\n\n\n", "file_path": "src/cmds/query.rs", "rank": 51, "score": 2.346356911297992 }, { "content": "use std::error::Error;\n\n\n\nuse crate::cmds;\n\nuse crate::cmds::core::Variant;\n\nuse crate::option::Config;\n\n\n", "file_path": "src/cmds/best.rs", "rank": 52, "score": 2.346356911297992 }, { "content": "use std::error::Error;\n\nuse std::process;\n\n\n\nuse crate::display;\n\n\n", "file_path": "src/cmds/preview.rs", "rank": 53, "score": 2.3228382356646993 }, { "content": "use std::error::Error;\n\n\n\nuse super::aux;\n\nuse crate::option::Config;\n\n\n", "file_path": "src/cmds/search.rs", "rank": 54, "score": 2.309664507555842 }, { "content": "use std::error::Error;\n\n\n\nuse super::aux;\n\n\n", "file_path": "src/cmds/func.rs", "rank": 55, "score": 2.2212944128976133 }, { "content": "use std::error::Error;\n\nuse std::process;\n\n\n", "file_path": "src/cmds/aux.rs", "rank": 56, "score": 2.2212944128976133 }, { "content": "use std::error::Error;\n\n\n\nuse crate::filesystem;\n\n\n", "file_path": "src/cmds/shell.rs", "rank": 57, "score": 2.2212944128976133 }, { "content": "use std::error::Error;\n\n\n\nuse crate::filesystem;\n\n\n", "file_path": "src/cmds/home.rs", "rank": 58, "score": 
2.2212944128976133 }, { "content": "// #[macro_use]\n\n// extern crate lazy_static;\n\n\n\nuse std::error::Error;\n\n\n\nmod cheat;\n\nmod cmds;\n\nmod display;\n\nmod filesystem;\n\nmod fzf;\n\nmod option;\n\n\n\nuse crate::cmds::core::Variant;\n\nuse option::{Command, InternalCommand};\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 2.194625149232836 }, { "content": "### Pre-filtering\n\n\n\nIf you run `navi query <cmd>`, the results will be pre-filtered.\n\n\n\n### Searching online repositories\n\n\n\nIf you run `navi search <cmd>`, **navi** will try to download cheatsheets from online repositories as well.\n\n\n\nPlease note that these cheatsheets aren't curated by **navi**'s maintainers and should be taken with a grain of salt. If you're not sure about executing these snippets, make sure to check the preview window or use the `--print` option.\n\n\n\n### Shell widget\n\n\n\nYou can use **navi** as a widget to your shell. This way, your history is correctly populated and you can edit the command as you wish before executing it.\n\n\n\nIn order to use it, add this line to your `.bashrc`-like file:\n\n```sh\n\n# bash\n\nsource \"$(navi widget bash)\"\n\n\n\n# zsh\n\nsource \"$(navi widget zsh)\"\n\n\n\n# fish\n\nsource (navi widget fish)\n\n```\n\n\n\nBy default, `Ctrl+G` is assigned to launching **navi**. If you want to change the keybinding, replace the argument of `bind` or `bindkey` in [the widget file](https://github.com/denisidoro/navi/search?q=filename%3Anavi.plugin.*&unscoped_q=filename%3Anavi.plugin.*).\n\n\n\nIf you want a widget for other shells, please upvote [this issue](https://github.com/denisidoro/navi/issues/37).\n\n\n\n\n\n### More options\n\n\n\nPlease refer to `navi --help` for more details.\n\n\n\nTrying out online\n\n--------------------\n\n\n\nIf you don't have access to bash at the moment and you want to live preview **navi**, head to [this playground](https://www.katacoda.com/denisidoro/scenarios/navi). 
It'll start a docker container with instructions for you to install and use the tool. Note: login required.\n\n\n", "file_path": "README.md", "rank": 60, "score": 2.1306897911694493 }, { "content": " .arg(\"-c\")\n\n .arg(&interpolated_snippet[..])\n\n .spawn()?;\n\n }\n\n\n\n Ok(())\n\n }\n\n Some(130) => process::exit(130),\n\n _ => {\n\n let err = String::from_utf8(output.stderr)?;\n\n panic!(\"External command failed:\\n {}\", err)\n\n }\n\n }\n\n}\n", "file_path": "src/cmds/core.rs", "rank": 61, "score": 1.8907011063898453 }, { "content": "# navi <img src=\"https://user-images.githubusercontent.com/3226564/65362934-b4432500-dbdf-11e9-8f75-815fbc5cbf8f.png\" alt=\"icon\" height=\"28px\"/> [![Actions Status](https://github.com/denisidoro/navi/workflows/Quickstart/badge.svg)](https://github.com/denisidoro/navi/actions) ![GitHub release](https://img.shields.io/github/v/release/denisidoro/navi?include_prereleases)\n\n \n\n> :information_source: This project has recently been rewritten in Rust. The old implementation was written in bash. 
If you're facing any issue after updating, please check [this thread](https://github.com/denisidoro/navi/issues/201).\n\n\n\nAn interactive cheatsheet tool for the command-line so that you won't say the following anymore:\n\n\n\n>— *How to run that command again?*<br>\n\n— *Oh, it's not in my shell history*<br>\n\n— *Geez, it's almost what I wanted but I need to change some args*\n\n\n\n![Demo](https://user-images.githubusercontent.com/3226564/67864139-ebbcbf80-fb03-11e9-9abb-8e6664f77915.gif)\n\n\n\n**navi** allows you to browse through cheatsheets (that you may write yourself or download from maintainers) and execute commands, with argument values prompted to you.\n\n\n", "file_path": "README.md", "rank": 62, "score": 1.7986622390431117 }, { "content": " Err(_) => {\n\n eprintln!( \"navi was unable to call fzf.\\nPlease make sure it's correctly installed\\nRefer to https://github.com/junegunn/fzf for more info.\");\n\n process::exit(33)\n\n }\n\n };\n\n\n\n let stdin = child\n\n .stdin\n\n .as_mut()\n\n .ok_or(\"Child process stdin has not been captured!\")\n\n .unwrap();\n\n\n\n let result = stdin_fn(stdin);\n\n\n\n (child.wait_with_output().unwrap(), result)\n\n}\n", "file_path": "src/fzf.rs", "rank": 63, "score": 1.7178017240003232 }, { "content": " .split(' ')\n\n .map(|s| s.to_string())\n\n .filter(|s| !s.is_empty())\n\n .for_each(|s| {\n\n c.arg(s);\n\n });\n\n }\n\n\n\n if !opts.suggestions {\n\n c.args(&[\"--print-query\", \"--no-select-1\", \"--height\", \"1\"]);\n\n }\n\n\n\n let child = c\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::inherit())\n\n .spawn();\n\n\n\n let mut child = match child {\n\n Ok(x) => x,\n", "file_path": "src/fzf.rs", "rank": 64, "score": 1.6697484304842725 }, { "content": "use termion::{color, terminal_size};\n\n\n\nstatic COMMENT_COLOR: color::LightCyan = color::LightCyan;\n\nstatic TAG_COLOR: color::LightGreen = color::LightGreen;\n\nstatic SNIPPET_COLOR: color::White = color::White;\n\n\n", 
"file_path": "src/display.rs", "rank": 65, "score": 1.4270265611457562 } ]
Rust
iroha/src/queue.rs
EmelianPiker/iroha
d6097b81572554444b2e9a9a560c581006fc895a
use self::config::QueueConfiguration; use crate::prelude::*; use std::time::Duration; #[derive(Debug)] pub struct Queue { pending_tx: Vec<AcceptedTransaction>, maximum_transactions_in_block: usize, transaction_time_to_live: Duration, } impl Queue { pub fn from_configuration(config: &QueueConfiguration) -> Queue { Queue { pending_tx: Vec::new(), maximum_transactions_in_block: config.maximum_transactions_in_block as usize, transaction_time_to_live: Duration::from_millis(config.transaction_time_to_live_ms), } } pub fn push_pending_transaction(&mut self, tx: AcceptedTransaction) { self.pending_tx.push(tx); } pub fn pop_pending_transactions(&mut self) -> Vec<AcceptedTransaction> { self.pending_tx = self .pending_tx .iter() .cloned() .filter(|transaction| !transaction.is_expired(self.transaction_time_to_live)) .collect(); let pending_transactions_length = self.pending_tx.len(); let amount_to_drain = if self.maximum_transactions_in_block > pending_transactions_length { pending_transactions_length } else { self.maximum_transactions_in_block }; self.pending_tx.drain(..amount_to_drain).collect() } } pub mod config { use serde::Deserialize; use std::env; const MAXIMUM_TRANSACTIONS_IN_BLOCK: &str = "MAXIMUM_TRANSACTIONS_IN_BLOCK"; const DEFAULT_MAXIMUM_TRANSACTIONS_IN_BLOCK: u32 = 10; const TRANSACTION_TIME_TO_LIVE_MS: &str = "TRANSACTION_TIME_TO_LIVE_MS"; const DEFAULT_TRANSACTION_TIME_TO_LIVE_MS: u64 = 100_000; #[derive(Clone, Deserialize, Debug)] #[serde(rename_all = "UPPERCASE")] pub struct QueueConfiguration { #[serde(default = "default_maximum_transactions_in_block")] pub maximum_transactions_in_block: u32, #[serde(default = "default_transaction_time_to_live_ms")] pub transaction_time_to_live_ms: u64, } impl QueueConfiguration { pub fn load_environment(&mut self) -> Result<(), String> { if let Ok(max_block_tx) = env::var(MAXIMUM_TRANSACTIONS_IN_BLOCK) { self.maximum_transactions_in_block = serde_json::from_str(&max_block_tx).map_err(|e| { format!( "Failed to parse 
maximum number of transactions per block: {}", e ) })?; } if let Ok(transaction_ttl_ms) = env::var(TRANSACTION_TIME_TO_LIVE_MS) { self.transaction_time_to_live_ms = serde_json::from_str(&transaction_ttl_ms) .map_err(|e| format!("Failed to parse transaction's ttl: {}", e))?; } Ok(()) } } fn default_maximum_transactions_in_block() -> u32 { DEFAULT_MAXIMUM_TRANSACTIONS_IN_BLOCK } fn default_transaction_time_to_live_ms() -> u64 { DEFAULT_TRANSACTION_TIME_TO_LIVE_MS } } #[cfg(test)] mod tests { use super::*; #[test] fn push_pending_transaction() { let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: 2, transaction_time_to_live_ms: 100000, }); queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100000, ) .accept() .expect("Failed to create Transaction."), ); } #[test] fn pop_pending_transactions() { let max_block_tx = 2; let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: max_block_tx, transaction_time_to_live_ms: 100000, }); for _ in 0..5 { queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100000, ) .accept() .expect("Failed to create Transaction."), ); } assert_eq!( queue.pop_pending_transactions().len(), max_block_tx as usize ) } #[test] fn pop_pending_transactions_with_timeout() { let max_block_tx = 6; let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: max_block_tx, transaction_time_to_live_ms: 200, }); for _ in 0..(max_block_tx - 1) { queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100, ) .accept() .expect("Failed to create Transaction."), ); } queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 200, ) .accept() .expect("Failed to 
create Transaction."), ); std::thread::sleep(Duration::from_millis(101)); assert_eq!(queue.pop_pending_transactions().len(), 1); queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 300, ) .accept() .expect("Failed to create Transaction."), ); std::thread::sleep(Duration::from_millis(201)); assert_eq!(queue.pop_pending_transactions().len(), 0); } }
use self::config::QueueConfiguration; use crate::prelude::*; use std::time::Duration; #[derive(Debug)] pub struct Queue { pending_tx: Vec<AcceptedTransaction>, maximum_transactions_in_block: usize, transaction_time_to_live: Duration, } impl Queue { pub fn from_configuration(config: &QueueConfiguration) -> Queue { Queue { pending_tx: Vec::new(), maximum_transactions_in_block: config.maximum_transactions_in_block as usize, transaction_time_to_live: Duration::from_millis(config.transaction_time_to_live_ms), } } pub fn push_pending_transaction(&mut self, tx: AcceptedTransaction) { self.pending_tx.push(tx); } pub fn pop_pending_transactions(&mut self) -> Vec<AcceptedTransaction> { self.pending_tx = self .pending_tx .iter() .cloned() .filter(|transaction| !transaction.is_expired(self.transaction_time_to_live)) .collect(); let pending_transactions_length = self.pending_tx.len(); let amount_to_drain = if self.maximum_transactions_in_block > pending_transactions_length { pending_transactions_length } else { self.maximum_transactions_in_block }; self.pending_tx.drain(..amount_to_drain).collect() } } pub mod config { use serde::Deserialize; use std::env; const MAXIMUM_TRANSACTIONS_IN_BLOCK: &str = "MAXIMUM_TRANSACTIONS_IN_BLOCK"; const DEFAULT_MAXIMUM_TRANSACTIONS_IN_BLOCK: u32 = 10; const TRANSACTION_TIME_TO_LIVE_MS: &str = "TRANSACTION_TIME_TO_LIVE_MS"; const DEFAULT_TRANSACTION_TIME_TO_LIVE_MS: u64 = 100_000; #[derive(Clone, Deserialize, Debug)] #[serde(rename_all = "UPPERCASE")] pub struct QueueConfiguration { #[serde(default = "default_maximum_transactions_in_block")] pub maximum_transactions_in_block: u32, #[serde(default = "default_transaction_time_to_live_ms")] pub transaction_time_to_live_ms: u64, } impl QueueConfiguration { pub fn load_environment(&mut self) -> Result<(), String> { if let Ok(max_block_tx) = env::var(MAXIMUM_TRANSACTIONS_IN_BLOCK) { self.maximum_transactions_in_block = serde_json::from_str(&max_block_tx).map_err(|e| { format!( "Failed to parse 
maximum number of transactions per block: {}", e ) })?; } if let Ok(transaction_ttl_ms) = env::var(TRANSACTION_TIME_TO_LIVE_MS) { self.transaction_time_to_live_ms = serde_json::from_str(&transaction_ttl_ms) .map_err(|e| format!("Failed to parse transaction's ttl: {}", e))?; } Ok(()) } } fn default_maximum_transactions_in_block() -> u32 { DEFAULT_MAXIMUM_TRANSACTIONS_IN_BLOCK } fn default_transaction_time_to_live_ms() -> u64 { DEFAULT_TRANSACTION_TIME_TO_LIVE_MS } } #[cfg(test)] mod tests { use super::*; #[test] fn push_pending_transaction() { let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: 2, transaction_time_to_live_ms: 100000, }); queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100000, ) .accept() .expect("Failed to create Transaction."), ); } #[test]
#[test] fn pop_pending_transactions_with_timeout() { let max_block_tx = 6; let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: max_block_tx, transaction_time_to_live_ms: 200, }); for _ in 0..(max_block_tx - 1) { queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100, ) .accept() .expect("Failed to create Transaction."), ); } queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 200, ) .accept() .expect("Failed to create Transaction."), ); std::thread::sleep(Duration::from_millis(101)); assert_eq!(queue.pop_pending_transactions().len(), 1); queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 300, ) .accept() .expect("Failed to create Transaction."), ); std::thread::sleep(Duration::from_millis(201)); assert_eq!(queue.pop_pending_transactions().len(), 0); } }
fn pop_pending_transactions() { let max_block_tx = 2; let mut queue = Queue::from_configuration(&QueueConfiguration { maximum_transactions_in_block: max_block_tx, transaction_time_to_live_ms: 100000, }); for _ in 0..5 { queue.push_pending_transaction( RequestedTransaction::new( Vec::new(), <Account as Identifiable>::Id::new("account", "domain"), 100000, ) .accept() .expect("Failed to create Transaction."), ); } assert_eq!( queue.pop_pending_transactions().len(), max_block_tx as usize ) }
function_block-full_function
[ { "content": "/// Initializes `Logger` with given `LoggerConfiguration`.\n\n/// After the initialization `log` macros will print with the use of this `Logger`.\n\n/// For more information see [log crate](https://docs.rs/log/0.4.8/log/).\n\npub fn init(configuration: &config::LoggerConfiguration) -> Result<(), SetLoggerError> {\n\n let mut logger_set = LOGGER_SET.write().expect(\"Failed to acquire lock.\");\n\n if !*logger_set {\n\n log::set_boxed_logger(Box::new(Logger::new(configuration)))\n\n .map(|()| log::set_max_level(configuration.max_log_level))?;\n\n *logger_set = true;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// This module contains all configuration related logic.\n\npub mod config {\n\n use log::LevelFilter;\n\n use serde::Deserialize;\n\n use std::env;\n\n\n\n const MAX_LOG_LEVEL: &str = \"MAX_LOG_LEVEL\";\n\n const DEFAULT_MAX_LOG_LEVEL: LevelFilter = LevelFilter::Debug;\n\n const TERMINAL_COLOR_ENABLED: &str = \"TERMINAL_COLOR_ENABLED\";\n\n const DEFAULT_TERMINAL_COLOR_ENABLED: bool = false;\n", "file_path": "iroha_logger/src/lib.rs", "rank": 0, "score": 181198.8135727134 }, { "content": "fn accept_transaction(criterion: &mut Criterion) {\n\n let domain_name = \"domain\";\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: KeyPair::generate()\n\n .expect(\"Failed to generate KeyPair.\")\n\n .public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(\n\n account_name,\n\n domain_name,\n\n KeyPair::generate()\n\n .expect(\"Failed to generate KeyPair.\")\n\n .public_key,\n\n ),\n", "file_path": "iroha/benches/validation.rs", "rank": 1, "score": 180779.131112412 }, { "content": "fn default_transaction_time_to_live_ms() -> u64 {\n\n DEFAULT_TRANSACTION_TIME_TO_LIVE_MS\n\n}\n", "file_path": "iroha_client/src/config.rs", "rank": 2, "score": 179114.33716558127 }, { 
"content": "fn default_transaction_time_to_live_ms() -> u64 {\n\n DEFAULT_TRANSACTION_TIME_TO_LIVE_MS\n\n}\n", "file_path": "iroha_client_no_std/src/config.rs", "rank": 3, "score": 175275.63628116506 }, { "content": "pub fn new(configuration: &Configuration) -> Self {\n\n Client {\n\n torii_url: configuration.torii_url.clone(),\n\n //TODO: The `public_key` from `configuration` will be different. Fix this inconsistency.\n\n key_pair: KeyPair::generate().expect(\"Failed to generate KeyPair.\"),\n\n proposed_transaction_ttl_ms: configuration.transaction_time_to_live_ms,\n\n }\n\n}\n\n\n\n/// Instructions API entry point. Submits one Iroha Special Instruction to `Iroha` peers.\n\n// #[log]\n\npub async fn submit(&mut self, instruction: Instruction) -> Result<(), String> {\n\n let network = Network::new(&self.torii_url);\n\n let mut v = Vec::new();\n\n v.push(instruction);\n\n let transaction: RequestedTransaction = RequestedTransaction::new(\n\n v,\n\n crate::account::Id::new(\"root\", \"global\"),\n\n self.proposed_transaction_ttl_ms,\n\n )\n", "file_path": "iroha_client_no_std/src/client.rs", "rank": 4, "score": 150876.1439869696 }, { "content": "fn default_torii_url() -> String {\n\n DEFAULT_TORII_URL.to_string()\n\n}\n\n\n", "file_path": "iroha_client/src/config.rs", "rank": 5, "score": 149933.90960182407 }, { "content": "fn default_torii_url() -> String {\n\n DEFAULT_TORII_URL.to_string()\n\n}\n\n\n", "file_path": "iroha_client_no_std/src/config.rs", "rank": 6, "score": 146708.14568446495 }, { "content": "fn default_torii_connect_url() -> String {\n\n DEFAULT_TORII_CONNECT_URL.to_string()\n\n}\n\n\n", "file_path": "iroha_client/src/config.rs", "rank": 7, "score": 146708.14568446495 }, { "content": "fn default_torii_connect_url() -> String {\n\n DEFAULT_TORII_CONNECT_URL.to_string()\n\n}\n\n\n", "file_path": "iroha_client_no_std/src/config.rs", "rank": 8, "score": 143664.3454681597 }, { "content": "fn validate_transaction(criterion: &mut Criterion) {\n\n let 
domain_name = \"domain\";\n\n let key_pair = KeyPair::generate().expect(\"Failed to generate KeyPair.\");\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: key_pair.public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, key_pair.public_key),\n\n destination_id: String::from(domain_name),\n\n };\n\n let asset_definition_id = AssetDefinitionId::new(\"xor\", domain_name);\n\n let create_asset = isi::Register {\n\n object: AssetDefinition::new(asset_definition_id),\n\n destination_id: domain_name.to_string(),\n\n };\n", "file_path": "iroha/benches/validation.rs", "rank": 9, "score": 143594.2880347521 }, { "content": "fn sign_transaction(criterion: &mut Criterion) {\n\n let domain_name = \"domain\";\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: KeyPair::generate()\n\n .expect(\"Failed to generate KeyPair.\")\n\n .public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(\n\n account_name,\n\n domain_name,\n\n KeyPair::generate()\n\n .expect(\"Failed to generate KeyPair.\")\n\n .public_key,\n\n ),\n", "file_path": "iroha/benches/validation.rs", "rank": 10, "score": 143594.2880347521 }, { "content": "fn chain_blocks(criterion: &mut Criterion) {\n\n let domain_name = \"domain\";\n\n let key_pair = KeyPair::generate().expect(\"Failed to generate KeyPair.\");\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: key_pair.public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = 
isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, key_pair.public_key),\n\n destination_id: String::from(domain_name),\n\n };\n\n let asset_definition_id = AssetDefinitionId::new(\"xor\", domain_name);\n\n let create_asset = isi::Register {\n\n object: AssetDefinition::new(asset_definition_id),\n\n destination_id: domain_name.to_string(),\n\n };\n", "file_path": "iroha/benches/validation.rs", "rank": 11, "score": 143420.53671544514 }, { "content": "fn validate_blocks(criterion: &mut Criterion) {\n\n let domain_name = \"domain\";\n\n let key_pair = KeyPair::generate().expect(\"Failed to generate KeyPair.\");\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: key_pair.public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, key_pair.public_key),\n\n destination_id: String::from(domain_name),\n\n };\n\n let asset_definition_id = AssetDefinitionId::new(\"xor\", domain_name);\n\n let create_asset = isi::Register {\n\n object: AssetDefinition::new(asset_definition_id),\n\n destination_id: domain_name.to_string(),\n\n };\n", "file_path": "iroha/benches/validation.rs", "rank": 12, "score": 143420.53671544514 }, { "content": "fn sign_blocks(criterion: &mut Criterion) {\n\n let domain_name = \"domain\";\n\n let key_pair = KeyPair::generate().expect(\"Failed to generate KeyPair.\");\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId {\n\n address: \"127.0.0.1:8080\".to_string(),\n\n public_key: key_pair.public_key,\n\n },\n\n };\n\n let account_name = \"account\";\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, key_pair.public_key),\n\n destination_id: String::from(domain_name),\n\n };\n\n let 
asset_definition_id = AssetDefinitionId::new(\"xor\", domain_name);\n\n let create_asset = isi::Register {\n\n object: AssetDefinition::new(asset_definition_id),\n\n destination_id: domain_name.to_string(),\n\n };\n", "file_path": "iroha/benches/validation.rs", "rank": 13, "score": 143420.53671544514 }, { "content": "#[inline]\n\npub fn bridge_incoming_external_transactions_asset_definition_id(\n\n) -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_incoming_external_transactions_asset\", \"bridge\")\n\n}\n\n\n\n/// \"Bridge incoming external transactions\" Asset Definition ID.\n", "file_path": "iroha/src/bridge.rs", "rank": 14, "score": 138235.87054853918 }, { "content": "#[inline]\n\npub fn bridge_outgoing_external_transactions_asset_definition_id(\n\n) -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_outgoing_external_transactions_asset\", \"bridge\")\n\n}\n\n\n\n/// This module provides structures for working with external assets.\n\n///\n\n/// # Note\n\n/// `ExternalAsset` is incompatible with Iroha `Asset`.\n\npub mod asset {\n\n use super::*;\n\n\n\n /// External asset Identifier.\n\n pub type Id = String;\n\n\n\n /// A data required for `ExternalAsset` entity initialization.\n\n #[cfg_attr(test, derive(PartialEq, Eq))]\n\n #[derive(Debug, Clone, Encode, Decode)]\n\n pub struct ExternalAsset {\n\n /// Component Identification.\n", "file_path": "iroha/src/bridge.rs", "rank": 15, "score": 138235.87054853918 }, { "content": "#[inline]\n\npub fn bridge_outgoing_external_transactions_asset_definition_id(\n\n) -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_outgoing_external_transactions_asset\", \"bridge\")\n\n}\n\n\n\n/// This module provides structures for working with external assets.\n\n///\n\n/// # Note\n\n/// `ExternalAsset` is incompatible with Iroha `Asset`.\n\npub mod asset {\n\n use super::*;\n\n\n\n /// External asset Identifier.\n\n pub type Id = String;\n\n\n\n 
/// A data required for `ExternalAsset` entity initialization.\n\n #[cfg_attr(test, derive(PartialEq, Eq))]\n\n #[derive(Debug, Clone, Encode, Decode)]\n\n pub struct ExternalAsset {\n\n /// Component Identification.\n", "file_path": "iroha_client_no_std/src/bridge.rs", "rank": 16, "score": 132931.90115980213 }, { "content": "#[inline]\n\npub fn bridge_incoming_external_transactions_asset_definition_id(\n\n) -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_incoming_external_transactions_asset\", \"bridge\")\n\n}\n\n\n", "file_path": "iroha_client_no_std/src/bridge.rs", "rank": 17, "score": 132931.90115980213 }, { "content": "fn check_response_assets(response: &QueryResult, expected_xor_amount: u32) {\n\n if let QueryResult::GetAccount(get_account_result) = response {\n\n let account = &get_account_result.account;\n\n let assets = &account.assets;\n\n let xor_amount = assets\n\n .iter()\n\n .find(|(_, asset)| asset.id.definition_id.name == \"XOR\")\n\n .map(|(_, asset)| asset.quantity)\n\n .unwrap_or(0);\n\n assert_eq!(xor_amount, expected_xor_amount);\n\n println!(\n\n \"{} account balance on Iroha is: {} XOR\",\n\n account.id, expected_xor_amount\n\n );\n\n } else {\n\n panic!(\"Test failed.\");\n\n }\n\n}\n\n\n\n#[async_std::main]\n", "file_path": "iroha_substrate/bridge-tester/src/main.rs", "rank": 18, "score": 120424.73944781089 }, { "content": "fn sort_peers(criterion: &mut Criterion) {\n\n let mut network_topology = NetworkTopology::new(&get_n_peers(N_PEERS), None, 1)\n\n .init()\n\n .expect(\"Failed to initialize topology.\");\n\n criterion.bench_function(\"sort_peers\", |b| {\n\n b.iter(|| network_topology.sort_peers(Some([0u8; 32])));\n\n });\n\n}\n\n\n\ncriterion_group!(benches, sort_peers);\n\ncriterion_main!(benches);\n", "file_path": "iroha/benches/sumeragi.rs", "rank": 19, "score": 106454.86652664095 }, { "content": "#[derive(Debug)]\n\nstruct BlockStore {\n\n path: PathBuf,\n\n}\n\n\n\nimpl BlockStore {\n\n fn new(path: 
&Path) -> BlockStore {\n\n if fs::read_dir(path).is_err() {\n\n fs::create_dir_all(path).expect(\"Failed to create Block Store directory.\");\n\n }\n\n BlockStore {\n\n path: path.to_path_buf(),\n\n }\n\n }\n\n\n\n fn with_genesis_block(path: &Path, genesis_block: ValidBlock) -> BlockStore {\n\n let block_store = BlockStore::new(path);\n\n task::block_on(async { block_store.write(&genesis_block).await })\n\n .expect(\"Failed to write a Genesis Block.\");\n\n block_store\n\n }\n", "file_path": "iroha/src/kura.rs", "rank": 20, "score": 106263.7346152942 }, { "content": "fn query_requests(criterion: &mut Criterion) {\n\n thread::spawn(create_and_start_iroha);\n\n thread::sleep(std::time::Duration::from_millis(50));\n\n let mut group = criterion.benchmark_group(\"query-reqeuests\");\n\n let configuration =\n\n Configuration::from_path(CONFIGURATION_PATH).expect(\"Failed to load configuration.\");\n\n let domain_name = \"domain\";\n\n let create_domain = isi::Add {\n\n object: Domain::new(domain_name.to_string()),\n\n destination_id: PeerId::new(\n\n &configuration.torii_configuration.torii_url,\n\n &configuration.public_key,\n\n ),\n\n };\n\n let account_name = \"account\";\n\n let account_id = AccountId::new(account_name, domain_name);\n\n let (public_key, _) = configuration.key_pair();\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, public_key),\n\n destination_id: String::from(domain_name),\n", "file_path": "iroha_client/benches/torii.rs", "rank": 21, "score": 104304.58538811447 }, { "content": "fn instruction_submits(criterion: &mut Criterion) {\n\n thread::spawn(create_and_start_iroha);\n\n thread::sleep(std::time::Duration::from_millis(50));\n\n let mut group = criterion.benchmark_group(\"instruction-reqeuests\");\n\n let configuration =\n\n Configuration::from_path(CONFIGURATION_PATH).expect(\"Failed to load configuration.\");\n\n let domain_name = \"domain\";\n\n let create_domain = isi::Add {\n\n object: 
Domain::new(domain_name.to_string()),\n\n destination_id: PeerId::new(\n\n &configuration.torii_configuration.torii_url,\n\n &configuration.public_key,\n\n ),\n\n };\n\n let account_name = \"account\";\n\n let account_id = AccountId::new(account_name, domain_name);\n\n let (public_key, _) = configuration.key_pair();\n\n let create_account = isi::Register {\n\n object: Account::with_signatory(account_name, domain_name, public_key),\n\n destination_id: String::from(domain_name),\n", "file_path": "iroha_client/benches/torii.rs", "rank": 22, "score": 104304.58538811447 }, { "content": "type Name = String;\n\n\n\n/// Named group of `Account` and `Asset` entities.\n\n#[derive(Debug, Clone, Io, Encode, Decode)]\n\npub struct Domain {\n\n /// Domain name, for example company name.\n\n pub name: Name,\n\n /// Accounts of the domain.\n\n pub accounts: BTreeMap<<Account as Identifiable>::Id, Account>,\n\n /// Assets of the domain.\n\n pub asset_definitions: BTreeMap<<AssetDefinition as Identifiable>::Id, AssetDefinition>,\n\n}\n\n\n\nimpl Domain {\n\n /// Creates new detached `Domain`.\n\n ///\n\n /// Should be used for creation of a new `Domain` or while making queries.\n\n pub fn new(name: Name) -> Self {\n\n Domain {\n\n name,\n", "file_path": "iroha/src/domain.rs", "rank": 23, "score": 103853.62493631661 }, { "content": "pub fn permission_asset_definition_id() -> AssetDefinitionId {\n\n AssetDefinitionId::new(\"permissions\", \"global\")\n\n}\n\n\n\n#[derive(Clone, Debug, Default, Encode, Decode)]\n\npub struct Permissions {\n\n origin: Vec<Permission>,\n\n}\n\n\n\n#[derive(Clone, Debug, Encode, Decode, PartialEq)]\n\npub enum Permission {\n\n Anything,\n\n AddDomain,\n\n AddListener,\n\n RegisterAssetDefinition(Option<<Domain as Identifiable>::Id>),\n\n RegisterAccount(Option<<Domain as Identifiable>::Id>),\n\n MintAsset(\n\n Option<<Domain as Identifiable>::Id>,\n\n Option<<AssetDefinition as Identifiable>::Id>,\n\n ),\n", "file_path": "iroha/src/permission.rs", "rank": 
24, "score": 101019.8010953497 }, { "content": "type Name = String;\n\n/// Named group of `Account` and `Asset` entities.\n\n#[derive(Debug, Clone, Encode, Decode)]\n\npub struct Domain {\n\n /// Domain name, for example company name.\n\n pub name: Name,\n\n /// Accounts of the domain.\n\n pub accounts: BTreeMap<<Account as Identifiable>::Id, Account>,\n\n /// Assets of the domain.\n\n pub asset_definitions: BTreeMap<<AssetDefinition as Identifiable>::Id, AssetDefinition>,\n\n}\n\n\n\nimpl Domain {\n\n /// Creates new detached `Domain`.\n\n ///\n\n /// Should be used for creation of a new `Domain` or while making queries.\n\n pub fn new(name: Name) -> Self {\n\n Domain {\n\n name,\n\n accounts: BTreeMap::new(),\n", "file_path": "iroha_client_no_std/src/domain.rs", "rank": 25, "score": 98826.89205412204 }, { "content": "/// Calculates hash of the given bytes.\n\npub fn hash(bytes: Vec<u8>) -> Hash {\n\n let vec_hash = VarBlake2b::new(32)\n\n .expect(\"Failed to initialize variable size hash\")\n\n .chain(bytes)\n\n .vec_result();\n\n let mut hash = [0; 32];\n\n hash.copy_from_slice(&vec_hash);\n\n hash\n\n}\n\n\n\n/// Represents signature of the data (`Block` or `Transaction` for example).\n\n#[derive(Clone, Encode, Decode)]\n\npub struct Signature {\n\n /// Ed25519 (Edwards-curve Digital Signature Algorithm scheme using SHA-512 and Curve25519)\n\n /// public-key of an approved authority.\n\n pub public_key: PublicKey,\n\n /// Ed25519 signature is placed here.\n\n signature: Ed25519Signature,\n\n}\n\n\n", "file_path": "iroha/src/crypto.rs", "rank": 26, "score": 98142.25502423255 }, { "content": "pub fn permission_asset_definition_id() -> AssetDefinitionId {\n\n AssetDefinitionId::new(\"permissions\", \"global\")\n\n}\n\n\n\n#[derive(Clone, Debug, Default, Encode, Decode)]\n\npub struct Permissions {\n\n pub origin: Vec<Permission>,\n\n}\n\n\n\n#[derive(Clone, Debug, Encode, Decode, PartialEq)]\n\npub enum Permission {\n\n Anything,\n\n AddDomain,\n\n 
AddListener,\n\n RegisterAssetDefinition(Option<<Domain as Identifiable>::Id>),\n\n RegisterAccount(Option<<Domain as Identifiable>::Id>),\n\n MintAsset(\n\n Option<<Domain as Identifiable>::Id>,\n\n Option<<AssetDefinition as Identifiable>::Id>,\n\n ),\n", "file_path": "iroha_client_no_std/src/permission.rs", "rank": 27, "score": 97488.45305709801 }, { "content": "fn find_sender(server_url: &str) -> Sender<RequestStream> {\n\n unsafe {\n\n for tuple in ENDPOINTS.iter() {\n\n if tuple.0 == server_url {\n\n return tuple.1.clone();\n\n }\n\n }\n\n }\n\n panic!(\"Can't find ENDPOINT: {}\", server_url);\n\n}\n\n\n\npub type State<T> = Arc<RwLock<T>>;\n", "file_path": "iroha_network/src/mock.rs", "rank": 28, "score": 96810.0575397989 }, { "content": "fn create_and_start_iroha() {\n\n let temp_dir = TempDir::new().expect(\"Failed to create TempDir.\");\n\n let mut configuration =\n\n Configuration::from_path(CONFIGURATION_PATH).expect(\"Failed to load configuration.\");\n\n configuration\n\n .kura_configuration\n\n .kura_block_store_path(temp_dir.path());\n\n let iroha = Iroha::new(configuration);\n\n task::block_on(iroha.start()).expect(\"Failed to start Iroha.\");\n\n //Prevents temp_dir from clean up untill the end of the tests.\n\n #[allow(clippy::empty_loop)]\n\n loop {}\n\n}\n\n\n\ncriterion_group!(instructions, instruction_submits);\n\ncriterion_group!(queries, query_requests);\n\ncriterion_main!(queries, instructions);\n", "file_path": "iroha_client/benches/torii.rs", "rank": 29, "score": 94650.13952504979 }, { "content": "/// \"Bridges\" Asset Definition ID.\n\npub fn bridges_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridges_asset\", \"bridge\")\n\n}\n\n\n\n#[inline]\n", "file_path": "iroha/src/bridge.rs", "rank": 30, "score": 92388.12570958177 }, { "content": "/// \"Bridge\" Asset Definition ID.\n\npub fn bridge_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n 
AssetDefinitionId::new(\"bridge_asset\", \"bridge\")\n\n}\n\n\n\n/// \"Bridge external assets\" Asset Definition ID.\n", "file_path": "iroha/src/bridge.rs", "rank": 31, "score": 92388.12570958177 }, { "content": "#[proc_macro_derive(IntoContract)]\n\npub fn into_contract_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).expect(\"Failed to parse input Token Stream.\");\n\n impl_into_contract(&ast)\n\n}\n\n\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 32, "score": 90976.1716928738 }, { "content": "#[proc_macro_derive(Io)]\n\npub fn io_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).expect(\"Failed to parse input Token Stream.\");\n\n impl_io(&ast)\n\n}\n\n\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 33, "score": 90976.1716928738 }, { "content": "#[proc_macro_derive(IntoQuery)]\n\npub fn into_query_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).expect(\"Failed to parse input Token Stream.\");\n\n impl_into_query(&ast)\n\n}\n\n\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 34, "score": 90976.1716928738 }, { "content": "#[inline]\n\npub fn bridge_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_asset\", \"bridge\")\n\n}\n\n\n", "file_path": "iroha_client_no_std/src/bridge.rs", "rank": 35, "score": 89038.17353068889 }, { "content": "#[inline]\n\npub fn bridge_external_assets_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_external_assets_asset\", \"bridge\")\n\n}\n\n\n\n/// \"Bridge outgoing external transactions\" Asset Definition ID.\n", "file_path": "iroha/src/bridge.rs", "rank": 36, "score": 89038.17353068889 }, { "content": "#[inline]\n\npub fn bridges_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridges_asset\", \"bridge\")\n\n}\n\n\n", "file_path": 
"iroha_client_no_std/src/bridge.rs", "rank": 37, "score": 89038.17353068889 }, { "content": "#[inline]\n\npub fn bridge_external_assets_asset_definition_id() -> <AssetDefinition as Identifiable>::Id {\n\n AssetDefinitionId::new(\"bridge_external_assets_asset\", \"bridge\")\n\n}\n\n\n", "file_path": "iroha_client_no_std/src/bridge.rs", "rank": 38, "score": 86011.45860096553 }, { "content": "fn get_n_peers(n: usize) -> Vec<PeerId> {\n\n (0..n)\n\n .map(|i| PeerId {\n\n address: format!(\"127.0.0.{}\", i),\n\n public_key: KeyPair::generate()\n\n .expect(\"Failed to generate KeyPair.\")\n\n .public_key,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "iroha/benches/sumeragi.rs", "rank": 39, "score": 84840.65253817246 }, { "content": "#[proc_macro_attribute]\n\npub fn log(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let input: ItemFn = syn::parse_macro_input!(item as ItemFn);\n\n let args = syn::parse_macro_input!(attr as AttributeArgs);\n\n if args.len() > 1 {\n\n panic!(\n\n \"Unexpected number of arguments: 1 or 0 arguments expected, got {}\",\n\n args.len()\n\n )\n\n }\n\n let log_level = args\n\n .first()\n\n .map(|nested_meta| {\n\n if let NestedMeta::Lit(Lit::Str(lit_str)) = nested_meta {\n\n Level::from_str(&lit_str.value()).expect(\"Failed to parse log level.\")\n\n } else {\n\n panic!(\"Invalid argument. String expected.\")\n\n }\n\n })\n\n .unwrap_or(Level::Debug);\n\n let log_level = format!(\"{}\", log_level);\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 40, "score": 84322.37354143898 }, { "content": "fn impl_into_contract(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! 
{\n\n\n\n impl std::convert::From<#name> for Contract {\n\n fn from(origin: #name) -> Self {\n\n Contract::#name(origin)\n\n }\n\n }\n\n };\n\n gen.into()\n\n}\n\n\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 41, "score": 75708.48767835712 }, { "content": "fn impl_into_query(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n\n\n impl std::convert::From<#name> for IrohaQuery {\n\n fn from(origin: #name) -> Self {\n\n IrohaQuery::#name(origin)\n\n }\n\n }\n\n };\n\n gen.into()\n\n}\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 42, "score": 75708.48767835712 }, { "content": "fn impl_io(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n\n\n impl std::convert::From<#name> for Vec<u8> {\n\n fn from(origin: #name) -> Self {\n\n origin.encode()\n\n }\n\n }\n\n\n\n impl std::convert::From<&#name> for Vec<u8> {\n\n fn from(origin: &#name) -> Self {\n\n origin.encode()\n\n }\n\n }\n\n\n\n impl std::convert::TryFrom<Vec<u8>> for #name {\n\n type Error = String;\n\n\n\n fn try_from(vector: Vec<u8>) -> Result<Self, Self::Error> {\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 43, "score": 75708.48767835712 }, { "content": "fn param_names(pat: Pat) -> Box<dyn Iterator<Item = Ident>> {\n\n match pat {\n\n Pat::Ident(PatIdent { ident, .. }) => Box::new(std::iter::once(ident)),\n\n Pat::Reference(PatReference { pat, .. }) => param_names(*pat),\n\n Pat::Struct(PatStruct { fields, .. }) => Box::new(\n\n fields\n\n .into_iter()\n\n .flat_map(|FieldPat { pat, .. }| param_names(*pat)),\n\n ),\n\n Pat::Tuple(PatTuple { elems, .. }) => Box::new(elems.into_iter().flat_map(param_names)),\n\n Pat::TupleStruct(PatTupleStruct {\n\n pat: PatTuple { elems, .. 
},\n\n ..\n\n }) => Box::new(elems.into_iter().flat_map(param_names)),\n\n _ => Box::new(std::iter::empty()),\n\n }\n\n}\n\n\n", "file_path": "iroha_macro/iroha_derive/src/lib.rs", "rank": 44, "score": 71836.96787106093 }, { "content": "#[derive(Debug)]\n\nstruct ToriiState {\n\n world_state_view: Arc<RwLock<WorldStateView>>,\n\n transaction_sender: Arc<RwLock<TransactionSender>>,\n\n sumeragi_message_sender: Arc<RwLock<SumeragiMessageSender>>,\n\n block_sync_message_sender: Arc<RwLock<BlockSyncMessageSender>>,\n\n consumers: Arc<RwLock<Vec<Consumer>>>,\n\n system: Arc<RwLock<System>>,\n\n events_sender: EventsSender,\n\n}\n\n\n\nasync fn handle_requests(\n\n state: State<ToriiState>,\n\n stream: Box<dyn AsyncStream>,\n\n) -> Result<(), String> {\n\n let state_arc = Arc::clone(&state);\n\n task::spawn(async {\n\n if let Err(e) = Network::handle_message_async(state_arc, stream, handle_request).await {\n\n log::error!(\"Failed to handle message: {}\", e);\n\n }\n\n })\n", "file_path": "iroha/src/torii.rs", "rank": 45, "score": 65656.72756112313 }, { "content": "#[derive(Default)]\n\nstruct Logger {\n\n terminal_color_enabled: bool,\n\n date_time_format: String,\n\n}\n\n\n\nimpl Logger {\n\n pub fn new(configuration: &config::LoggerConfiguration) -> Logger {\n\n Logger {\n\n terminal_color_enabled: configuration.terminal_color_enabled,\n\n date_time_format: configuration.date_time_format.clone(),\n\n }\n\n }\n\n\n\n /// Default values were taken from the `pretty_env_logger` [source code](https://github.com/seanmonstar/pretty-env-logger/blob/master/src/lib.rs).\n\n fn color(&self, level: &Level) -> u8 {\n\n match level {\n\n Level::Error => RED,\n\n Level::Warn => YELLOW,\n\n Level::Info => GREEN,\n\n Level::Debug => BLUE,\n", "file_path": "iroha_logger/src/lib.rs", "rank": 46, "score": 65651.67952688405 }, { "content": "struct RequestStream {\n\n bytes: Vec<u8>,\n\n tx: Sender<Vec<u8>>,\n\n}\n\n\n\nimpl Unpin for RequestStream {}\n\n\n\nimpl Read for RequestStream 
{\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<async_std::io::Result<usize>> {\n\n let bytes = &mut self.get_mut().bytes;\n\n let length = if buf.len() > bytes.len() {\n\n bytes.len()\n\n } else {\n\n buf.len()\n\n };\n\n for (i, byte) in bytes.drain(..length).enumerate() {\n", "file_path": "iroha_network/src/mock.rs", "rank": 47, "score": 64353.68653206619 }, { "content": "/// This trait should be implemented for all Iroha Queries.\n\npub trait Query {\n\n /// Execute query on the `WorldStateView`.\n\n ///\n\n /// Returns Ok(QueryResult) if succeeded and Err(String) if failed.\n\n fn execute(&self, world_state_view: &WorldStateView) -> Result<QueryResult, String>;\n\n}\n", "file_path": "iroha/src/query.rs", "rank": 48, "score": 60433.58083649166 }, { "content": "/// This trait marks entity that implement it as identifiable with an `Id` type to find them by.\n\npub trait Identifiable {\n\n /// Defines the type of entity's identification.\n\n type Id;\n\n}\n\n\n\npub mod prelude {\n\n //! Re-exports important traits and types. 
Meant to be glob imported when using `Iroha`.\n\n\n\n #[doc(inline)]\n\n pub use crate::{\n\n account::{Account, Id as AccountId},\n\n asset::{Asset, AssetDefinition, AssetDefinitionId, AssetId},\n\n block::{CommittedBlock, PendingBlock, ValidBlock},\n\n crypto::{Hash, KeyPair, PrivateKey, PublicKey, Signature},\n\n domain::Domain,\n\n isi::{Add, Demint, Instruction, Mint, Register, Remove, Transfer},\n\n peer::{Peer, PeerId},\n\n query::{IrohaQuery, Query, QueryRequest, QueryResult},\n\n tx::{AcceptedTransaction, RequestedTransaction, SignedTransaction, ValidTransaction},\n\n wsv::WorldStateView,\n\n CommittedBlockReceiver, CommittedBlockSender, Identifiable, Iroha, TransactionReceiver,\n\n TransactionSender, ValidBlockReceiver, ValidBlockSender,\n\n };\n\n\n\n #[doc(inline)]\n\n #[cfg(feature = \"bridge\")]\n\n pub use crate::bridge::{Bridge, BridgeDefinition, BridgeDefinitionId, BridgeId, BridgeKind};\n\n}\n", "file_path": "iroha/src/lib.rs", "rank": 49, "score": 60433.58083649166 }, { "content": "pub trait IntoContract {}\n", "file_path": "iroha_macro/src/lib.rs", "rank": 50, "score": 59140.06007539452 }, { "content": "pub trait IntoQuery {}\n", "file_path": "iroha_macro/src/lib.rs", "rank": 51, "score": 59140.06007539452 }, { "content": "/// This trait marks entity that implement it as identifiable with an `Id` type to find them by.\n\npub trait Identifiable {\n\n /// Defines the type of entity's identification.\n\n type Id;\n\n}\n", "file_path": "iroha_client_no_std/src/lib.rs", "rank": 52, "score": 57926.25089690165 }, { "content": "fn main() {\n\n let matches = App::new(\"Iroha CLI Client\")\n\n .version(\"0.1.0\")\n\n .author(\"Nikita Puzankov <puzankov@soramitsu.co.jp>\")\n\n .about(\"Iroha CLI Client provides an ability to interact with Iroha Peers Web API without direct network usage.\")\n\n .arg(\n\n Arg::with_name(CONFIG)\n\n .short(\"c\")\n\n .long(CONFIG)\n\n .value_name(\"FILE\")\n\n .help(\"Sets a config file path.\")\n\n .takes_value(true)\n\n 
.default_value(\"config.json\"),\n\n )\n\n .subcommand(\n\n domain::build_app(),\n\n )\n\n .subcommand(\n\n account::build_app(),\n\n )\n", "file_path": "iroha_client_cli/src/main.rs", "rank": 53, "score": 57530.402669798124 }, { "content": "pub trait AsyncStream: Read + Write + Send + Unpin {}\n\nimpl<T> AsyncStream for T where T: Read + Write + Send + Unpin {}\n\n\n", "file_path": "iroha_network/src/mock.rs", "rank": 54, "score": 48316.601145806126 }, { "content": "pub trait Io: parity_scale_codec::Encode + parity_scale_codec::Decode {}\n", "file_path": "iroha_macro/src/lib.rs", "rank": 55, "score": 45732.05710183873 }, { "content": " ))\n\n }\n\n }\n\n\n\n impl Register<Domain, AssetDefinition> {\n\n pub(crate) fn execute(\n\n &self,\n\n authority: <Account as Identifiable>::Id,\n\n world_state_view: &mut WorldStateView,\n\n ) -> Result<(), String> {\n\n PermissionInstruction::CanRegisterAssetDefinition(authority, None)\n\n .execute(world_state_view)?;\n\n let asset = self.object.clone();\n\n world_state_view\n\n .domain(&self.destination_id)\n\n .ok_or(\"Failed to find domain.\")?\n\n .asset_definitions\n\n .insert(asset.id.clone(), asset);\n\n Ok(())\n\n }\n", "file_path": "iroha/src/domain.rs", "rank": 65, "score": 43656.374831675035 }, { "content": " }\n\n }\n\n\n\n impl From<Register<Domain, Account>> for Instruction {\n\n fn from(instruction: Register<Domain, Account>) -> Self {\n\n Instruction::Domain(DomainInstruction::RegisterAccount(\n\n instruction.destination_id,\n\n instruction.object,\n\n ))\n\n }\n\n }\n\n\n\n impl Register<Domain, Account> {\n\n pub(crate) fn execute(\n\n &self,\n\n authority: <Account as Identifiable>::Id,\n\n world_state_view: &mut WorldStateView,\n\n ) -> Result<(), String> {\n\n PermissionInstruction::CanRegisterAccount(authority, None).execute(world_state_view)?;\n\n let account = self.object.clone();\n", "file_path": "iroha/src/domain.rs", "rank": 66, "score": 43656.366385387475 }, { "content": " }\n\n}\n\n\n\n/// Query 
module provides `IrohaQuery` Domain related implementations.\n\npub mod query {\n\n use super::*;\n\n use crate::query::IrohaQuery;\n\n use parity_scale_codec::{Decode, Encode};\n\n use std::time::SystemTime;\n\n\n\n /// Get information related to the domain with a specified `domain_name`.\n\n #[derive(Clone, Debug, Io, IntoQuery, Encode, Decode)]\n\n pub struct GetDomain {\n\n /// Identification of an domain to find information about.\n\n pub domain_name: <Domain as Identifiable>::Id,\n\n }\n\n\n\n /// Result of the `GetDomain` execution.\n\n #[derive(Clone, Debug, Encode, Decode)]\n\n pub struct GetDomainResult {\n", "file_path": "iroha/src/domain.rs", "rank": 67, "score": 43655.118917043575 }, { "content": " }\n\n\n\n impl DomainInstruction {\n\n /// Executes `DomainInstruction` on the given `WorldStateView`.\n\n /// Returns `Ok(())` if execution succeeded and `Err(String)` with error message if not.\n\n pub fn execute(\n\n &self,\n\n authority: <Account as Identifiable>::Id,\n\n world_state_view: &mut WorldStateView,\n\n ) -> Result<(), String> {\n\n match self {\n\n DomainInstruction::RegisterAccount(domain_name, account) => {\n\n Register::new(account.clone(), domain_name.clone())\n\n .execute(authority, world_state_view)\n\n }\n\n DomainInstruction::RegisterAsset(domain_name, asset) => {\n\n Register::new(asset.clone(), domain_name.clone())\n\n .execute(authority, world_state_view)\n\n }\n\n }\n", "file_path": "iroha/src/domain.rs", "rank": 68, "score": 43654.80037257055 }, { "content": "}\n\n\n\nimpl Identifiable for Domain {\n\n type Id = Name;\n\n}\n\n\n\n/// Iroha Special Instructions module provides `DomainInstruction` enum with all legal types of\n\n/// Domain related instructions as variants, implementations of generic Iroha Special Instructions\n\n/// and the `From/Into` implementations to convert `DomainInstruction` variants into generic ISI.\n\npub mod isi {\n\n use super::*;\n\n use crate::{isi::Register, 
permission::isi::PermissionInstruction};\n\n\n\n /// Enumeration of all legal Domain related Instructions.\n\n #[derive(Clone, Debug, Io, Encode, Decode)]\n\n pub enum DomainInstruction {\n\n /// Variant of the generic `Register` instruction for `Account` --> `Domain`.\n\n RegisterAccount(Name, Account),\n\n /// Variant of the generic `Register` instruction for `AssetDefinition` --> `Domain`.\n\n RegisterAsset(Name, AssetDefinition),\n", "file_path": "iroha/src/domain.rs", "rank": 69, "score": 43654.08011157608 }, { "content": " impl Query for GetDomain {\n\n #[log]\n\n fn execute(&self, world_state_view: &WorldStateView) -> Result<QueryResult, String> {\n\n Ok(QueryResult::GetDomain(GetDomainResult {\n\n domain: world_state_view\n\n .read_domain(&self.domain_name)\n\n .map(Clone::clone)\n\n .ok_or(\"Failed to get a domain.\")?,\n\n }))\n\n }\n\n }\n\n}\n", "file_path": "iroha/src/domain.rs", "rank": 70, "score": 43651.67022155797 }, { "content": " let domain = world_state_view\n\n .domain(&self.destination_id)\n\n .ok_or(\"Failed to find domain.\")?;\n\n if domain.accounts.contains_key(&account.id) {\n\n Err(format!(\n\n \"Domain already contains an account with an Id: {:?}\",\n\n &account.id\n\n ))\n\n } else {\n\n domain.accounts.insert(account.id.clone(), account);\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n impl From<Register<Domain, AssetDefinition>> for Instruction {\n\n fn from(instruction: Register<Domain, AssetDefinition>) -> Self {\n\n Instruction::Domain(DomainInstruction::RegisterAsset(\n\n instruction.destination_id,\n\n instruction.object,\n", "file_path": "iroha/src/domain.rs", "rank": 71, "score": 43649.237248238365 }, { "content": " /// Domain information.\n\n pub domain: Domain,\n\n }\n\n\n\n impl GetDomain {\n\n /// Build a `GetDomain` query in the form of a `QueryRequest`.\n\n pub fn build_request(domain_name: <Domain as Identifiable>::Id) -> QueryRequest {\n\n let query = GetDomain { domain_name };\n\n QueryRequest {\n\n timestamp: 
SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .expect(\"Failed to get System Time.\")\n\n .as_millis()\n\n .to_string(),\n\n signature: Option::None,\n\n query: query.into(),\n\n }\n\n }\n\n }\n\n\n", "file_path": "iroha/src/domain.rs", "rank": 72, "score": 43643.95283464663 }, { "content": " accounts: BTreeMap::new(),\n\n asset_definitions: BTreeMap::new(),\n\n }\n\n }\n\n\n\n /// Constructor of `Register<Domain, Account>` Iroha Special Instruction.\n\n pub fn register_account(&self, object: Account) -> Register<Domain, Account> {\n\n Register {\n\n object,\n\n destination_id: self.name.clone(),\n\n }\n\n }\n\n\n\n /// Constructor of `Register<Domain, AssetDefinition>` Iroha Special Instruction.\n\n pub fn register_asset(&self, object: AssetDefinition) -> Register<Domain, AssetDefinition> {\n\n Register {\n\n object,\n\n destination_id: self.name.clone(),\n\n }\n\n }\n", "file_path": "iroha/src/domain.rs", "rank": 73, "score": 43639.46183799945 }, { "content": "//! This module contains `Domain` structure and related implementations and trait implementations.\n\nuse crate::{isi::prelude::*, prelude::*};\n\nuse iroha_derive::*;\n\nuse parity_scale_codec::{Decode, Encode};\n\nuse std::collections::BTreeMap;\n\n\n", "file_path": "iroha/src/domain.rs", "rank": 74, "score": 43633.937270874776 }, { "content": "/// with `Iroha` subsystems.\n\n#[derive(Clone, Debug, Io, Encode, Decode)]\n\npub struct AcceptedTransaction {\n\n pub payload: Payload,\n\n pub(crate) signatures: Vec<Signature>,\n\n}\n\n\n\nimpl AcceptedTransaction {\n\n /// Sign transaction with the provided key pair.\n\n ///\n\n /// Returns `Ok(SignedTransaction)` if succeeded and `Err(String)` if failed.\n\n pub fn sign(self, key_pair: &KeyPair) -> Result<SignedTransaction, String> {\n\n let mut signatures = self.signatures.clone();\n\n signatures.push(Signature::new(key_pair.clone(), &Vec::from(&self.payload))?);\n\n Ok(SignedTransaction {\n\n payload: self.payload,\n\n signatures,\n\n 
})\n\n }\n\n\n", "file_path": "iroha/src/tx.rs", "rank": 75, "score": 43619.01647492692 }, { "content": " pub fn sign(self, signatures: Vec<Signature>) -> Result<SignedTransaction, String> {\n\n Ok(SignedTransaction {\n\n payload: self.payload,\n\n signatures: vec![self.signatures, signatures]\n\n .into_iter()\n\n .flatten()\n\n .collect(),\n\n })\n\n }\n\n\n\n // TODO: comment that it should use a clone\n\n /// Move transaction lifecycle forward by checking an ability to apply instructions to the\n\n /// `WorldStateView`.\n\n ///\n\n /// Returns `Ok(ValidTransaction)` if succeeded and `Err(String)` if failed.\n\n pub fn validate(\n\n self,\n\n world_state_view: &mut WorldStateView,\n\n ) -> Result<ValidTransaction, String> {\n\n for instruction in &self.payload.instructions {\n", "file_path": "iroha/src/tx.rs", "rank": 76, "score": 43617.69390261401 }, { "content": " world_state_view: &mut WorldStateView,\n\n ) -> Result<ValidTransaction, String> {\n\n for instruction in &self.payload.instructions {\n\n instruction.execute(self.payload.account_id.clone(), world_state_view)?;\n\n }\n\n Ok(ValidTransaction {\n\n payload: self.payload,\n\n signatures: self.signatures,\n\n })\n\n }\n\n\n\n /// Apply instructions to the `WorldStateView`.\n\n pub fn proceed(&self, world_state_view: &mut WorldStateView) -> Result<(), String> {\n\n for instruction in &self.payload.instructions {\n\n if let Err(e) = instruction.execute(self.payload.account_id.clone(), world_state_view) {\n\n log::warn!(\"Failed to invoke instruction on WSV: {}\", e);\n\n }\n\n }\n\n Ok(())\n\n }\n", "file_path": "iroha/src/tx.rs", "rank": 77, "score": 43610.218447809486 }, { "content": " hash.copy_from_slice(&vec_hash);\n\n hash\n\n }\n\n}\n\n\n\n/// `ValidTransaction` represents trustfull Transaction state.\n\n#[derive(Clone, Debug, Io, Encode, Decode)]\n\npub struct ValidTransaction {\n\n payload: Payload,\n\n signatures: Vec<Signature>,\n\n}\n\n\n\nimpl ValidTransaction {\n\n // TODO: comment that it 
should use a clone\n\n /// Move transaction lifecycle forward by checking an ability to apply instructions to the\n\n /// `WorldStateView`.\n\n ///\n\n /// Returns `Ok(ValidTransaction)` if succeeded and `Err(String)` if failed.\n\n pub fn validate(\n\n self,\n", "file_path": "iroha/src/tx.rs", "rank": 78, "score": 43609.26575162223 }, { "content": " signatures: Vec<Signature>,\n\n}\n\n\n\n#[derive(Clone, Debug, Io, Encode, Decode)]\n\npub struct Payload {\n\n /// Account ID of transaction creator.\n\n pub account_id: <Account as Identifiable>::Id,\n\n /// An ordered set of instructions.\n\n pub instructions: Vec<Instruction>,\n\n /// Time of creation (unix time, in milliseconds).\n\n pub creation_time: u64,\n\n /// The transaction will be dropped after this time if it is still in a `Queue`.\n\n pub time_to_live_ms: u64,\n\n}\n\n// 88 dc 34 17 d5058ec4b4503e0c12ea1a0a89be200fe98922423d4334014fa6b0ee\n\n// 207, 157, 30, 3\n\n\n\nimpl RequestedTransaction {\n\n /// Default `RequestedTransaction` constructor.\n\n pub fn new(\n", "file_path": "iroha/src/tx.rs", "rank": 79, "score": 43607.8943162067 }, { "content": " instruction.execute(self.payload.account_id.clone(), world_state_view)?;\n\n }\n\n Ok(ValidTransaction {\n\n payload: self.payload,\n\n signatures: self.signatures,\n\n })\n\n }\n\n\n\n /// Calculate transaction `Hash`.\n\n pub fn hash(&self) -> Hash {\n\n use ursa::blake2::{\n\n digest::{Input, VariableOutput},\n\n VarBlake2b,\n\n };\n\n let bytes: Vec<u8> = self.into();\n\n let vec_hash = VarBlake2b::new(32)\n\n .expect(\"Failed to initialize variable size hash\")\n\n .chain(bytes)\n\n .vec_result();\n\n let mut hash = [0; 32];\n", "file_path": "iroha/src/tx.rs", "rank": 80, "score": 43606.369909082794 }, { "content": " .duration_since(SystemTime::UNIX_EPOCH)\n\n .expect(\"Failed to get System Time.\");\n\n let elapsed = current_time - Duration::from_millis(self.payload.creation_time);\n\n let ttl = min(\n\n 
Duration::from_millis(self.payload.time_to_live_ms),\n\n transaction_time_to_live,\n\n );\n\n elapsed > ttl\n\n }\n\n}\n\n\n\n/// `SignedTransaction` represents transaction with signatures accumulated from Peer/Peers.\n\n#[derive(Clone, Debug, Io, Encode, Decode)]\n\npub struct SignedTransaction {\n\n payload: Payload,\n\n signatures: Vec<Signature>,\n\n}\n\n\n\nimpl SignedTransaction {\n\n /// Add additional Signatures.\n", "file_path": "iroha/src/tx.rs", "rank": 81, "score": 43605.81597741354 }, { "content": "\n\n /// Calculate transaction `Hash`.\n\n pub fn hash(&self) -> Hash {\n\n use ursa::blake2::{\n\n digest::{Input, VariableOutput},\n\n VarBlake2b,\n\n };\n\n let bytes: Vec<u8> = self.into();\n\n let vec_hash = VarBlake2b::new(32)\n\n .expect(\"Failed to initialize variable size hash\")\n\n .chain(bytes)\n\n .vec_result();\n\n let mut hash = [0; 32];\n\n hash.copy_from_slice(&vec_hash);\n\n hash\n\n }\n\n}\n\n\n\nimpl From<&AcceptedTransaction> for RequestedTransaction {\n\n fn from(transaction: &AcceptedTransaction) -> RequestedTransaction {\n", "file_path": "iroha/src/tx.rs", "rank": 82, "score": 43605.45687675078 }, { "content": " /// Calculate transaction `Hash`.\n\n pub fn hash(&self) -> Hash {\n\n use ursa::blake2::{\n\n digest::{Input, VariableOutput},\n\n VarBlake2b,\n\n };\n\n let bytes: Vec<u8> = self.payload.clone().into();\n\n let vec_hash = VarBlake2b::new(32)\n\n .expect(\"Failed to initialize variable size hash\")\n\n .chain(bytes)\n\n .vec_result();\n\n let mut hash = [0; 32];\n\n hash.copy_from_slice(&vec_hash);\n\n hash\n\n }\n\n\n\n /// Checks if this transaction is waiting longer than specified in `transaction_time_to_live` from `QueueConfiguration` or `time_to_live_ms` of this transaction.\n\n /// Meaning that the transaction will be expired as soon as the lesser of the specified TTLs was reached.\n\n pub fn is_expired(&self, transaction_time_to_live: Duration) -> bool {\n\n let current_time = SystemTime::now()\n", "file_path": 
"iroha/src/tx.rs", "rank": 83, "score": 43604.75523706437 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<&ValidTransaction> for RequestedTransaction {\n\n fn from(transaction: &ValidTransaction) -> RequestedTransaction {\n\n let transaction = transaction.clone();\n\n RequestedTransaction {\n\n payload: transaction.payload,\n\n signatures: transaction.signatures,\n\n }\n\n }\n\n}\n\n\n\nmod event {\n\n use super::*;\n\n use crate::event::{Entity, Occurrence};\n\n\n\n impl From<&RequestedTransaction> for Occurrence {\n\n fn from(transaction: &RequestedTransaction) -> Occurrence {\n\n Occurrence::Created(Entity::Transaction(transaction.into()))\n\n }\n\n }\n\n}\n", "file_path": "iroha/src/tx.rs", "rank": 84, "score": 43604.730571106666 }, { "content": " ///\n\n /// Returns `Ok(AcceptedTransaction)` if succeeded and `Err(String)` if failed.\n\n pub fn accept(self) -> Result<AcceptedTransaction, String> {\n\n for signature in &self.signatures {\n\n if let Err(e) = signature.verify(&Vec::from(&self.payload)) {\n\n return Err(format!(\"Failed to verify signatures: {}\", e));\n\n }\n\n }\n\n Ok(AcceptedTransaction {\n\n payload: self.payload,\n\n signatures: self.signatures,\n\n })\n\n }\n\n}\n\n\n\n/// An ordered set of instructions, which is applied to the ledger atomically.\n\n///\n\n/// Transactions received by `Iroha` from external resources (clients, peers, etc.)\n\n/// go through several steps before will be added to the blockchain and stored.\n\n/// Starting in form of `RequestedTransaction` transaction it changes state based on interactions\n", "file_path": "iroha/src/tx.rs", "rank": 85, "score": 43604.69004781837 }, { "content": " instructions: Vec<Instruction>,\n\n account_id: <Account as Identifiable>::Id,\n\n proposed_ttl_ms: u64,\n\n ) -> RequestedTransaction {\n\n RequestedTransaction {\n\n payload: Payload {\n\n instructions,\n\n account_id,\n\n creation_time: SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .expect(\"Failed to get System 
Time.\")\n\n .as_millis() as u64,\n\n time_to_live_ms: proposed_ttl_ms,\n\n },\n\n signatures: Vec::new(),\n\n }\n\n }\n\n\n\n /// Transaction acceptance will check that transaction signatures are valid and move state one\n\n /// step forward.\n", "file_path": "iroha/src/tx.rs", "rank": 86, "score": 43603.59688218483 }, { "content": "//! This module contains Transaction related functionality of the Iroha.\n\n//!\n\n//! `RequestedTransaction` is the start of the Transaction lifecycle.\n\n\n\nuse crate::{crypto::KeyPair, prelude::*};\n\nuse iroha_derive::Io;\n\nuse parity_scale_codec::{Decode, Encode};\n\nuse std::{\n\n cmp::min,\n\n time::{Duration, SystemTime},\n\n};\n\n\n\n/// This structure represents transaction in non-trusted form.\n\n///\n\n/// `Iroha` and its' clients use `RequestedTransaction` to send transactions via network.\n\n/// Direct usage in business logic is strongly prohibited. Before any interactions\n\n/// `accept`.\n\n#[derive(Clone, Debug, Io, Encode, Decode)]\n\npub struct RequestedTransaction {\n\n payload: Payload,\n", "file_path": "iroha/src/tx.rs", "rank": 87, "score": 43601.38527339528 }, { "content": " Ok(())\n\n }\n\n /// Gets `public_key` and `private_key` configuration parameters.\n\n pub fn key_pair(&self) -> (PublicKey, PrivateKey) {\n\n (self.public_key, self.private_key.clone())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::peer::PeerId;\n\n use std::convert::TryFrom;\n\n\n\n const CONFIGURATION_PATH: &str = \"tests/test_config.json\";\n\n\n\n #[test]\n\n fn parse_example_json() -> Result<(), String> {\n\n let configuration = Configuration::from_path(CONFIGURATION_PATH)\n\n .map_err(|e| format!(\"Failed to read configuration from example config: {}\", e))?;\n", "file_path": "iroha/src/config.rs", "rank": 88, "score": 43593.675514324634 }, { "content": " pub domain_name: String,\n\n}\n\n\n\nimpl Id {\n\n /// `Id` constructor used to easily create an `Id` from two string slices - one for the\n\n /// 
account's name, another one for the container's name.\n\n pub fn new(name: &str, domain_name: &str) -> Self {\n\n Id {\n\n name: name.to_string(),\n\n domain_name: domain_name.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for Id {\n\n fn from(string: &str) -> Id {\n\n let vector: Vec<&str> = string.split('@').collect();\n\n Id {\n\n name: String::from(vector[0]),\n\n domain_name: String::from(vector[1]),\n", "file_path": "iroha/src/account.rs", "rank": 89, "score": 43591.51751585623 }, { "content": "pub mod query {\n\n use super::*;\n\n use crate::query::IrohaQuery;\n\n use iroha_derive::*;\n\n use parity_scale_codec::{Decode, Encode};\n\n use std::time::SystemTime;\n\n\n\n /// Get information related to the account with a specified `account_id`.\n\n #[derive(Clone, Debug, Io, IntoQuery, Encode, Decode)]\n\n pub struct GetAccount {\n\n /// Identification of an account to find information about.\n\n pub account_id: <Account as Identifiable>::Id,\n\n }\n\n\n\n /// Result of the `GetAccount` execution.\n\n #[derive(Clone, Debug, Encode, Decode)]\n\n pub struct GetAccountResult {\n\n /// Account information.\n\n pub account: Account,\n\n }\n", "file_path": "iroha/src/account.rs", "rank": 90, "score": 43590.10457384522 }, { "content": " let transaction = transaction.clone();\n\n RequestedTransaction {\n\n payload: transaction.payload,\n\n signatures: transaction.signatures,\n\n }\n\n }\n\n}\n\n\n\nimpl From<&SignedTransaction> for RequestedTransaction {\n\n fn from(transaction: &SignedTransaction) -> RequestedTransaction {\n\n let transaction = transaction.clone();\n\n RequestedTransaction::from(transaction)\n\n }\n\n}\n\n\n\nimpl From<SignedTransaction> for RequestedTransaction {\n\n fn from(transaction: SignedTransaction) -> RequestedTransaction {\n\n RequestedTransaction {\n\n payload: transaction.payload,\n\n signatures: transaction.signatures,\n", "file_path": "iroha/src/tx.rs", "rank": 91, "score": 43589.47568129639 }, { "content": "impl Account {\n\n /// 
Constructor of the detached `Account` entity without signatories.\n\n ///\n\n /// This method can be used to create an `Account` which should be registered in the domain.\n\n /// This method should not be used to create an `Account` to work with as a part of the Iroha\n\n /// State.\n\n pub fn new(account_name: &str, domain_name: &str) -> Self {\n\n Account {\n\n id: Id::new(account_name, domain_name),\n\n assets: BTreeMap::new(),\n\n signatories: Vec::new(),\n\n }\n\n }\n\n\n\n /// Constructor of the detached `Account` entity with one signatory.\n\n ///\n\n /// This method can be used to create an `Account` which should be registered in the domain.\n\n /// This method should not be used to create an `Account` to work with as a part of the Iroha\n\n /// State.\n\n pub fn with_signatory(account_name: &str, domain_name: &str, public_key: PublicKey) -> Self {\n", "file_path": "iroha/src/account.rs", "rank": 92, "score": 43586.643609537896 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Display for Id {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}@{}\", self.name, self.domain_name)\n\n }\n\n}\n\n\n\nimpl Identifiable for Account {\n\n type Id = Id;\n\n}\n\n\n\n/// Iroha Special Instructions module provides `AccountInstruction` enum with all legal types of\n\n/// Account related instructions as variants, implementations of generic Iroha Special Instructions\n\n/// and the `From/Into` implementations to convert `AccountInstruction` variants into generic ISI.\n\npub mod isi {\n\n use super::*;\n\n use crate::permission::isi::PermissionInstruction;\n", "file_path": "iroha/src/account.rs", "rank": 93, "score": 43586.26333010687 }, { "content": " }\n\n }\n\n }\n\n\n\n impl Add<Account, PublicKey> {\n\n fn execute(\n\n &self,\n\n authority: <Account as Identifiable>::Id,\n\n world_state_view: &mut WorldStateView,\n\n ) -> Result<(), String> {\n\n PermissionInstruction::CanAddSignatory(authority, self.destination_id.clone(), None)\n\n 
.execute(world_state_view)?;\n\n let public_key = self.object;\n\n let account = world_state_view\n\n .account(&self.destination_id)\n\n .ok_or(\"Failed to find account.\")?;\n\n *account += public_key;\n\n Ok(())\n\n }\n\n }\n", "file_path": "iroha/src/account.rs", "rank": 94, "score": 43585.63176534318 }, { "content": " /// Returns the account signatories list without ability to modify it.\n\n pub fn read_signatories(&self) -> &Vec<PublicKey> {\n\n &self.signatories\n\n }\n\n}\n\n\n\n/// Identification of an Account. Consists of Account's name and Domain's name.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use iroha::account::Id;\n\n///\n\n/// let id = Id::new(\"user\", \"company\");\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, PartialOrd, Ord, Eq, std::hash::Hash, Encode, Decode)]\n\npub struct Id {\n\n /// Account's name.\n\n pub name: String,\n\n /// Domain's name.\n", "file_path": "iroha/src/account.rs", "rank": 95, "score": 43582.829741293404 }, { "content": "\n\n impl GetAccount {\n\n /// Build a `GetAccount` query in the form of a `QueryRequest`.\n\n pub fn build_request(account_id: <Account as Identifiable>::Id) -> QueryRequest {\n\n let query = GetAccount { account_id };\n\n QueryRequest {\n\n timestamp: SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .expect(\"Failed to get System Time.\")\n\n .as_millis()\n\n .to_string(),\n\n signature: Option::None,\n\n query: query.into(),\n\n }\n\n }\n\n }\n\n\n\n impl Query for GetAccount {\n\n #[log]\n\n fn execute(&self, world_state_view: &WorldStateView) -> Result<QueryResult, String> {\n", "file_path": "iroha/src/account.rs", "rank": 96, "score": 43582.58649884812 }, { "content": "//! 
This module contains `Configuration` structure and related implementation.\n\nuse crate::{\n\n block_sync::config::BlockSyncConfiguration,\n\n crypto::{KeyPair, PrivateKey, PublicKey},\n\n kura::config::KuraConfiguration,\n\n peer::PeerId,\n\n queue::config::QueueConfiguration,\n\n sumeragi::config::SumeragiConfiguration,\n\n torii::config::ToriiConfiguration,\n\n};\n\nuse iroha_logger::config::LoggerConfiguration;\n\nuse serde::Deserialize;\n\nuse std::{env, fmt::Debug, fs::File, io::BufReader, path::Path};\n\n\n\nconst IROHA_PUBLIC_KEY: &str = \"IROHA_PUBLIC_KEY\";\n\nconst IROHA_PRIVATE_KEY: &str = \"IROHA_PRIVATE_KEY\";\n\n\n\n/// Configuration parameters container.\n\n#[derive(Clone, Deserialize, Debug)]\n\n#[serde(rename_all = \"UPPERCASE\")]\n", "file_path": "iroha/src/config.rs", "rank": 97, "score": 43582.09608579414 }, { "content": " }\n\n\n\n pub fn from_str(s: &str) -> Result<Configuration, String> {\n\n // let file = File::open(path).map_err(|e| format!(\"Failed to open a file: {}\", e))?;\n\n // let reader = BufReader::new(file);\n\n let mut configuration: Configuration = serde_json::from_str(s)\n\n .map_err(|e| format!(\"Failed to deserialize json from reader: {}\", e))?;\n\n configuration.sumeragi_configuration.key_pair = KeyPair {\n\n public_key: configuration.public_key,\n\n private_key: configuration.private_key.clone(),\n\n };\n\n configuration.sumeragi_configuration.peer_id = PeerId::new(\n\n &configuration.torii_configuration.torii_url,\n\n &configuration.public_key,\n\n );\n\n Ok(configuration)\n\n }\n\n\n\n /// Load environment variables and replace existing parameters with these variables values.\n\n pub fn load_environment(&mut self) -> Result<(), String> {\n", "file_path": "iroha/src/config.rs", "rank": 98, "score": 43581.9361399964 }, { "content": " /// Returns `Ok(())` if execution succeeded and `Err(String)` with error message if not.\n\n pub fn execute(\n\n &self,\n\n authority: <Account as Identifiable>::Id,\n\n world_state_view: 
&mut WorldStateView,\n\n ) -> Result<(), String> {\n\n match self {\n\n AccountInstruction::TransferAsset(\n\n source_account_id,\n\n destination_account_id,\n\n component,\n\n ) => Transfer::new(\n\n source_account_id.clone(),\n\n component.clone(),\n\n destination_account_id.clone(),\n\n )\n\n .execute(authority, world_state_view),\n\n AccountInstruction::AddSignatory(account_id, public_key) => {\n\n Add::new(*public_key, account_id.clone()).execute(authority, world_state_view)\n\n }\n", "file_path": "iroha/src/account.rs", "rank": 99, "score": 43581.57688158035 } ]
Rust
Distributed systems/dsassignment1/solution/lib.rs
rzetelskik/MIMUW
6d193bd6f252a617a275acb1c697bfc983589c0c
use std::time::Duration; use async_channel::{unbounded, Sender, Receiver}; use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; pub trait Message: Send + 'static {} impl<T: Send + 'static> Message for T {} #[async_trait::async_trait] pub trait Handler<M: Message> where M: Message, { async fn handle(&mut self, msg: M); } #[async_trait::async_trait] trait Handlee<T>: Send + 'static where T: Send, { async fn get_handled(self: Box<Self>, module: &mut T); } #[async_trait::async_trait] impl<M, T> Handlee<T> for M where T: Handler<M> + Send, M: Message, { async fn get_handled(self: Box<Self>, module: &mut T) { module.handle(*self).await } } #[async_trait::async_trait] trait Closeable { fn close(&self) -> bool; } #[async_trait::async_trait] impl<T> Closeable for Receiver<T> { fn close(&self) -> bool { self.close() } } #[derive(Debug, Clone)] pub struct Tick {} pub struct System { finish: Arc<AtomicBool>, handles: Vec<tokio::task::JoinHandle<()>>, tick_handles: Vec<tokio::task::JoinHandle<()>>, rxs: Vec<Box<dyn Closeable>> } impl System { pub async fn request_tick<T: Handler<Tick> + Send>( &mut self, requester: &ModuleRef<T>, delay: Duration, ) { if self.finish.load(Ordering::Relaxed) { panic!(); } let requester_cloned = requester.clone(); let finish_cloned = self.finish.clone(); let handle = tokio::spawn(async move { let mut interval = tokio::time::interval(delay); loop { interval.tick().await; if finish_cloned.load(Ordering::Relaxed) { break; } requester_cloned.send(Tick{}).await; }; }); self.tick_handles.push(handle); } pub async fn register_module<T: Send + 'static>(&mut self, module: T) -> ModuleRef<T> { if self.finish.load(Ordering::Relaxed) { panic!(); } let (tx, rx): (Sender<Box<dyn Handlee<T>>>, Receiver<Box<dyn Handlee<T>>>) = unbounded(); let rx_cloned = rx.clone(); let finish_cloned = self.finish.clone(); let mut mut_module = module; let handle = tokio::spawn(async move { while !finish_cloned.load(Ordering::Relaxed) { match rx_cloned.recv().await { 
Ok(msg) => { if finish_cloned.load(Ordering::Relaxed) { break; } msg.get_handled(&mut mut_module).await; } Err(_) => { break; } } } }); self.handles.push(handle); self.rxs.push(Box::new(rx)); ModuleRef{ tx } } pub async fn new() -> Self { System{ finish: Arc::new(AtomicBool::new(false)), handles: Vec::new(), tick_handles: Vec::new(), rxs: Vec::new(), } } pub async fn shutdown(&mut self) { if self.finish.load(Ordering::Relaxed) { panic!(); } self.finish.store(true, Ordering::Relaxed); for rx in self.rxs.iter_mut() { rx.close(); } for handle in self.tick_handles.iter_mut() { let _ = handle.await; } for handle in self.handles.iter_mut() { let _ = handle.await; } } } pub struct ModuleRef<T: Send + 'static> { tx: Sender<Box<dyn Handlee<T>>>, } impl<T: Send> ModuleRef<T> { pub async fn send<M: Message>(&self, msg: M) where T: Handler<M>, { let _ = self.tx.send(Box::new(msg)).await; } } impl<T: Send> Clone for ModuleRef<T> { fn clone(&self) -> Self { ModuleRef{ tx: self.tx.clone(), } } }
use std::time::Duration; use async_channel::{unbounded, Sender, Receiver}; use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; pub trait Message: Send + 'static {} impl<T: Send + 'static> Message for T {} #[async_trait::async_trait] pub trait Handler<M: Message> where M: Message, { async fn handle(&mut self, msg: M); } #[async_trait::async_trait] trait Handlee<T>: Send + 'static where T: Send, { async fn get_handled(self: Box<Self>, module: &mut T); } #[async_trait::async_trait] impl<M, T> Handlee<T> for M where T: Handler<M> + Send, M: Message, { async fn get_handled(self: Box<Self>, module: &mut T) { module.handle(*self).await } } #[async_trait::async_trait] trait Closeable { fn close(&self) -> bool; } #[async_trait::async_trait] impl<T> Closeable for Receiver<T> { fn close(&self) -> bool { self.close() } } #[derive(Debug, Clone)] pub struct Tick {} pub struct System { finish: Arc<AtomicBool>, handles: Vec<tokio::task::JoinHandle<()>>, tick_handles: Vec<tokio::task::JoinHandle<()>>, rxs: Vec<Box<dyn Closeable>> } impl System { pub async fn request_tick<T: Handler<Tick> + Send>( &mut self, requester: &ModuleRef<T>, delay: Duration, ) { if self.finish.load(Ordering::Relaxed) { panic!(); } let requester_cloned = requester.clone(); let finish_cloned = self.finish.clone(); let handle = tokio::spawn(async move { let mut interval = tokio::time::interval(delay); loop { interval.tick().await; if finish_cloned.load(Ordering::Relaxed) { break; } requester_cloned.send(Tick{}).await; }; }); self.tick_handles.push(handle); } pub async fn register_module<T: Send + 'static>(&mut self, module: T) -> ModuleRef<T> { if self.finish.load(Ordering::Relaxed) { panic!(); } let (tx, rx): (Sender<Box<dyn Handlee<T>>>, Receiver<Box<dyn Handlee<T>>>) = unbounded(); let rx_cloned = rx.clone(); let finish_cloned = self.finish.clone(); let mut mut_module = module; let handle = tokio::spawn(async move { while !finish_cloned.load(Ordering::Relaxed) { match rx_cloned.recv().await { 
Ok(msg) => { if finish_cloned.load(Ordering::Relaxed) { break; } msg.get_handled(&mut mut_module).await; }
pub async fn new() -> Self { System{ finish: Arc::new(AtomicBool::new(false)), handles: Vec::new(), tick_handles: Vec::new(), rxs: Vec::new(), } } pub async fn shutdown(&mut self) { if self.finish.load(Ordering::Relaxed) { panic!(); } self.finish.store(true, Ordering::Relaxed); for rx in self.rxs.iter_mut() { rx.close(); } for handle in self.tick_handles.iter_mut() { let _ = handle.await; } for handle in self.handles.iter_mut() { let _ = handle.await; } } } pub struct ModuleRef<T: Send + 'static> { tx: Sender<Box<dyn Handlee<T>>>, } impl<T: Send> ModuleRef<T> { pub async fn send<M: Message>(&self, msg: M) where T: Handler<M>, { let _ = self.tx.send(Box::new(msg)).await; } } impl<T: Send> Clone for ModuleRef<T> { fn clone(&self) -> Self { ModuleRef{ tx: self.tx.clone(), } } }
Err(_) => { break; } } } }); self.handles.push(handle); self.rxs.push(Box::new(rx)); ModuleRef{ tx } }
function_block-function_prefix_line
[ { "content": "/// Run the executor.\n\nfn run_executor(rx: Receiver<FibonacciSystemMessage>) -> JoinHandle<()> {\n\n let mut modules: HashMap<Ident, FibonacciModule> = HashMap::new();\n\n\n\n thread::spawn(move || {\n\n while let Ok(msg) = rx.recv() {\n\n match msg {\n\n FibonacciSystemMessage::RegisterModule(module) => drop(modules.insert(module.id, module)),\n\n FibonacciSystemMessage::Init{id, other} => modules.get_mut(&id).unwrap().init(other),\n\n FibonacciSystemMessage::Message{id, idx, num} => modules.get_mut(&id).unwrap().message(idx, num),\n\n FibonacciSystemMessage::Done => break\n\n }\n\n }\n\n })\n\n}\n\n\n\n/// Calculate the `n`-th Fibonacci number.\n\npub(crate) fn fib(n: usize) {\n\n // Create the queue and two modules:\n\n let (tx, rx): (\n\n Sender<FibonacciSystemMessage>,\n", "file_path": "Distributed systems/dslab03/solution.rs", "rank": 1, "score": 306726.95073094097 }, { "content": "#[async_trait::async_trait]\n\npub trait Recipient<M>: Send + Sync + 'static\n\nwhere\n\n M: Message,\n\n{\n\n async fn send(&self, msg: M);\n\n fn clone_to_box(&self) -> BoxedRecipient<M>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<M, T> Recipient<M> for ModuleRef<T>\n\nwhere\n\n M: Message,\n\n T: Handler<M> + Send,\n\n{\n\n async fn send(&self, msg: M) {\n\n self.send(msg).await;\n\n }\n\n\n\n fn clone_to_box(&self) -> BoxedRecipient<M> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "Distributed systems/dslab10/main.rs", "rank": 2, "score": 287070.2575532522 }, { "content": "// This client sends one message to the echo server:\n\npub fn send_msg() {\n\n // Connect to a remote host (if you need to specify connecting\n\n // timeout, use `connect_timeout()` instead):\n\n let mut stream = TcpStream::connect(\"127.0.0.1:8889\").unwrap();\n\n\n\n // Send the message:\n\n let msg = \"This is test\";\n\n println!(\"[Client] Sending: '{}'.\", msg);\n\n stream.write_all(msg.as_bytes()).unwrap();\n\n\n\n // Shutdown the write half of this connection (any 
currently blocked\n\n // or future write attempt will return an error):\n\n stream.shutdown(Shutdown::Write).unwrap();\n\n\n\n // Receive the response. The `read_to_end()` method reads all bytes until\n\n // `EOF` and appends them to the buffer, extenting the vector if needed:\n\n let mut buf = Vec::new();\n\n stream.read_to_end(&mut buf).unwrap();\n\n println!(\n\n \"[Client] Received: '{}'.\",\n\n std::string::String::from_utf8(buf).unwrap()\n\n );\n\n}\n", "file_path": "Distributed systems/dslab05/examples/tcp-communication/client.rs", "rank": 3, "score": 279248.94447328814 }, { "content": "struct Init<T: Send + 'static> {\n\n module_ref: ModuleRef<T>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<Init<Self>> for CyberStore2047 {\n\n async fn handle(&mut self, msg: Init<Self>) {\n\n self.module_ref = Some(msg.module_ref);\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<Init<Self>> for Node {\n\n async fn handle(&mut self, msg: Init<Self>) {\n\n self.module_ref = Some(msg.module_ref);\n\n }\n\n}\n\n\n\n\n\n#[async_trait::async_trait]\n", "file_path": "Distributed systems/dslab09/solution.rs", "rank": 4, "score": 243669.8027833159 }, { "content": "#[async_trait::async_trait]\n\npub trait StableStorage: Send + Sync {\n\n /// Stores `value` under `key`.\n\n ///\n\n /// Detailed requirements are specified in the description of the assignment.\n\n async fn put(&mut self, key: &str, value: &[u8]) -> Result<(), String>;\n\n\n\n /// Retrieves value stored under `key`.\n\n ///\n\n /// Detailed requirements are specified in the description of the assignment.\n\n async fn get(&self, key: &str) -> Option<Vec<u8>>;\n\n}\n\n\n", "file_path": "Distributed systems/dslab06/solution.rs", "rank": 6, "score": 240637.9135871374 }, { "content": "// This server echoes incoming requests.\n\n// For simplicity, it finishes itself after serving one request:\n\npub fn echo_once(server_up_notify: Sender<()>) {\n\n // Bind to 127.0.0.1:8889:\n\n let socket = 
TcpListener::bind(\"127.0.0.1:8889\").unwrap();\n\n\n\n // Notify the example that it is ready to serve requests:\n\n server_up_notify.send(()).unwrap();\n\n\n\n // Get the first element from the iterator over incoming connections:\n\n for stream in socket.incoming().take(1) {\n\n println!(\"new connection!\");\n\n let mut buf = Vec::new();\n\n let mut stream = stream.unwrap();\n\n\n\n // Receive all data:\n\n stream.read_to_end(&mut buf).unwrap();\n\n\n\n println!(\n\n \"[Server] Received: '{}'.\",\n\n std::str::from_utf8(&buf).unwrap()\n\n );\n\n\n\n // Echo the message:\n\n stream.write_all(&buf).unwrap();\n\n }\n\n}\n", "file_path": "Distributed systems/dslab05/examples/tcp-communication/server.rs", "rank": 7, "score": 231582.89915136015 }, { "content": "#[async_trait::async_trait]\n\npub trait SectorsManager: Send + Sync {\n\n /// Returns 4096 bytes of sector data by index.\n\n async fn read_data(&self, idx: SectorIdx) -> SectorVec;\n\n\n\n /// Returns timestamp and write rank of the process which has saved this data.\n\n /// Timestamps and ranks are relevant for atomic register algorithm, and are described\n\n /// there.\n\n async fn read_metadata(&self, idx: SectorIdx) -> (u64, u8);\n\n\n\n /// Writes a new data, along with timestamp and write rank to some sector.\n\n async fn write(&self, idx: SectorIdx, sector: &(SectorVec, u64, u8));\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment2/solution/src/sectors_manager_public.rs", "rank": 8, "score": 229287.83057436018 }, { "content": "#[async_trait::async_trait]\n\npub trait AtomicRegister: Send + Sync {\n\n /// Send client command to the register. After it is completed, we expect\n\n /// callback to be called. 
Note that completion of client command happens after\n\n /// delivery of multiple system commands to the register, as the algorithm specifies.\n\n async fn client_command(\n\n &mut self,\n\n cmd: ClientRegisterCommand,\n\n operation_complete: Box<\n\n dyn FnOnce(OperationComplete) -> Pin<Box<dyn Future<Output=()> + Send>>\n\n + Send\n\n + Sync,\n\n >,\n\n );\n\n\n\n /// Send system command to the register.\n\n async fn system_command(&mut self, cmd: SystemRegisterCommand);\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment2/solution/src/atomic_register_public.rs", "rank": 9, "score": 229287.83057436018 }, { "content": "/// A helper trait for small amount of durable metadata needed by the register algorithm\n\n/// itself. Again, it is only for AtomicRegister definition. StableStorage in unit tests\n\n/// is durable, as one could expect.\n\npub trait StableStorage: Send + Sync {\n\n async fn put(&mut self, key: &str, value: &[u8]) -> Result<(), String>;\n\n\n\n async fn get(&self, key: &str) -> Option<Vec<u8>>;\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment2/solution/src/stable_storage_public.rs", "rank": 10, "score": 229280.9272462853 }, { "content": "// Closure is also a trait: `Fn<Args>`. 
This way, closures can be stored\n\n// in structs and returned from functions:\n\nfn closure_trait() -> Box<dyn Fn(u32) -> bool> {\n\n let even: Box<dyn Fn(u32) -> bool> = Box::new(|x: u32| x % 2 == 0);\n\n\n\n println!(\"closure_trait: {:?}\", even(8));\n\n\n\n even\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 11, "score": 226635.58839091496 }, { "content": "// This server echoes incoming requests.\n\n// For simplicity, it finishes itself after serving two requests:\n\npub fn echo_server(server_ready_notify: Sender<()>) {\n\n // Bind to 127.0.0.1:8889:\n\n let socket = UdpSocket::bind(\"127.0.0.1:8889\").unwrap();\n\n\n\n // Notify the example that it is ready to serve requests:\n\n server_ready_notify.send(()).unwrap();\n\n\n\n let mut buf = [0; 20];\n\n\n\n for _ in 0..2 {\n\n // Receive incoming data (it blocks until some data is available):\n\n let (count, addr) = socket.recv_from(&mut buf).unwrap();\n\n println!(\n\n \"[Server] Received {} bytes. 
Message: '{}'.\",\n\n count,\n\n std::str::from_utf8(&buf[..count]).unwrap()\n\n );\n\n // Echo the message:\n\n socket.send_to(&buf[..count], addr).unwrap();\n\n }\n\n}\n", "file_path": "Distributed systems/dslab05/examples/udp-communication/server.rs", "rank": 12, "score": 226258.3233944134 }, { "content": "fn get_callback(config: Arc<RegisterProcessConfig>, cmd: ClientRegisterCommand, stream: Arc<Mutex<dyn AsyncWrite + core::marker::Send + Unpin>>, sockaddr: SocketAddr, internal_ack_tx: UnboundedSender<SectorIdx>, permit: OwnedSemaphorePermit) -> Callback {\n\n Box::new(move |mut op_complete| Box::pin(async move {\n\n op_complete.request_identifier = cmd.header.request_identifier;\n\n\n\n internal_ack_tx.send(cmd.header.sector_idx).unwrap();\n\n if let Err(err) = serialize_operation_complete(&op_complete, stream, &config.hmac_client_key).await {\n\n error!(\"[Process {}] Can't send operation complete to {}: {:?}\", config.self_ident, sockaddr.to_string(), err);\n\n }\n\n drop(permit);\n\n\n\n debug!(\"[Process {}] Completed a {} operation on sector {}\", config.self_ident, match cmd.content {\n\n ClientRegisterCommandContent::Read => \"READ\",\n\n ClientRegisterCommandContent::Write { .. 
} => \"WRITE\"\n\n }, cmd.header.sector_idx);\n\n }))\n\n}\n\n\n\nasync fn serialize_operation_complete(operation_complete: &OperationComplete, writer: Arc<Mutex<dyn AsyncWrite + core::marker::Send + Unpin>>, hmac_client_key: &[u8]) -> Result<(), Error> {\n\n let mut buffer = Vec::<u8>::new();\n\n let (msg_type, content) = unpack_operation_return(&operation_complete.op_return);\n", "file_path": "Distributed systems/dsassignment2/solution/src/register_process.rs", "rank": 14, "score": 222229.5669766215 }, { "content": "struct FutureInt5 {}\n\n\n\n// Put the macro on top of every trait implementation:\n\n#[async_trait::async_trait]\n\nimpl FutureInt for FutureInt5 {\n\n async fn get(&self) -> i32 {\n\n 5\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n println!(\n\n \"Async trait: {}\",\n\n FutureInt5 {}.get().await\n\n );\n\n}\n", "file_path": "Distributed systems/dslab04/examples/async_trait.rs", "rank": 15, "score": 219711.74864742605 }, { "content": "fn ownership_copy_move_clone() {\n\n let i1 = 42u32;\n\n // Above type of the variable is inferred from the type of the value: u32.\n\n let v1 = vec![0, 1];\n\n // Above type of the vector's elements defaults to i32.\n\n\n\n let i2 = i1;\n\n // Above value is copied so both variables own their own values.\n\n let v2 = v1;\n\n // Above value is moved so `v1` no longer owns the vector.\n\n let v3 = v2.clone();\n\n // Above value is cloned so `v3` owns a copy of the vector.\n\n\n\n println!(\"Old integer: {}\", i1);\n\n // Won't compile:\n\n // println!(\"Old vector: {:?}\", v1);\n\n println!(\"New integer: {}\", i2);\n\n println!(\"Moved vector: {:?}\", v2);\n\n println!(\"Cloned vector: {:?}\", v3);\n\n} // `v2` and `v3` go out of scope, so their values are dropped (the vectors are\n\n // deallocated). 
`v1` also goes out of scope but since it doesn't own any value\n\n // here, nothing is dropped (thus the vector isn't dropped twice).\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 16, "score": 213643.09054192307 }, { "content": "/// We do not need any public implementation of this trait. It is there for use\n\n/// in AtomicRegister. In our opinion it is a safe bet to say some structure of\n\n/// this kind must appear in your solution.\n\npub trait RegisterClient: core::marker::Send + core::marker::Sync {\n\n /// Sends a system message to a single process.\n\n async fn send(&self, msg: Send);\n\n\n\n /// Broadcasts a system message to all processes in the system, including self.\n\n async fn broadcast(&self, msg: Broadcast);\n\n}\n\n\n\npub struct Broadcast {\n\n pub cmd: Arc<SystemRegisterCommand>,\n\n}\n\n\n\npub struct Send {\n\n pub cmd: Arc<SystemRegisterCommand>,\n\n /// Identifier of the target process. Those start at 1.\n\n pub target: usize,\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment2/solution/src/register_client_public.rs", "rank": 17, "score": 200399.00838920948 }, { "content": "pub fn assert_system_cmd_header(\n\n serialized: &[u8],\n\n msg_ident: &[u8; 16],\n\n process_identifier: u8,\n\n msg_type: u8,\n\n read_ident: u64,\n\n sector_idx: u64,\n\n) {\n\n assert_eq!(&serialized[0..4], MAGIC_NUMBER.as_ref());\n\n assert_eq!(*serialized.get(6).unwrap(), process_identifier);\n\n assert_eq!(*serialized.get(7).unwrap(), msg_type);\n\n assert_eq!(&serialized[8..24], msg_ident);\n\n assert_eq!(\n\n u64::from_be_bytes(serialized[24..32].try_into().unwrap()),\n\n read_ident\n\n );\n\n assert_eq!(\n\n u64::from_be_bytes(serialized[32..40].try_into().unwrap()),\n\n sector_idx\n\n );\n\n}\n", "file_path": "Distributed systems/dsassignment2/test-utils/transfer.rs", "rank": 18, "score": 194034.50730057555 }, { "content": "pub fn unpack_system_command_content(\n\n content: &SystemRegisterCommandContent,\n\n) -> 
(SystemRegisterCommandType, Option<(&u64, &u8, &SectorVec)>) {\n\n match content {\n\n SystemRegisterCommandContent::ReadProc => (SystemRegisterCommandType::ReadProc, None),\n\n SystemRegisterCommandContent::Value {\n\n timestamp,\n\n write_rank,\n\n sector_data,\n\n } => (\n\n SystemRegisterCommandType::Value,\n\n Some((timestamp, write_rank, sector_data)),\n\n ),\n\n SystemRegisterCommandContent::WriteProc {\n\n timestamp,\n\n write_rank,\n\n data_to_write,\n\n } => (\n\n SystemRegisterCommandType::WriteProc,\n\n Some((timestamp, write_rank, data_to_write)),\n", "file_path": "Distributed systems/dsassignment2/solution/src/transfer_public.rs", "rank": 19, "score": 192107.22218512915 }, { "content": "fn verify_hmac_tag(tag: &[u8], message: &str, secret_key: &[u8]) -> bool {\n\n // Initialize a new MAC instance from the secret key:\n\n let mut mac = HmacSha256::new_from_slice(secret_key).unwrap();\n\n\n\n // Calculate MAC for the data (one can provide it in multiple portions):\n\n mac.update(message.as_bytes());\n\n\n\n // Verify the tag:\n\n mac.verify(tag).is_ok()\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/hmac.rs", "rank": 20, "score": 190884.3224336024 }, { "content": "// This client sends two messages to the echo server:\n\npub fn client() {\n\n // Bind to a socket:\n\n let socket = UdpSocket::bind(\"0.0.0.0:0\").unwrap();\n\n\n\n\n\n // Send the first message directly specifying the address of the recipient:\n\n let count = socket\n\n .send_to(b\"testing UTF-8 string\", \"127.0.0.1:8889\")\n\n .unwrap();\n\n println!(\"[Client] Sent {} bytes.\", count);\n\n\n\n // Receive the response:\n\n let mut buf = [0; 32];\n\n let count = socket.recv(&mut buf).unwrap();\n\n println!(\n\n \"[Client] Received: '{}'.\",\n\n std::str::from_utf8(&buf[..count]).unwrap()\n\n );\n\n\n\n\n", "file_path": "Distributed systems/dslab05/examples/udp-communication/client.rs", "rank": 21, "score": 188018.71417020724 }, { "content": 
"#[async_trait::async_trait]\n\ntrait FutureInt {\n\n async fn get(&self) -> i32;\n\n}\n\n\n", "file_path": "Distributed systems/dslab04/examples/async_trait.rs", "rank": 22, "score": 182074.1103875771 }, { "content": "pub fn unpack_client_command_content(\n\n content: &ClientRegisterCommandContent,\n\n) -> (ClientRegisterCommandType, Option<&SectorVec>) {\n\n match content {\n\n ClientRegisterCommandContent::Read => (ClientRegisterCommandType::Read, None),\n\n ClientRegisterCommandContent::Write { data } => {\n\n (ClientRegisterCommandType::Write, Some(data))\n\n }\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment2/solution/src/transfer_public.rs", "rank": 23, "score": 181573.29605251673 }, { "content": "#[derive(Clone, Default)]\n\nstruct ExecutorSender {\n\n processes: Arc<Mutex<HashMap<Uuid, BoxedRecipient<RaftMessage>>>>,\n\n}\n\n\n\nimpl ExecutorSender {\n\n async fn insert(&self, id: Uuid, addr: BoxedRecipient<RaftMessage>) {\n\n self.processes.lock().await.insert(id, addr);\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl crate::solution::Sender for ExecutorSender {\n\n async fn send(&self, target: &Uuid, msg: RaftMessage) {\n\n if let Some(addr) = self.processes.lock().await.get(target) {\n\n let addr = addr.clone_to_box();\n\n addr.send(msg).await;\n\n }\n\n }\n\n\n\n async fn broadcast(&self, msg: RaftMessage) {\n\n let map = self.processes.lock().await;\n\n for addr in map.values() {\n\n let addr = addr.clone_to_box();\n\n addr.send(msg).await;\n\n }\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab10/main.rs", "rank": 25, "score": 176721.021788493 }, { "content": "fn add_by_mut_reference(v: &mut Vec<u8>) {\n\n v.push(42);\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 26, "score": 175411.64884491908 }, { "content": "fn timeout_receiving() {\n\n let (_tx, rx): (Sender<u8>, Receiver<u8>) = unbounded();\n\n\n\n // If required, the receiving operation can time out after\n\n // a 
specified duration:\n\n assert_eq!(\n\n Err(RecvTimeoutError::Timeout),\n\n rx.recv_timeout(Duration::from_millis(20))\n\n );\n\n}\n\n\n", "file_path": "Distributed systems/dslab03/examples/crossbeam.rs", "rank": 27, "score": 173740.43854411793 }, { "content": "fn closures_traits() {\n\n let mut env1 = vec![1];\n\n let mut env2 = vec![2];\n\n let mut env3 = vec![3];\n\n #[allow(unused_mut)]\n\n let mut env4 = vec![4];\n\n\n\n let mut holder = ClosureHolder {\n\n fn_once_closure: Box::new(move |x| {\n\n env1.push(x);\n\n println!(\"FnOnce: {:?}\", env1)\n\n }),\n\n fn_once_closure_2: Box::new(move |x| {\n\n env2.push(x);\n\n println!(\"FnOnce: {:?}\", env2)\n\n }),\n\n fn_mut_closure: Box::new(move |x| {\n\n env3.push(x);\n\n println!(\"FnMut: {:?}\", env3)\n\n }),\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 28, "score": 173697.01554461216 }, { "content": "struct ConnectNodeToInternetMessage {\n\n addr: ChordAddr,\n\n node_ref: ModuleRef<ChordNode>,\n\n conn_notifier: Sender<bool>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<ConnectNodeToInternetMessage> for Internet {\n\n async fn handle(&mut self, msg: ConnectNodeToInternetMessage) {\n\n self.connect_node(&msg.addr, &msg.node_ref).await;\n\n msg.conn_notifier.send(true).await.unwrap();\n\n }\n\n}\n\n\n\npub(crate) async fn wire_all_chord_nodes_given_global_info_from_oracle<'a, I>(\n\n node_refs: I,\n\n all_nodes: &Arc<BTreeMap<ChordId, ChordAddr>>,\n\n) where\n\n I: Iterator<Item = &'a ModuleRef<ChordNode>> + Clone,\n\n{\n", "file_path": "Distributed systems/dslab13/main.rs", "rank": 29, "score": 171875.10800133334 }, { "content": "fn may_panic() {\n\n // Result<T, E> is a return type of all operations that may fail.\n\n // It represents either the correct result or a failure. E.g.:\n\n let parsed_or_not: Result<u8, std::num::ParseIntError> = \"7\".to_string().parse::<u8>();\n\n\n\n // unwrap() calls panic! 
when the result is a failure (by default, in\n\n // a full-fledged OS, panic! stops the application and unwinds the stack).\n\n // Otherwise it returs the value. Let's try to get the number:\n\n println!(\"Parsed: {}\", parsed_or_not.unwrap());\n\n}\n\n\n\n// The compiler warns about unused functions, but the warning can be suppressed\n\n// with a special attribute. It also warns about unused variables, but\n\n// the warning can be suppressed by prefixing them with an underscore. E.g.:\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 30, "score": 171384.81316896185 }, { "content": "fn moving_values_into_thread() {\n\n let mut v = vec![1, 2, 3, 4];\n\n\n\n // Not every value can be moved into another thread, only these which are\n\n // Send. Most types are Send, but some are not (e.g., raw pointers and Rc).\n\n let thread = spawn(move || {\n\n v.push(5);\n\n println!(\"{:?}\", v);\n\n });\n\n thread.join().unwrap();\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/threads.rs", "rank": 31, "score": 171377.1182896485 }, { "content": "fn closure_moving_values() {\n\n let mut vec: Vec<u32> = vec![];\n\n\n\n // The `move` keyword forces moving the value of `vec` into a closure\n\n // (the ownership is transferred to the closure):\n\n let mut add_to_vec = move |x: u32| {\n\n vec.push(x);\n\n println!(\"closure_moving_values: {:?}\", vec);\n\n };\n\n\n\n // Won't compile, the value of `vec` was moved to the closure:\n\n // println!(\"{:?}\", vec);\n\n\n\n add_to_vec(1);\n\n add_to_vec(2);\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 32, "score": 171377.1182896485 }, { "content": "fn atomic_bool() {\n\n let is_first = Arc::new(AtomicBool::new(true));\n\n let is_first_clone = is_first.clone();\n\n\n\n let thread = spawn(move || {\n\n is_first_clone.store(false, Ordering::Relaxed);\n\n });\n\n\n\n println!(\"Am I first? 
{}\", is_first.load(Ordering::Relaxed));\n\n\n\n thread.join().unwrap();\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/shared_memory.rs", "rank": 33, "score": 171373.4906970327 }, { "content": "// Now two ChattyIntegers can be added:\n\nfn struct_example() {\n\n let i1 = ChattyInteger { num: 7 };\n\n let i2 = ChattyInteger { num: 8 };\n\n\n\n // Add them using the `+` operator:\n\n println!(\"Sum {:?}\", i1 + i2);\n\n\n\n // Add them calling the add() method directly:\n\n println!(\"Sum {:?}\", i1.add(i2));\n\n\n\n // Add them calling the add() method directly and using a more explicit syntax:\n\n println!(\"Sum {:?}\", ChattyInteger::add(i1, i2));\n\n\n\n // Add them calling the add() method directly and using a fully qualified\n\n // syntax. It would be useful if ChattyInteger also implemented an add()\n\n // method, but we wanted to call the add() method of the Add trait\n\n // implemented for ChattyInteger:\n\n println!(\"Sum {:?}\", <ChattyInteger as Add>::add(i1, i2));\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 34, "score": 171277.484407118 }, { "content": "fn run_async_function() {\n\n // Create a future by calling an asynchronous function:\n\n let future = async_function();\n\n\n\n // Run the future to completion in the current thread:\n\n futures::executor::block_on(future);\n\n}\n\n\n\n\n\nasync fn action1_step1() -> String {\n\n String::from(\"Step 1 of Action 1\")\n\n}\n\n\n\nasync fn action1_step2(str: String) {\n\n println!(\"Step 2 of Action 1 follows {}\", str);\n\n}\n\n\n\n// By using `.await` we do not block the thread until each step is completed.\n\n// Contrarily, we make it possible to advance the other action when the futures\n\n// are not completed:\n", "file_path": "Distributed systems/dslab04/examples/futures.rs", "rank": 35, "score": 170932.79689632828 }, { "content": "fn hmac_tag_is_ok(key: &[u8], data: &[u8]) -> bool {\n\n let boundary = data.len() - HMAC_TAG_SIZE;\n\n let 
mut mac = HmacSha256::new_from_slice(key).unwrap();\n\n mac.update(&data[..boundary]);\n\n mac.verify(&data[boundary..]).is_ok()\n\n}\n", "file_path": "Distributed systems/dsassignment2/public-tests/tests/system.rs", "rank": 36, "score": 165908.54854544275 }, { "content": "fn tcp_stream_api_example(stream: &mut TcpStream) {\n\n // Set the value of the `TCP_NODELAY` option for this socket:\n\n stream.set_nodelay(true).unwrap();\n\n\n\n // Get the value of the `TCP_TTL` option for this socket:\n\n println!(\"TTL: {}\", stream.ttl().unwrap());\n\n\n\n // Get the write timeout of this socket:\n\n println!(\"Write timeout: {:#?}\", stream.write_timeout().unwrap());\n\n\n\n // Set the read timeout for this socket:\n\n stream\n\n .set_read_timeout(Some(Duration::from_millis(500)))\n\n .unwrap();\n\n\n\n {\n\n // Cloning the stream creates a new handle to the same OS socket:\n\n let _cloned = stream.try_clone().unwrap();\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/tcp-communication/main.rs", "rank": 37, "score": 162572.11902575076 }, { "content": "type Task = Box<dyn FnOnce() + Send>;\n\n\n\n// You can define new types (e.g., structs) if you need.\n\n// However, they shall not be public (i.e., do not use the `pub` keyword).\n\n\n\n/// The thread pool.\n\npub struct Threadpool {\n\n // Add here any fields you need.\n\n // We suggest storing handles of the worker threads, submitted tasks,\n\n // and an information whether the pool is running or it is to be finished.\n\n handles: Option<Vec<JoinHandle<()>>>,\n\n queue: Arc<(Mutex<VecDeque<Task>>, Condvar)>,\n\n finish: Arc<AtomicBool>\n\n}\n\n\n\nimpl Threadpool {\n\n /// Create new thread pool with `workers_count` workers.\n\n pub fn new(workers_count: usize) -> Self {\n\n let queue: Arc<(Mutex<VecDeque<Task>>, Condvar)> = Arc::new((Mutex::new(VecDeque::new()), Condvar::new()));\n\n let mut handles: Vec<JoinHandle<()>> = Vec::with_capacity(workers_count);\n", "file_path": "Distributed 
systems/dslab02/solution.rs", "rank": 38, "score": 154962.4212670314 }, { "content": "// Put the macro on top of trait definition:\n\n#[async_trait::async_trait]\n", "file_path": "Distributed systems/dslab04/examples/async_trait.rs", "rank": 39, "score": 146289.9847737405 }, { "content": "/// Path parameter points to a directory to which this method has exclusive access.\n\npub fn build_sectors_manager(storage_dir: PathBuf) -> Arc<dyn SectorsManager> {\n\n Arc::new(InternalSectorsManager::new(storage_dir))\n\n}\n", "file_path": "Distributed systems/dsassignment2/solution/src/sectors_manager_public.rs", "rank": 40, "score": 143524.56550807616 }, { "content": "fn prepare_msg(name: &str, round: u32) -> String {\n\n format!(\"In {}: received {}\\n\", name, round)\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<Ball> for PingPong {\n\n async fn handle(&mut self, _msg: Ball) {\n\n self.log_sender\n\n .send(prepare_msg(self.name, self.received_msgs))\n\n .await\n\n .unwrap();\n\n\n\n self.received_msgs += 1;\n\n if self.received_msgs < ROUNDS {\n\n self.other.as_ref().unwrap().send(Ball {}).await;\n\n }\n\n }\n\n}\n\n\n\nasync fn initialize_system(sys: &mut System) -> Receiver<String> {\n", "file_path": "Distributed systems/dsassignment1/public-tests/tests/executors.rs", "rank": 41, "score": 142808.08153418067 }, { "content": "fn calculate_hmac_tag(message: &str, secret_key: &[u8]) -> [u8; 32] {\n\n // Initialize a new MAC instance from the secret key:\n\n let mut mac = HmacSha256::new_from_slice(secret_key).unwrap();\n\n\n\n // Calculate MAC for the data (one can provide it in multiple portions):\n\n mac.update(message.as_bytes());\n\n\n\n // Finalize the computations of MAC and obtain the resulting tag:\n\n let tag = mac.finalize().into_bytes();\n\n\n\n tag.into()\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/hmac.rs", "rank": 42, "score": 141441.78202363112 }, { "content": "fn decrypt(encrypted_message: &[u8], private_key: &RsaPrivateKey) -> 
String {\n\n // Use the PKCS1v15 scheme:\n\n let padding = PaddingScheme::new_pkcs1v15_encrypt();\n\n\n\n // Decrypt the message:\n\n let decrypted = private_key.decrypt(padding, encrypted_message).unwrap();\n\n\n\n // Convert the decrypted message to String:\n\n String::from_utf8(decrypted).unwrap()\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/rsa.rs", "rank": 43, "score": 140884.53750892074 }, { "content": "fn encrypt(message: &str, public_key: &RsaPublicKey) -> Vec<u8> {\n\n // Use the PKCS1v15 scheme:\n\n let padding = PaddingScheme::new_pkcs1v15_encrypt();\n\n\n\n // Encrypt the message:\n\n public_key\n\n .encrypt(&mut OsRng, padding, message.as_bytes())\n\n .unwrap()\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/rsa.rs", "rank": 44, "score": 139544.2118061466 }, { "content": "fn decrypt(encrypted_message: &[u8], key: &[u8], iv: &[u8]) -> String {\n\n // Create a new block mode instance from the key and the IV:\n\n let cipher = Aes128Cbc::new_from_slices(key, iv).unwrap();\n\n\n\n // Decrypt the message:\n\n let decrypted = cipher.decrypt_vec(encrypted_message).unwrap();\n\n\n\n // Convert the decrypted message to String:\n\n String::from_utf8(decrypted).unwrap()\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/aes_cbc.rs", "rank": 45, "score": 138439.3518858996 }, { "content": "fn encrypt(message: &str, key: &[u8], iv: &[u8]) -> Vec<u8> {\n\n // Create a new block mode instance from the key and the IV:\n\n let cipher = Aes128Cbc::new_from_slices(key, iv).unwrap();\n\n\n\n // Encrypt the message:\n\n cipher.encrypt_vec(message.as_bytes())\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/aes_cbc.rs", "rank": 46, "score": 137512.9187495745 }, { "content": "static int atdd_finish_all_completed_requests(struct atdd_ktread_data *data)\n\n{\n\n int err;\n\n int rq_status;\n\n struct atdd_cmd_return cmd_r;\n\n struct request *rq;\n\n int rq_pos;\n\n\n\n while ((err = atdd_fetch_command(data->atdd_c, 
&cmd_r)) > 0) {\n\n rq_pos = atdd_find_request_pos(data->requests_buf, cmd_r.ident);\n\n if (rq_pos < 0) {\n\n printk(KERN_WARNING \"Command completed unsuccessfully\");\n\n blk_end_request_all(rq, -EIO);\n\n continue;\n\n }\n\n\n\n rq = data->requests_buf[rq_pos].content;\n\n data->requests_buf[rq_pos] = data->requests_buf[data->buf_completed_pos];\n\n data->requests_buf[data->buf_completed_pos] = (struct atdd_request){\n\n .ident = cmd_r.ident,\n\n .content = rq\n\n };\n\n ++data->buf_completed_pos;\n\n rq_status = convert_atdd_to_blk_status(cmd_r.rcode);\n\n\n\n if (rq_status != 0) {\n\n printk(KERN_WARNING \"Command completed unsuccessfully\");\n\n blk_end_request_all(rq, rq_status);\n\n continue;\n\n }\n\n\n\n /* read, data must be copied */\n\n if (rq_data_dir(rq) == 0)\n\n memcpy(bio_data(rq->bio), cmd_r.data, SECTOR_SIZE);\n\n\n\n /* If command finishes */\n\n blk_end_request_all(rq, 0);\n\n }\n\n\n\n return err < 0 ? err : 0;\n", "file_path": "Distributed systems/dsassignment2/driver/atomic_disc_driver.c", "rank": 47, "score": 131097.97411740536 }, { "content": "static int atdd_send_accepted_requests(struct atdd_ktread_data *data)\n\n{\n\n int err;\n\n size_t cmds_to_send = atdd_cmds_to_send(data);\n\n\n\n while (cmds_to_send--) {\n\n if ((err = atdd_send_command(\n\n data->atdd_c, data->requests_buf[data->buf_tcp_forwarded_pos].content,\n\n data->requests_buf[data->buf_tcp_forwarded_pos].ident)) < 0)\n\n return err;\n\n\n\n data->buf_tcp_forwarded_pos++;\n\n }\n\n\n\n return 0;\n", "file_path": "Distributed systems/dsassignment2/driver/atomic_disc_driver.c", "rank": 48, "score": 131025.67234173822 }, { "content": "struct Init {\n\n self_ref: ModuleRef<Raft>,\n\n}\n\n\n\n/// Message disabling a process. 
Used for testing to simulate failures.\n\npub(crate) struct Disable;\n\n\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]\n\npub(crate) struct RaftMessageHeader {\n\n /// Term of the process which issues the message.\n\n pub(crate) term: u64,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub(crate) enum RaftMessageContent {\n\n Heartbeat {\n\n /// Id of the process issuing the message, which claims to be the leader.\n\n leader_id: Uuid,\n\n },\n\n HeartbeatResponse,\n", "file_path": "Distributed systems/dslab10/solution.rs", "rank": 49, "score": 128455.76935691283 }, { "content": "struct Timeout;\n\n\n", "file_path": "Distributed systems/dslab10/solution.rs", "rank": 50, "score": 128455.76935691283 }, { "content": "fn main() {\n\n let nth = parse_arg();\n\n println!(\"Calculating the {}-th Fibbonaci number...\", nth);\n\n\n\n println!(\"fibonacci(): {}\", Fibonacci::fibonacci(nth));\n\n\n\n let mut fib = Fibonacci::new();\n\n match fib.nth(nth) {\n\n Some(num) => println!(\"iterator: {}\", num),\n\n None => println!(\"Sorry, the n-th Fibonacci number doesn't fit u128.\"),\n\n }\n\n}\n", "file_path": "Distributed systems/dslab01/main.rs", "rank": 51, "score": 127914.88473878439 }, { "content": "fn main() {\n\n let mac_key = vec![17, 18];\n\n\n\n let (mut target_client, mut target_server) = setup_process(&mac_key);\n\n\n\n let target_thread = std::thread::spawn(move || {\n\n let received = target_server.recv_message().unwrap();\n\n println!(\n\n \"[Server] Received message: '{}'.\",\n\n std::str::from_utf8(received.as_ref()).unwrap()\n\n );\n\n });\n\n\n\n target_client.send_msg(b\"Hello World!\".to_vec());\n\n assert!(matches!(target_thread.join(), Ok(_)));\n\n}\n", "file_path": "Distributed systems/dslab05/main.rs", "rank": 52, "score": 127914.88473878439 }, { "content": "fn main() {\n\n solution::fib(parse_args());\n\n}\n\n\n\n// Sample result of `cargo run`:\n\n//\n\n// Inside 109630159281952332903990402733657913010, value: 1\n\n// Inside 
90776690821287183793072922585987852479, value: 2\n\n// Inside 109630159281952332903990402733657913010, value: 3\n\n// Inside 90776690821287183793072922585987852479, value: 5\n\n// Inside 109630159281952332903990402733657913010, value: 8\n\n// Inside 90776690821287183793072922585987852479, value: 13\n\n// Inside 109630159281952332903990402733657913010, value: 21\n\n// Inside 90776690821287183793072922585987852479, value: 34\n\n// Inside 109630159281952332903990402733657913010, value: 55\n\n//\n\n// The identifiers are random, of course.\n", "file_path": "Distributed systems/dslab03/main.rs", "rank": 53, "score": 127914.88473878439 }, { "content": "fn main() {\n\n let shared_vec = Arc::new(Mutex::new(Vec::new()));\n\n let pool = solution::Threadpool::new(2);\n\n\n\n for x in 0..6 {\n\n let shared_vec_clone = shared_vec.clone();\n\n pool.submit(Box::new(move || {\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n let mut vec = shared_vec_clone.lock().unwrap();\n\n vec.push(x);\n\n println!(\"Data: {:#?}\", vec);\n\n }));\n\n }\n\n}\n", "file_path": "Distributed systems/dslab02/main.rs", "rank": 54, "score": 127914.88473878439 }, { "content": "// A custom struct:\n\nstruct Droppable {\n\n name: &'static str,\n\n}\n\n\n\n// A custom implementation of the Drop trait for the struct:\n\nimpl Drop for Droppable {\n\n // Rust calls automatically `drop()` for each field of a struct. A custom\n\n // implementation of the Drop trait needs only to dealocacte resources\n\n // introduced by the struct. 
Hence this `drop()` implementation does not\n\n // actually deallocate anything:\n\n fn drop(&mut self) {\n\n println!(\"> Dropping {}\", self.name);\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/drop.rs", "rank": 55, "score": 126790.16891665691 }, { "content": "#[derive(Default, Clone)]\n\nstruct RamStorage {\n\n state: Arc<std::sync::Mutex<Option<ProcessState>>>,\n\n}\n\n\n\nimpl StableStorage for RamStorage {\n\n fn put(&mut self, state: &ProcessState) {\n\n *self.state.lock().unwrap().deref_mut() = Some(*state);\n\n }\n\n\n\n fn get(&self) -> Option<ProcessState> {\n\n *self.state.lock().unwrap().deref()\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab10/main.rs", "rank": 56, "score": 126790.00172698784 }, { "content": "struct SStorage {\n\n root_storage_dir: PathBuf,\n\n}\n\n\n\nimpl SStorage {\n\n fn get_filename(key: &str) -> Result<String, String> {\n\n if key.len() > 255 {\n\n return Err(\"invalid key\".to_string());\n\n }\n\n\n\n Ok(format!(\"{:X}\", Sha256::new().chain(key.as_bytes()).finalize()))\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl StableStorage for SStorage {\n\n async fn put(&mut self, key: &str, value: &[u8]) -> Result<(), String> {\n\n let hash = SStorage::get_filename(key)?;\n\n\n\n if value.len() > 65535 {\n", "file_path": "Distributed systems/dslab06/solution.rs", "rank": 57, "score": 126785.15782857168 }, { "content": "// This example presents a simplified executor system with two PingPong modules\n\n// exchanging Ball messages.\n\n//\n\n// The executor system is hugely simplified: there is only one executor (the\n\n// system is single-threaded), it supports only one module type (PingPong) and\n\n// only one simple message type (Ball), the modules are registered statically,\n\n// it assumes there are exactly two modules and they have particular names, it\n\n// runs only for a specified number or steps, etc.\n\nfn main() {\n\n // Create a channel. 
It will be the queue:\n\n let (tx, rx) = unbounded();\n\n\n\n // Create the modules:\n\n let mut ping = PingPong::new(\"ping\", tx.clone());\n\n let mut pong = PingPong::new(\"pong\", tx.clone());\n\n\n\n // Send the initial Ball:\n\n tx.send(Ball::from(\"ping\")).unwrap();\n\n\n\n // Run the executor for 5 steps:\n\n for _ in 0..5 {\n\n let msg = rx.recv().unwrap();\n\n match msg.as_str() {\n\n // Message \"ping\" comes from the \"ping\" module\n\n // and targets the \"pong\" module:\n\n \"ping\" => pong.handler(msg),\n\n\n\n // Message \"pong\" comes from the \"pong\" module\n\n // and targets the \"ping\" module:\n\n \"pong\" => ping.handler(msg),\n\n\n\n _ => panic!(\"Unsupported Ball({})!\", msg),\n\n };\n\n }\n\n}\n", "file_path": "Distributed systems/dslab03/examples/tennis.rs", "rank": 58, "score": 126267.0003293844 }, { "content": "fn main() {\n\n tokio_runtimes();\n\n tokio_macro();\n\n tokio_spawning_tasks();\n\n tokio_filesystem();\n\n tokio_network();\n\n tokio_sync();\n\n}\n", "file_path": "Distributed systems/dslab04/examples/tokio.rs", "rank": 59, "score": 126249.22325166066 }, { "content": "fn main() {\n\n let message = \"Secret message\";\n\n\n\n // Generate a new 256-bit private key (the example uses a short key\n\n // as it is faster to generate, but for actual security applications\n\n // longer keys are recommend):\n\n let private_key = RsaPrivateKey::new(&mut OsRng, 256).unwrap();\n\n\n\n // Derive a public key from the private key:\n\n let public_key = RsaPublicKey::from(&private_key);\n\n\n\n // Encrypt the message (the public key is used):\n\n let encrypted = encrypt(message, &public_key);\n\n println!(\"Encrypted data: {:?}\", encrypted);\n\n\n\n // Decrypt the message (the private key is used):\n\n let decrypted = decrypt(&encrypted, &private_key);\n\n println!(\"Decrypted data: '{}'\", decrypted);\n\n}\n", "file_path": "Distributed systems/dslab05/examples/rsa.rs", "rank": 60, "score": 126249.22325166066 }, { "content": "fn main() {\n\n 
let msg = \"Message requiring authorization\";\n\n let secret_key = [1, 2, 3];\n\n\n\n // Generate HMAC tag:\n\n let tag = calculate_hmac_tag(msg, &secret_key);\n\n println!(\"HMAC tag: {:?}\", tag);\n\n\n\n // Verify HMAC tag:\n\n let verified = verify_hmac_tag(&tag, msg, &secret_key);\n\n println!(\"Tag is valid for the message: {}\", verified);\n\n}\n", "file_path": "Distributed systems/dslab05/examples/hmac.rs", "rank": 61, "score": 126249.22325166066 }, { "content": "fn main() {\n\n simple_thread();\n\n panicking_thread();\n\n moving_values_into_thread();\n\n}\n", "file_path": "Distributed systems/dslab02/examples/threads.rs", "rank": 62, "score": 126249.22325166066 }, { "content": "fn main() {\n\n custom_drop_example();\n\n file_drop_example();\n\n}\n", "file_path": "Distributed systems/dslab02/examples/drop.rs", "rank": 63, "score": 126249.22325166066 }, { "content": "fn main() {\n\n create_crossbeam_channel();\n\n channel_ends_dropped();\n\n timeout_receiving();\n\n communication_between_threads();\n\n bounded_channel();\n\n zero_size_channel();\n\n}\n", "file_path": "Distributed systems/dslab03/examples/crossbeam.rs", "rank": 64, "score": 126249.22325166066 }, { "content": "fn main() {\n\n simple_future();\n\n futures_combining();\n\n futures_joining();\n\n run_async_function();\n\n run_asychronous_actions();\n\n}\n", "file_path": "Distributed systems/dslab04/examples/futures.rs", "rank": 65, "score": 126249.22325166066 }, { "content": "// The examples require an Internet access to work:\n\nfn main() {\n\n example_1();\n\n example_2();\n\n}\n", "file_path": "Distributed systems/dslab04/examples/asynchronous.rs", "rank": 66, "score": 126249.22325166066 }, { "content": "fn main() {\n\n simple_closure();\n\n closure_environment();\n\n environment_capture_rules();\n\n closure_moving_values();\n\n closure_mutable_borrow_of_ownership();\n\n let _ = closure_trait();\n\n closures_traits();\n\n}\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", 
"rank": 67, "score": 126249.22325166066 }, { "content": "// More specifically, there are three different types of closures:\n\n// * `FnOnce<Args> – a closure that can modify its environment, and that can be\n\n// run only once (if you find it weird to restrict a closure to run only once,\n\n// consider again the example `environment_capture_rules()`),\n\n// * `FnMut<Args>` – a closure that can modify its environment, and that can be\n\n// run multiple times (it is a subtrait of `FnOnce`),\n\n// * `Fn<Args>` – a closure that cannot modify its environment, and that can be\n\n// run multiple times (it is a subtrait of `FnMut`).\n\nstruct ClosureHolder {\n\n #[allow(clippy::unused_unit)]\n\n fn_once_closure: Box<dyn FnOnce(u32) -> ()>,\n\n // If the returned type is the unit, it does not have to be specified:\n\n fn_once_closure_2: Box<dyn FnOnce(u32)>,\n\n\n\n fn_mut_closure: Box<dyn FnMut(u32)>,\n\n\n\n fn_closure: Box<dyn Fn(u32)>,\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 68, "score": 125180.03853016988 }, { "content": "// The PingPong module is implemented as a struct:\n\nstruct PingPong {\n\n /// Name of the module (here \"ping\" or \"pong\").\n\n name: &'static str,\n\n /// Number of messages received by this module.\n\n received_messages: usize,\n\n /// Reference to the queue of the other module's executor.\n\n other_queue: Sender<Ball>,\n\n}\n\n\n\nimpl PingPong {\n\n /// Create new PingPong module wit the provided name.\n\n fn new(name: &'static str, other_queue: Sender<Ball>) -> Self {\n\n PingPong {\n\n name,\n\n received_messages: 0,\n\n other_queue,\n\n }\n\n }\n\n\n\n /// Handle the received message, Ball, replying with a new Ball.\n", "file_path": "Distributed systems/dslab03/examples/tennis.rs", "rank": 69, "score": 125179.94381164198 }, { "content": "struct InternalClientCommand {\n\n action: Action,\n\n}\n\n\n\nimpl<const N: usize> EditorClient for SimpleClient<N> {}\n\n\n\nimpl<const N: usize> 
SimpleClient<N> {\n\n pub(crate) async fn new(system: &mut System, rank: usize) -> ModuleRef<Self> {\n\n let self_ref = system\n\n .register_module(Self {\n\n rank,\n\n text: String::new(),\n\n num_applied: 0,\n\n process: None,\n\n })\n\n .await;\n\n self_ref\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab12/main.rs", "rank": 70, "score": 125170.31722698495 }, { "content": "#[derive(Serialize, Deserialize, PartialEq, Debug)]\n\nstruct Entity {\n\n x: f32,\n\n y: f32,\n\n}\n\n\n", "file_path": "Distributed systems/dslab07/examples/bincode_serde.rs", "rank": 71, "score": 125170.31722698495 }, { "content": "// This server receives incoming data and acknowledges the receipt.\n\n// For simplicity, it serves only the first incoming request and finishes\n\n// itself afterwards. Moreover, sizes of the messages are fixed:\n\nfn server() {\n\n // Bind to the specified address:\n\n let listener = TcpListener::bind(SERVER_ADDRESS).unwrap();\n\n\n\n // Accept the first incoming connection and create `TcpStream` for it:\n\n let server_raw_stream = listener.incoming().next().unwrap().unwrap();\n\n\n\n // Wrap the stream in TLS:\n\n let mut server = server_stream(server_raw_stream);\n\n\n\n // Receive incoming data:\n\n let mut data = vec![0; 22];\n\n server.read_exact(data.as_mut()).unwrap();\n\n println!(\n\n \"[Server] Received: '{}'.\",\n\n std::str::from_utf8(data.as_ref()).unwrap()\n\n );\n\n\n\n // Reply with the acknowledgment:\n\n server.write_all(b\"Done\").unwrap();\n\n}\n\n\n", "file_path": "Distributed systems/dslab05/examples/rustls/main.rs", "rank": 72, "score": 124656.0473447558 }, { "content": "// This client sends one message to the server and waits for the acknowledgment.\n\n// For simplicity, sizes of the messages are fixed:\n\nfn client() {\n\n // Connect to the remote host:\n\n let client_raw_link = TcpStream::connect(SERVER_ADDRESS).unwrap();\n\n\n\n // Wrap the stream in TLS:\n\n let mut client = client_stream(client_raw_link);\n\n\n\n // Send 
the message:\n\n client.write_all(b\"Hello encrypted world!\").unwrap();\n\n\n\n // Receive the acknowledgment:\n\n let mut data = vec![0; 5];\n\n client.take(4).read_to_end(data.as_mut()).unwrap();\n\n println!(\n\n \"[Client] Received: '{}'.\",\n\n std::str::from_utf8(data.as_ref()).unwrap()\n\n );\n\n}\n\n\n\n\n", "file_path": "Distributed systems/dslab05/examples/rustls/main.rs", "rank": 73, "score": 124648.92276624806 }, { "content": "fn main() {\n\n box_example();\n\n rc_example();\n\n}\n", "file_path": "Distributed systems/dslab02/examples/box_rc.rs", "rank": 74, "score": 124639.16744139243 }, { "content": "fn tokio_runtimes() {\n\n // Create a single-threaded runtime.\n\n // All tasks are executed by the current thread:\n\n let single_thread_runtime = Builder::new_current_thread().build().unwrap();\n\n\n\n // Run a future to completion on the runtime:\n\n single_thread_runtime.block_on(async {\n\n println!(\"I am a future inside the single-threaded runtime.\");\n\n });\n\n\n\n\n\n // Create a mult-threaded runtime, the default one.\n\n // Tasks are executed by an automatically created thread pool:\n\n let multi_threaded_runtime = Runtime::new().unwrap();\n\n\n\n // Run a future to completion on the runtime:\n\n multi_threaded_runtime.block_on(async {\n\n println!(\"I am a future inside the multi-threaded runtime.\");\n\n });\n\n}\n", "file_path": "Distributed systems/dslab04/examples/tokio.rs", "rank": 75, "score": 124639.16744139243 }, { "content": "fn bounded_channel() {\n\n // There are also channels of limited capacity. The maximal number of\n\n // values such a channel can store is specified when the channel\n\n // is created:\n\n let (tx, _rx) = bounded(1);\n\n\n\n tx.send(7).unwrap();\n\n\n\n // An attempt to send more values than the capacity results\n\n // in blocking the thread until some value is received. 
Uncomment\n\n // the following line and check whether it blocks:\n\n // tx.send(7).unwrap();\n\n}\n\n\n", "file_path": "Distributed systems/dslab03/examples/crossbeam.rs", "rank": 76, "score": 124639.16744139243 }, { "content": "fn simple_future() {\n\n // Crate a future that is immediately ready with\n\n // a value which is then transformed:\n\n let future = ready(7).map(|x| x * 2);\n\n\n\n // Run the future to completion in the current thread:\n\n let result = futures::executor::block_on(future);\n\n\n\n println!(\"Result of the simple future: {}\", result);\n\n}\n\n\n\n\n", "file_path": "Distributed systems/dslab04/examples/futures.rs", "rank": 77, "score": 124639.16744139243 }, { "content": "// Futures can be combined into a new future:\n\nfn futures_combining() {\n\n // Create a future that is immediately ready with a success value:\n\n let future: futures::future::Ready<std::result::Result<i32, ()>> =\n\n futures::future::ok(7);\n\n\n\n // Create a new future which will execute another future (created by\n\n // a closure) when the future successes:\n\n let combined_future = future.and_then(|x| futures::future::ok(x * x));\n\n\n\n // Run the future to completion in the current thread:\n\n let result = futures::executor::block_on(combined_future).unwrap();\n\n\n\n println!(\"Result of the combined future: {}\", result);\n\n}\n\n\n\n\n", "file_path": "Distributed systems/dslab04/examples/futures.rs", "rank": 78, "score": 124639.16744139243 }, { "content": "fn main() {\n\n let client_thread = std::thread::spawn(|| {\n\n client();\n\n });\n\n\n\n server();\n\n\n\n client_thread.join().unwrap();\n\n}\n", "file_path": "Distributed systems/dslab05/examples/rustls/main.rs", "rank": 79, "score": 124639.16744139243 }, { "content": "#[allow(unused_assignments)]\n\nfn mutability() {\n\n let i1: i32 = 42;\n\n // Won't compile:\n\n // i1 = 43;\n\n println!(\"Immutable i32: {}\", i1);\n\n\n\n let mut i2: i32 = 42;\n\n i2 = 43;\n\n println!(\"Mutable i32: {}\", i2);\n\n\n\n 
let a1: [u32; 2] = [0, 1];\n\n // Won't compile:\n\n // a1[0] = 42;\n\n println!(\"Immutable array with two u32 values: {:?}\", a1);\n\n // Above `{:?}` marker displays arrays for debugging purposes.\n\n // Arrays don't implement the user-friendly formatting (the `{}` marker).\n\n\n\n let mut a2: [u32; 2] = [0, 1];\n\n a2[0] = 42;\n\n println!(\"Mutable array with two u32 values: {:?}\", a2);\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 80, "score": 124639.16744139243 }, { "content": "fn communication_between_threads() {\n\n let (tx, rx) = unbounded();\n\n let tx_clone = tx.clone();\n\n\n\n // The required end can be moved to the other thread:\n\n std::thread::spawn(move || {\n\n tx.send(42).unwrap();\n\n });\n\n\n\n // Receive the value in the main thread:\n\n assert_eq!(Ok(42), rx.recv());\n\n\n\n // The main thread can still send new values using the cloned end:\n\n tx_clone.send(7).unwrap();\n\n\n\n // Receive the value:\n\n assert_eq!(Ok(7), rx.recv());\n\n}\n\n\n", "file_path": "Distributed systems/dslab03/examples/crossbeam.rs", "rank": 81, "score": 124639.16744139243 }, { "content": "fn panicking_thread() {\n\n let thread = spawn(|| {\n\n // Panic in the thread:\n\n panic!(\"Panic attack!\")\n\n });\n\n\n\n // The main thread still runs. 
However,\n\n // the result of `join()` is `Err` now:\n\n assert!(thread.join().is_err())\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/threads.rs", "rank": 82, "score": 124639.16744139243 }, { "content": "/// The main function of this example.\n\nfn main() {\n\n simple_function();\n\n may_panic();\n\n mutability();\n\n ownership_copy_move_clone();\n\n ownership_functions();\n\n references_borrowing();\n\n struct_example();\n\n enum_example();\n\n generic_type_example();\n\n}\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 83, "score": 124639.16744139243 }, { "content": "fn simple_thread() {\n\n let thread = spawn(|| {\n\n println!(\"Inside a thread\");\n\n });\n\n\n\n // Waiting for the thread to finish:\n\n thread.join().unwrap()\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/threads.rs", "rank": 84, "score": 124639.16744139243 }, { "content": "fn closure_environment() {\n\n let allowed_numbers = vec![2, 7];\n\n let vec = vec![1, 2, 3, 4, 5, 6];\n\n\n\n // The closure used below to filter the vector captures its environment:\n\n // the immutable reference to `allowed_numbers` is passed into the closure.\n\n // This is the default way the capturing of the environment works. Note that\n\n // the closure cannot outlive any of the borrowed references, the same as\n\n // any value in Rust:\n\n let filtered: Vec<i32> = vec\n\n .into_iter()\n\n .filter(|x| allowed_numbers.contains(x))\n\n .collect();\n\n\n\n println!(\"closure_environment: {:?}\", filtered);\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 85, "score": 124639.16744139243 }, { "content": "fn simple_closure() {\n\n // Closure is a function which can be created with a special\n\n // syntax. 
The closure below is equivalent to defining\n\n // an `inc(u32)` function:\n\n let inc: fn(u32) -> u32 = |x: u32| x + 1;\n\n println!(\"simple_closure: {}\", inc(inc(0)))\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/closures.rs", "rank": 86, "score": 124639.16744139243 }, { "content": "fn main() {\n\n let entity = Entity { x: 1.0, y: -11.0 };\n\n\n\n // Serialize the object:\n\n let serialized: Vec<u8> = bincode::serialize(&entity).unwrap();\n\n println!(\"Serialized entity: {:?}\", serialized);\n\n\n\n // Deserialize the object:\n\n let deserialized: Entity = bincode::deserialize(&serialized[..]).unwrap();\n\n assert_eq!(entity, deserialized);\n\n}\n", "file_path": "Distributed systems/dslab07/examples/bincode_serde.rs", "rank": 87, "score": 124639.16744139243 }, { "content": "fn main() {\n\n let message = \"AES is fast for large amounts of data\";\n\n\n\n // Generate random key and initialization vector:\n\n let key = rand::thread_rng().gen::<[u8; 16]>();\n\n let iv = rand::thread_rng().gen::<[u8; 16]>();\n\n\n\n // Encrypt the message:\n\n let encrypted = encrypt(message, &key, &iv);\n\n println!(\"Encrypted data: {:?}\", encrypted);\n\n\n\n // Decrypt the message:\n\n let decrypted = decrypt(&encrypted, &key, &iv);\n\n println!(\"Decrypted data: '{}'\", decrypted);\n\n}\n", "file_path": "Distributed systems/dslab05/examples/aes_cbc.rs", "rank": 88, "score": 124639.16744139243 }, { "content": "// Futures can be joined into a new future:\n\nfn futures_joining() {\n\n // Create a vector of futures:\n\n let futures = vec![ready(1), ready(2), ready(3)];\n\n\n\n // Create a new future which represents a collection\n\n // of outputs of the futures:\n\n let joined_future = join_all(futures);\n\n\n\n // Run the future to completion in the current thread:\n\n let result = futures::executor::block_on(joined_future);\n\n\n\n println!(\"Result of the joined future: {:?}\", result);\n\n}\n\n\n\n\n\n// Function can be made asynchronous (which turns their 
return type into a future)\n\n// by using the `async` keyword:\n\nasync fn async_function() {\n\n println!(\"I'm an async function.\");\n\n}\n\n\n", "file_path": "Distributed systems/dslab04/examples/futures.rs", "rank": 89, "score": 124639.16744139243 }, { "content": "fn main() {\n\n immutable_data_is_sync();\n\n immutable_mutex_reference_grants_mutable_access();\n\n atomic_reference();\n\n conditional_variable();\n\n atomic_bool();\n\n}\n", "file_path": "Distributed systems/dslab02/examples/shared_memory.rs", "rank": 90, "score": 124639.16744139243 }, { "content": "#[derive(Clone)]\n\nstruct Ball {}\n\n\n", "file_path": "Distributed systems/dsassignment1/public-tests/tests/executors.rs", "rank": 91, "score": 123613.40091235942 }, { "content": "#[derive(Clone)]\n\nstruct Init {\n\n target: ModuleRef<PingPong>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<Init> for PingPong {\n\n async fn handle(&mut self, msg: Init) {\n\n self.other = Some(msg.target);\n\n if self.first {\n\n self.other.as_ref().unwrap().send(Ball {}).await;\n\n }\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment1/public-tests/tests/executors.rs", "rank": 92, "score": 123613.40091235942 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct ChattyInteger {\n\n num: i32,\n\n}\n\n\n\n// An implementation of the Add trait for the ChattyInteger struct\n\n// (the Add trait defines implementation of the addition operator `+`):\n\nimpl Add for ChattyInteger {\n\n // Traits can have not only associated methods, but also associated types:\n\n type Output = Self; // Here Self refers to ChattyInteger.\n\n\n\n // A method:\n\n fn add(self, other: Self) -> Self {\n\n println!(\"Adding...\");\n\n\n\n // Create (and return from the method) a new struct:\n\n Self {\n\n num: self.num + other.num,\n\n }\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 93, "score": 123613.28947859317 }, { "content": "struct Timeout;\n\n\n\nasync fn 
set_timer(\n\n system: &mut System,\n\n timeout_callback: Pin<Box<dyn Future<Output = ()> + Send>>,\n\n duration: Duration,\n\n) -> ModuleRef<Timer> {\n\n let timer = system.register_module(Timer::new(timeout_callback)).await;\n\n system.request_tick(&timer, duration).await;\n\n timer\n\n}\n\n\n\n#[tokio::test]\n\n#[timeout(300)]\n\nasync fn second_tick_arrives_after_correct_interval() {\n\n let mut sys = System::new().await;\n\n let (timeout_sender, timeout_receiver) = unbounded::<Timeout>();\n\n let timeout_interval = Duration::from_millis(50);\n\n\n\n let start_instant = Instant::now();\n", "file_path": "Distributed systems/dsassignment1/public-tests/tests/executors.rs", "rank": 94, "score": 123608.50065626213 }, { "content": "struct Timer {\n\n first_tick_received: bool,\n\n timeout_callback: Option<Pin<Box<dyn Future<Output = ()> + Send>>>,\n\n}\n\n\n\nimpl Timer {\n\n fn new(timeout_callback: Pin<Box<dyn Future<Output = ()> + Send>>) -> Self {\n\n Self {\n\n first_tick_received: false,\n\n timeout_callback: Some(timeout_callback),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handler<Tick> for Timer {\n\n async fn handle(&mut self, _msg: Tick) {\n\n if !self.first_tick_received {\n\n self.first_tick_received = true;\n\n } else {\n\n match self.timeout_callback.take() {\n\n Some(callback) => callback.await,\n\n None => (),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "Distributed systems/dsassignment1/public-tests/tests/executors.rs", "rank": 95, "score": 123608.50065626213 }, { "content": "// The combination `Arc<Mutex<T>>` is very useful for sharing some data\n\n// between multiple threads in Rust:\n\nfn atomic_reference() {\n\n let send_and_sync = Arc::new(Mutex::new(vec![1, 2, 3]));\n\n let cloned = send_and_sync.clone();\n\n\n\n let thread = spawn(move || {\n\n let mut guard = cloned.lock().unwrap();\n\n let data = guard.deref_mut();\n\n data.push(4);\n\n });\n\n\n\n // Won't compile, `cloned` was moved to the closure:\n\n // 
cloned.lock().unwrap().push(5);\n\n\n\n // But the other reference can still be used:\n\n send_and_sync.lock().unwrap().push(5);\n\n\n\n thread.join().unwrap();\n\n}\n\n\n", "file_path": "Distributed systems/dslab02/examples/shared_memory.rs", "rank": 96, "score": 123086.4430984567 }, { "content": "fn references_borrowing() {\n\n let mut v: Vec<u8> = vec![0, 1];\n\n\n\n print_by_reference(&v);\n\n // Above function call performs immutable borrowing.\n\n\n\n print_by_slice(&v[0..2]);\n\n // Above function call performs immutable borrowing\n\n // of the range [0th element, 2nd element) of the vector.\n\n\n\n // Multiple immutable references are safe:\n\n let v_imm_ref_1 = &v;\n\n let v_imm_ref_2 = &v;\n\n\n\n // Won't compile, there are already immutable references:\n\n // let v_mut_ref = &mut v;\n\n\n\n print_by_reference(v_imm_ref_2);\n\n print_by_reference(v_imm_ref_1);\n\n\n\n // The above immutable references are not used below so it is now safe\n\n // to have a mutable reference:\n\n let v_mut_ref = &mut v;\n\n add_by_mut_reference(v_mut_ref);\n\n print_by_reference(v_mut_ref);\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 97, "score": 123081.97855117344 }, { "content": "/// A simple function.\n\n///\n\n/// Note that doc comments in Rust start with `///` and support **Markdown**!\n\n/// They can be then easily converted (by calling `cargo doc`) to a website, just\n\n/// like [The Rust Standard Library Docs](https://doc.rust-lang.org/std/index.html).\n\n/// Moreover, the doc comments may include examples which can be then\n\n/// automatically tested!\n\nfn simple_function() {\n\n // println! 
is a macro for printing:\n\n println!(\"Hello world!\")\n\n\n\n // No semicolon after the last expression of the function, so this value\n\n // (the result of the macro call) is returned from the function.\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 98, "score": 123081.97855117344 }, { "content": "fn ownership_functions() {\n\n let mut v: Vec<u8> = vec![0, 1];\n\n\n\n v = print_by_value_and_return(v);\n\n // Above vector is moved out of the variable to the function. Then it is\n\n // returned by the function and reassigned (moved) to the variable.\n\n\n\n print_by_value(v);\n\n // Above vector is moved out of the variable to the function.\n\n\n\n // Won't compile, `v` no longer owns the vector:\n\n // println!(\"Vector: {:?}\", v);\n\n}\n\n\n", "file_path": "Distributed systems/dslab01/examples/rust_overview.rs", "rank": 99, "score": 123081.97855117344 } ]
Rust
src/main.rs
belltoy/raindrop
8e26118026d41da74e899dd1d9af0000301253f5
use std::io::BufRead; use std::path::PathBuf; use glob::glob; use structopt::StructOpt; use log::{debug, info, error}; use pretty_env_logger::env_logger as logger; mod exhaust; mod db; #[derive(StructOpt, Debug)] struct Args { #[structopt(short, long, help = "DONOT execute SQL statements")] check: bool, #[structopt(short, long, help = "Input paths", required(true))] inputs: Vec<String>, #[structopt(short, long, help = "Execute or not")] execute: bool, #[structopt(short, long, help = "MySQL URL, example: `mysql://user:password@host:port/db_name`", required_if("execute", "true"))] mysql: Option<String>, } fn main() -> Result<(), String> { logger::from_env(logger::Env::default().default_filter_or("info")).init(); let args = Args::from_args(); if args.execute && args.mysql.is_none() { return Err("Require mysql URL".into()); } let inputs = read_files(&args.inputs)?; let filtered: Vec<_> = inputs.iter() .filter(|(file, p)| { if p.len() == 0 { info!("Filter out empty file: {:?}", file); false } else { true } }) .enumerate() .map(|(no, (file, p))| { (no, file, p) }).collect(); let input_contents: Vec<_> = filtered.iter().map(|(no, _file, p)| { p.iter().map(|s| (no, s.as_str())).collect::<Vec<_>>() }).collect(); let input_files: Vec<_> = filtered.iter().map(|(_no, file, _p)| file).collect(); debug!("input files: {:?}", input_files); assert_eq!(input_files.len(), input_contents.len()); let input_contents: Vec<_> = (&input_contents[..]).iter().map(|p| &p[..]).collect(); let outputs = exhaust::shuffle(&input_contents[..]); outputs.iter().enumerate().for_each(|(i, case)| { debug!("-- Case: {}", i); case.iter().for_each(|(idx, line)| { let idx = **idx; debug!(" {} -- [client: {}] [from file: {:?}]", line, idx, input_files[idx]); }); }); if args.execute { execute_sqls(args.mysql.as_ref().unwrap(), &input_files, &outputs)?; } Ok(()) } fn read_files(inputs: &Vec<String>) -> Result<Vec<(PathBuf, Vec<String>)>, String> { let inputs_paths = inputs.iter().flat_map(|p| { let mut result 
= glob(p).unwrap().peekable(); if result.peek().is_none() { error!("Invalid input argument: {}", p); } result }); let mut unique_inputs = Vec::new(); inputs_paths .flat_map(|p| { p.map_err(|e| format!("glob pattern error: {}", e)) .and_then(|p| { if p.is_dir() { p.read_dir().map_err(|e| format!("read_dir error: {}", e)) .map(|op: std::fs::ReadDir| { op.map(|p| { p .map(|dir_entry| dir_entry.path()) .map_err(|e| format!("IO error during read dir iteration: {}", e)) }) .filter(|p| { if let Ok(p) = p { !p.is_dir() } else { true } }).collect::<Vec<_>>() }) } else { Ok(vec![Ok(p)]) } }) }) .flat_map(|p| p) .for_each(|p| { if !unique_inputs.contains(&p) { unique_inputs.push(p); } }); let (inputs, errors): (Vec<Result<_, _>>, _) = unique_inputs.into_iter().partition(Result::is_ok); info!("inputs files: {:?}", inputs); if errors.len() > 0 { return Err(format!("{:?}", errors)); } let inputs = inputs.into_iter().map(Result::unwrap); let (inputs_contents, io_errors): (Vec<_>, _) = inputs.map(|input_file| -> Result<_, String> { debug!("Open file: {:?}", input_file); let f = std::fs::File::open(&input_file).map_err(|e| { format!("open file {} error: {}", input_file.to_str().unwrap_or("unknown path"), e) })?; let reader = std::io::BufReader::new(f); let lines: Vec<_> = reader.lines().map(|line| { let line = line.map_err(|e| format!("IO Error when reading lines: {}", e)); line }).collect(); Ok((input_file, lines)) }).partition(Result::is_ok); let inputs_contents = inputs_contents.into_iter().map(Result::unwrap); let io_errors: Vec<_> = io_errors.into_iter().map(Result::unwrap_err).collect(); if io_errors.len() > 0 { let e = &io_errors[0]; return Err(e.into()); } let inputs = inputs_contents.map(|(file_name, lines_result)| { let lines: Vec<_> = lines_result.into_iter() .map(Result::unwrap) .map(|line| line.trim().to_owned()) .filter(|line| line.len() > 0) .collect(); (file_name, lines) }).collect::<Vec<_>>(); Ok(inputs) } fn execute_sqls<I: std::fmt::Debug>(url: &str, input_files: 
&[I], outputs: &[Vec<(&usize, &str)>]) -> Result<(), String> { let pool = db::init_clients(&url, input_files.len()).map_err(|e| { format!("db error: {:?}", e) })?; let (clients, errors): (Vec<_>, _) = input_files.iter().map(|file| { let file_path = format!("{:?}", file); pool.get_conn().map(|p| (p, file_path)) }) .partition(Result::is_ok); let mut clients: Vec<_> = clients.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); if errors.len() > 0 { return Err(format!("get connection error: {:?}", errors[0])); } db::execute_sqls(&mut clients[..], &outputs[..]).map_err(|e| { format!("mysql error: {:?}", e) })?; Ok(()) }
use std::io::BufRead; use std::path::PathBuf; use glob::glob; use structopt::StructOpt; use log::{debug, info, error}; use pretty_env_logger::env_logger as logger; mod exhaust; mod db; #[derive(StructOpt, Debug)] struct Args { #[structopt(short, long, help = "DONOT execute SQL statements")] check: bool, #[structopt(short, long, help = "Input paths", required(true))] inputs: Vec<String>, #[structopt(short, long, help = "Execute or not")] execute: bool, #[structopt(short, long, help = "MySQL URL, example: `mysql://user:password@host:port/db_name`", required_if("execute", "true"))] mysql: Option<String>, } fn main() -> Result<(), String> { logger::from_env(logger::Env::default().default_filter_or("info")).init(); let args = Args::from_args(); if args.execute && args.mysql.is_none() { return Err("Require mysql URL".into()); } let inputs = read_files(&args.inputs)?; let filtered: Vec<_> = inputs.iter() .filter(|(file, p)| { if p.len() == 0 { info!("Filter out empty file: {:?}", file); false } else { true } }) .enumerate() .map(|(no, (file, p))| { (no, file, p) }).collect(); let input_contents: Vec<_> = filtered.iter().map(|(no, _file, p)| { p.iter().map(|s| (no, s.as_str())).collect::<Vec<_>>() }).collect(); let input_files: Vec<_> = filtered.iter().map(|(_no, file, _p)| file).collect(); debug!("input files: {:
e(); if result.peek().is_none() { error!("Invalid input argument: {}", p); } result }); let mut unique_inputs = Vec::new(); inputs_paths .flat_map(|p| { p.map_err(|e| format!("glob pattern error: {}", e)) .and_then(|p| { if p.is_dir() { p.read_dir().map_err(|e| format!("read_dir error: {}", e)) .map(|op: std::fs::ReadDir| { op.map(|p| { p .map(|dir_entry| dir_entry.path()) .map_err(|e| format!("IO error during read dir iteration: {}", e)) }) .filter(|p| { if let Ok(p) = p { !p.is_dir() } else { true } }).collect::<Vec<_>>() }) } else { Ok(vec![Ok(p)]) } }) }) .flat_map(|p| p) .for_each(|p| { if !unique_inputs.contains(&p) { unique_inputs.push(p); } }); let (inputs, errors): (Vec<Result<_, _>>, _) = unique_inputs.into_iter().partition(Result::is_ok); info!("inputs files: {:?}", inputs); if errors.len() > 0 { return Err(format!("{:?}", errors)); } let inputs = inputs.into_iter().map(Result::unwrap); let (inputs_contents, io_errors): (Vec<_>, _) = inputs.map(|input_file| -> Result<_, String> { debug!("Open file: {:?}", input_file); let f = std::fs::File::open(&input_file).map_err(|e| { format!("open file {} error: {}", input_file.to_str().unwrap_or("unknown path"), e) })?; let reader = std::io::BufReader::new(f); let lines: Vec<_> = reader.lines().map(|line| { let line = line.map_err(|e| format!("IO Error when reading lines: {}", e)); line }).collect(); Ok((input_file, lines)) }).partition(Result::is_ok); let inputs_contents = inputs_contents.into_iter().map(Result::unwrap); let io_errors: Vec<_> = io_errors.into_iter().map(Result::unwrap_err).collect(); if io_errors.len() > 0 { let e = &io_errors[0]; return Err(e.into()); } let inputs = inputs_contents.map(|(file_name, lines_result)| { let lines: Vec<_> = lines_result.into_iter() .map(Result::unwrap) .map(|line| line.trim().to_owned()) .filter(|line| line.len() > 0) .collect(); (file_name, lines) }).collect::<Vec<_>>(); Ok(inputs) } fn execute_sqls<I: std::fmt::Debug>(url: &str, input_files: &[I], outputs: 
&[Vec<(&usize, &str)>]) -> Result<(), String> { let pool = db::init_clients(&url, input_files.len()).map_err(|e| { format!("db error: {:?}", e) })?; let (clients, errors): (Vec<_>, _) = input_files.iter().map(|file| { let file_path = format!("{:?}", file); pool.get_conn().map(|p| (p, file_path)) }) .partition(Result::is_ok); let mut clients: Vec<_> = clients.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); if errors.len() > 0 { return Err(format!("get connection error: {:?}", errors[0])); } db::execute_sqls(&mut clients[..], &outputs[..]).map_err(|e| { format!("mysql error: {:?}", e) })?; Ok(()) }
?}", input_files); assert_eq!(input_files.len(), input_contents.len()); let input_contents: Vec<_> = (&input_contents[..]).iter().map(|p| &p[..]).collect(); let outputs = exhaust::shuffle(&input_contents[..]); outputs.iter().enumerate().for_each(|(i, case)| { debug!("-- Case: {}", i); case.iter().for_each(|(idx, line)| { let idx = **idx; debug!(" {} -- [client: {}] [from file: {:?}]", line, idx, input_files[idx]); }); }); if args.execute { execute_sqls(args.mysql.as_ref().unwrap(), &input_files, &outputs)?; } Ok(()) } fn read_files(inputs: &Vec<String>) -> Result<Vec<(PathBuf, Vec<String>)>, String> { let inputs_paths = inputs.iter().flat_map(|p| { let mut result = glob(p).unwrap().peekabl
random
[ { "content": "pub fn init_clients(url: &str, size: usize) -> MySQLResult<Pool> {\n\n let opts = Opts::from_url(url)?;\n\n let pool = Pool::new_manual(size, size, opts)?;\n\n Ok(pool)\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 3, "score": 74685.11859225594 }, { "content": "/// Here not use async client, because this tool will simulate multiple clients and execute sql\n\n/// statements in its' own order in the original file. Async execution will break the order.\n\npub fn execute_sqls(clients: &mut[(PooledConn, String)], sqls: &[Vec<(&usize, &str)>]) -> MySQLResult<()> {\n\n let cases_count = sqls.iter().enumerate().map(|(i, case)| {\n\n info!(\"Executing case: {}\", i);\n\n let (success, failed): (Vec<_>, _) = case.iter().map(|(idx, sql)| {\n\n let idx = **idx;\n\n if let Err(e) = clients[idx].0.query_drop(sql) {\n\n return Err(format!(\"Execute sql `{}` error in file: {}, error: {:?}\", sql, clients[idx].1, e));\n\n }\n\n Ok(())\n\n })\n\n .partition(Result::is_ok);\n\n\n\n let failed_count = failed.into_iter().map(Result::unwrap_err).inspect(|e| {\n\n error!(\"{}\", e);\n\n }).count();\n\n\n\n if failed_count > 0 {\n\n error!(\"Executed errors in case: {}, error count: {:?}, success count: {}\", i, failed_count, success.iter().count());\n\n }\n\n }).count();\n\n\n\n info!(\"Executed {} cases\", cases_count);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/db.rs", "rank": 4, "score": 73825.6949046939 }, { "content": "/// Combine all slices in its own order, return all possible cases.\n\npub fn shuffle<T: Copy + std::fmt::Debug>(inputs: &[&[T]]) -> Vec<Vec<T>> {\n\n if inputs.len() == 0 {\n\n return Vec::new();\n\n }\n\n let mut results: Vec<Vec<T>> = Vec::new();\n\n let mut result: Vec<T> = Vec::new();\n\n\n\n let size = inputs.iter().map(|p| {\n\n (&p).iter().count()\n\n }).sum::<usize>();\n\n\n\n let mut used = vec![0; inputs.len()];\n\n backtracking(&mut results, &mut result, inputs, size, &mut used);\n\n results\n\n}\n\n\n", "file_path": "src/exhaust.rs", 
"rank": 6, "score": 56642.1634967859 }, { "content": "\n\n\n\n\n\n\n\n\n", "file_path": "tests/sql/empty.sql", "rank": 7, "score": 40832.21767266146 }, { "content": "fn backtracking<T: Copy>(\n\n results: &mut Vec<Vec<T>>,\n\n result: &mut Vec<T>,\n\n inputs: &[&[T]],\n\n size: usize,\n\n used: &mut Vec<usize>,\n\n) {\n\n if result.len() == size {\n\n results.push(result.to_vec());\n\n return;\n\n }\n\n\n\n for k in 0..used.len() {\n\n\n\n if used[k] >= inputs[k].len() {\n\n continue;\n\n }\n\n\n\n result.push(inputs[k][used[k]]);\n\n used[k] += 1;\n", "file_path": "src/exhaust.rs", "rank": 8, "score": 40184.476047820215 }, { "content": "UPDATE X SET a = 6 WHERE id = 2;\n", "file_path": "tests/sql/a1.sql", "rank": 9, "score": 20137.17514940732 }, { "content": "UPDATE X SET a = 8 WHERE id = 8;\n", "file_path": "tests/sql/a2.sql", "rank": 10, "score": 20137.17514940732 }, { "content": "UPDATE X SET a = 5 WHERE id = 1;\n", "file_path": "tests/sql/a1.sql", "rank": 11, "score": 20137.17514940732 }, { "content": "use log::{error, info};\n\nuse mysql::{Opts, Pool, PooledConn, Result as MySQLResult};\n\nuse mysql::prelude::Queryable;\n\n\n", "file_path": "src/db.rs", "rank": 12, "score": 19080.14077752204 }, { "content": " backtracking(results, result, inputs, size, used);\n\n result.pop();\n\n used[k] -= 1;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n /// You can prove that the amount(P) cases of inputs[a1, a2, a3, ..., an], where ax is an array/slice,\n\n /// P(a1, a2, a3, .., an) = (a1 + a2 + a3 + .. + an)! / (a1! * a2! * ... 
* an!)\n\n fn count_shuffled<T: std::fmt::Debug>(inputs: &[&[T]]) -> usize {\n\n if inputs.len() == 0 {\n\n return 0;\n\n }\n\n\n\n let size: usize = inputs.iter().map(|v| v.iter().count()).sum();\n\n let numerator = (1..=size).fold(1, |acc, n| acc * n);\n\n let denominator = inputs.iter().map(|v| {\n\n if v.len() <= 1 {\n", "file_path": "src/exhaust.rs", "rank": 13, "score": 17683.266076102387 }, { "content": " vec![5, 8, 6, 1, 2],\n\n\n\n vec![8, 1, 6, 2, 5],\n\n vec![8, 1, 6, 5, 2],\n\n vec![8, 1, 5, 6, 2],\n\n vec![8, 5, 1, 6, 2],\n\n vec![5, 8, 1, 6, 2],\n\n\n\n vec![8, 1, 2, 6, 5],\n\n vec![8, 1, 2, 5, 6],\n\n vec![8, 1, 5, 2, 6],\n\n vec![8, 5, 1, 2, 6],\n\n vec![5, 8, 1, 2, 6],\n\n ]),\n\n ];\n\n\n\n for (input, mut check) in cases {\n\n let mut results = super::shuffle(&input);\n\n\n\n let refs: Vec<_> = input.as_slice().iter().map(|v| &v[..]).collect();\n", "file_path": "src/exhaust.rs", "rank": 14, "score": 17681.753023387057 }, { "content": " assert_eq!(count_shuffled(&refs[..]), check.len(), \"testing amount cases of results\");\n\n\n\n let slice = results.as_mut_slice();\n\n slice.sort();\n\n\n\n let check = check.as_mut_slice();\n\n check.sort();\n\n itertools::assert_equal(slice, check);\n\n }\n\n }\n\n}\n", "file_path": "src/exhaust.rs", "rank": 15, "score": 17680.533377201307 }, { "content": "//! Implementation of shuffle multiple slices in its own order\n\n//!\n\n//! 
This algorithm use backtracking, but loop will be better.\n\n\n\n/// Combine all slices in its own order, return all possible cases.\n", "file_path": "src/exhaust.rs", "rank": 16, "score": 17678.14254324906 }, { "content": " return 1;\n\n }\n\n (1..=v.len()).fold(1, |acc, n| acc * n)\n\n }).fold(1, |acc, n| acc * n);\n\n\n\n return numerator / denominator;\n\n }\n\n\n\n #[test]\n\n fn exhaust() {\n\n let cases = vec![\n\n (vec![], vec![]),\n\n\n\n (vec![&[1, 2][..]], vec![vec![1, 2]]),\n\n\n\n (vec![&[1, 2][..], &[][..]], vec![vec![1, 2]]),\n\n\n\n (vec![&[1][..], &[8][..]], vec![\n\n vec![1, 8],\n\n vec![8, 1],\n", "file_path": "src/exhaust.rs", "rank": 17, "score": 17677.933279407054 }, { "content": " vec![1, 2, 5, 8, 6],\n\n vec![1, 5, 2, 8, 6],\n\n vec![5, 1, 2, 8, 6],\n\n\n\n vec![1, 8, 6, 2, 5],\n\n vec![1, 8, 6, 5, 2],\n\n vec![1, 8, 5, 6, 2],\n\n vec![1, 5, 8, 6, 2],\n\n vec![5, 1, 8, 6, 2],\n\n\n\n vec![1, 8, 2, 6, 5],\n\n vec![1, 8, 2, 5, 6],\n\n vec![1, 8, 5, 2, 6],\n\n vec![1, 5, 8, 2, 6],\n\n vec![5, 1, 8, 2, 6],\n\n\n\n vec![8, 6, 1, 2, 5],\n\n vec![8, 6, 1, 5, 2],\n\n vec![8, 6, 5, 1, 2],\n\n vec![8, 5, 6, 1, 2],\n", "file_path": "src/exhaust.rs", "rank": 18, "score": 17676.75394717541 }, { "content": " ]),\n\n\n\n (vec![&[1, 2][..], &[8][..]], vec![\n\n vec![1, 2, 8],\n\n vec![1, 8, 2],\n\n vec![8, 1, 2],\n\n ]),\n\n\n\n (vec![&[1, 2][..], &[8, 6][..]], vec![\n\n vec![1, 2, 8, 6],\n\n vec![1, 8, 6, 2],\n\n vec![1, 8, 2, 6],\n\n vec![8, 6, 1, 2],\n\n vec![8, 1, 6, 2],\n\n vec![8, 1, 2, 6],\n\n ]),\n\n\n\n (vec![&[1, 2][..], &[5][..], &[8, 6][..]], vec![\n\n vec![1, 2, 8, 6, 5],\n\n vec![1, 2, 8, 5, 6],\n", "file_path": "src/exhaust.rs", "rank": 19, "score": 17676.75394717541 }, { "content": "SQL 乱序执行\n\n============\n\n\n\n## README\n\n\n\n目前实现:假设文件中的 SQL 语言为一行一句。\n\n\n\nTODOs:\n\n\n\n- [ ] 解析 SQL 语句,支持 SQL 注释\n\n\n\n\n\n### Build & Run\n\n\n\n```sh\n\n# Build\n\ncargo build\n\n\n\n# Run unit tests\n\ncargo test\n\n\n\n# Show helps\n\ncargo run -- 
-h\n\n\n\nraindrop 0.1.0\n\n\n\nUSAGE:\n\n raindrop [FLAGS] [OPTIONS] --inputs <inputs>...\n\n\n\nFLAGS:\n\n -c, --check DONOT execute SQL statements\n\n -e, --execute Execute or not\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -i, --inputs <inputs>... Input paths\n\n -m, --mysql <mysql> MySQL URL\n\n\n\n# Run without excution\n\nenv RUST_LOG=debug cargo run -- -i tests/sql\n\n\n\n# Run with excution\n\nenv RUST_LOG=debug cargo run -- -i tests/sql -e --m 'mysql://root:123456@localhost:3306/db_name'\n\n\n\n```\n\n\n\n## 描述\n\n\n\nTiDB SQL 乱序执行框架\n\n\n\n在测试 TiDB 事务时,需要多个客户端乱序执行 SQL。\n\n\n\n以两个客户端的情况为例。首先启动两个 TiDB 客户端,客户端分别读取对应的 SQL 文件后,以交错顺序执行 SQL 文件中的语句。要求穷举所有执行顺序。\n\n\n", "file_path": "README.md", "rank": 27, "score": 11.461657829884189 }, { "content": "## Example\n\n\n\n给定 `sql1.txt`、`sql2.txt`, 内容如下:\n\n\n\n```sql\n\n-- sql1.txt:\n\nupdate X set a=5 where id=1;\n\nupdate X set a=6 where id=2;\n\n```\n\n```sql\n\n-- sql2.txt\n\nupdate X set a=8 where id=8\n\n```\n\n\n\n启动客户端 client1 读取 `sql1.txt`、client2 读取 `sql2.txt`。\n\n\n\n假设 client1 先执行第一条 sql 语句,client2 执行第一条,client1 再执行第二条。则执行顺序是:\n\n\n\n```\n\nclient1:update X set a=5 where id=1;\n\nclient1:update X set a=6 where id=2;\n\nclient2:update X set a=8 where id=8;\n\n```\n\n\n\n对这个 case,穷举所有可能,意味着执行顺序必须包含以下三种情况:\n\n\n\n- 情况 1:\n\n\n\n client1:update X set a=5 where id=1;\n\n client1:update X set a=6 where id=2;\n\n client2:update X set a=8 where id=8;\n\n\n\n- 情况 2:\n\n\n\n client1:update X set a=5 where id=1;\n\n client2:update X set a=8 where id=8;\n\n client1:update X set a=6 where id=2;\n\n\n\n- 情况 3:\n\n\n\n client2:update X set a=8 where id=8;\n\n client1:update X set a=5 where id=1;\n\n client1:update X set a=6 where id=2;\n\n\n\n## 要求\n\n\n\n1. [x] 写程序模拟多个客户端实现上述功能\n\n2. [x] 良好的代码设计,可读性,可维护性,可扩展性。\n\n3. 
[x] 以上可以在单机实现,用 VM 或者 Docker 启动 TiDB 集群不限\n\n\n\n\n\n## License\n\n\n\nThis project is licensed under the [MIT License](LICENSE).\n", "file_path": "README.md", "rank": 28, "score": 1.6735638442319307 } ]
Rust
alvr/experiments/client/src/xr/openxr/interaction.rs
AndresGroselj/ALVR
679f3a1cdaeff99322fb94560208949d75edef83
use super::{convert, SceneButtons, XrContext, XrHandPoseInput}; use crate::{ xr::{XrActionType, XrActionValue, XrHandTrackingInput, XrProfileDesc}, ViewConfig, }; use alvr_common::{prelude::*, MotionData}; use alvr_session::TrackingSpace; use openxr as xr; use std::collections::HashMap; const OCULUS_PROFILE: &str = "/interaction_profiles/oculus/touch_controller"; const SELECT_ACTION_NAME: &str = "alvr_scene_select"; const OCULUS_SELECT_PATHS: &[&str] = &[ "/user/hand/left/input/x/click", "/user/hand/right/input/a/click", "/user/hand/left/input/trigger", "/user/hand/right/input/trigger", ]; const MENU_ACTION_NAME: &str = "alvr_scene_menu"; const OCULUS_MENU_PATHS: &[&str] = &["/user/hand/left/input/menu/click"]; enum OpenxrButtonAction { Binary(xr::Action<bool>), Scalar(xr::Action<f32>), } struct HandTrackingContext { tracker: xr::HandTracker, target_ray_action: xr::Action<xr::Posef>, target_ray_space: xr::Space, } pub struct HandInteractionContext { grip_action: xr::Action<xr::Posef>, grip_space: xr::Space, hand_tracking_context: Option<HandTrackingContext>, vibration_action: xr::Action<xr::Haptic>, } pub struct OpenxrInteractionContext { session: xr::Session<xr::Vulkan>, action_set: xr::ActionSet, scene_select_action: xr::Action<bool>, scene_menu_action: xr::Action<bool>, streaming_button_actions: HashMap<String, OpenxrButtonAction>, pub reference_space: xr::Space, pub left_hand_interaction: HandInteractionContext, pub right_hand_interaction: HandInteractionContext, } impl OpenxrInteractionContext { fn get_hand_interaction( xr_context: &XrContext, session: xr::Session<xr::Vulkan>, action_set: &xr::ActionSet, hand: xr::Hand, ) -> StrResult<HandInteractionContext> { let hand_str = if hand == xr::Hand::LEFT { "alvr_left" } else { "alvr_right" }; let grip_action_name = format!("{}_grip", hand_str); let grip_action = trace_err!(action_set.create_action(&grip_action_name, &grip_action_name, &[]))?; let grip_space = trace_err!(grip_action.create_space( session.clone(), 
xr::Path::NULL, xr::Posef::IDENTITY ))?; let hand_tracking_context = if trace_err!(xr_context .instance .supports_hand_tracking(xr_context.system))? { let tracker = trace_err!(session.create_hand_tracker(hand))?; let target_ray_action_name = format!("{}_aim", hand_str); let target_ray_action = trace_err!(action_set.create_action( &target_ray_action_name, &target_ray_action_name, &[] ))?; let target_ray_space = trace_err!(target_ray_action.create_space( session, xr::Path::NULL, xr::Posef::IDENTITY ))?; Some(HandTrackingContext { tracker, target_ray_action, target_ray_space, }) } else { None }; let vibration_action_name = format!("{}_haptics", hand_str); let vibration_action = trace_err!(action_set.create_action( &vibration_action_name, &vibration_action_name, &[] ))?; Ok(HandInteractionContext { grip_action, grip_space, hand_tracking_context, vibration_action, }) } pub fn new( xr_context: &XrContext, session: xr::Session<xr::Vulkan>, stream_action_types: &[(String, XrActionType)], stream_profile_descs: Vec<XrProfileDesc>, ) -> StrResult<Self> { let action_set = trace_err!(xr_context .instance .create_action_set("alvr_bindings", "ALVR bindings", 0))?; let mut button_actions = HashMap::new(); button_actions.insert( SELECT_ACTION_NAME.to_owned(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( SELECT_ACTION_NAME, SELECT_ACTION_NAME, &[] ))?), ); button_actions.insert( MENU_ACTION_NAME.to_owned(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( MENU_ACTION_NAME, MENU_ACTION_NAME, &[] ))?), ); for (name, action_type) in stream_action_types { match action_type { XrActionType::Binary => button_actions.insert( name.clone(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( name, name, &[] ))?), ), XrActionType::Scalar => button_actions.insert( name.clone(), OpenxrButtonAction::Scalar(trace_err!(action_set.create_action( name, name, &[] ))?), ), }; } let left_hand_interaction = Self::get_hand_interaction(xr_context, 
session.clone(), &action_set, xr::Hand::LEFT)?; let right_hand_interaction = Self::get_hand_interaction(xr_context, session.clone(), &action_set, xr::Hand::RIGHT)?; let mut profile_descs = vec![]; for mut profile in stream_profile_descs { if profile.profile == OCULUS_PROFILE { profile.tracked = true; profile.has_haptics = true; for path in OCULUS_SELECT_PATHS { profile .button_bindings .push((SELECT_ACTION_NAME.to_owned(), (*path).to_owned())); } for path in OCULUS_MENU_PATHS { profile .button_bindings .push((MENU_ACTION_NAME.to_owned(), (*path).to_owned())); } } profile_descs.push(profile); } if profile_descs .iter() .any(|profile| profile.profile == OCULUS_PROFILE) { let mut button_bindings = vec![]; for path in OCULUS_SELECT_PATHS { button_bindings.push((SELECT_ACTION_NAME.to_owned(), (*path).to_owned())); } for path in OCULUS_MENU_PATHS { button_bindings.push((MENU_ACTION_NAME.to_owned(), (*path).to_owned())); } profile_descs.push(XrProfileDesc { profile: OCULUS_PROFILE.to_owned(), button_bindings, tracked: true, has_haptics: true, }) } for profile in profile_descs { let profile_path = trace_err!(xr_context.instance.string_to_path(&profile.profile))?; let mut bindings = vec![]; for (action_name, path_string) in &profile.button_bindings { let action = if let Some(res) = button_actions.get(action_name) { res } else { return fmt_e!("Action {} not defined", action_name); }; let path = trace_err!(xr_context.instance.string_to_path(path_string))?; match action { OpenxrButtonAction::Binary(action) => { bindings.push(xr::Binding::new(action, path)) } OpenxrButtonAction::Scalar(action) => { bindings.push(xr::Binding::new(action, path)) } } } if profile.tracked { bindings.push(xr::Binding::new( &left_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/input/grip/pose"))?, )); if let Some(hand_tracking_context) = &left_hand_interaction.hand_tracking_context { bindings.push(xr::Binding::new( 
&hand_tracking_context.target_ray_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/input/aim/pose"))?, )); } bindings.push(xr::Binding::new( &right_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/input/grip/pose"))?, )); if let Some(hand_tracking_context) = &right_hand_interaction.hand_tracking_context { bindings.push(xr::Binding::new( &hand_tracking_context.target_ray_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/input/aim/pose"))?, )); } } if profile.has_haptics { bindings.push(xr::Binding::new( &left_hand_interaction.vibration_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/output/haptic"))?, )); bindings.push(xr::Binding::new( &right_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/output/haptic"))?, )); } xr_context .instance .suggest_interaction_profile_bindings(profile_path, &bindings) .ok(); } trace_err!(session.attach_action_sets(&[&action_set]))?; let reference_space = trace_err!(session .create_reference_space(xr::ReferenceSpaceType::STAGE, xr::Posef::IDENTITY) .or_else(|_| { session.create_reference_space( xr::ReferenceSpaceType::LOCAL, xr::Posef { orientation: xr::Quaternionf::IDENTITY, position: xr::Vector3f { x: 0.0, y: -1.5, z: 0.0, }, }, ) }))?; let scene_select_action = match button_actions.remove(SELECT_ACTION_NAME).unwrap() { OpenxrButtonAction::Binary(action) => action, _ => unreachable!(), }; let scene_menu_action = match button_actions.remove(MENU_ACTION_NAME).unwrap() { OpenxrButtonAction::Binary(action) => action, _ => unreachable!(), }; Ok(Self { session, action_set, scene_select_action, scene_menu_action, streaming_button_actions: button_actions, reference_space, left_hand_interaction, right_hand_interaction, }) } pub fn sync_input(&self) -> StrResult { trace_err!(self.session.sync_actions(&[(&self.action_set).into()])) } pub fn get_views( &self, 
view_configuration_type: xr::ViewConfigurationType, display_time: xr::Time, ) -> StrResult<Vec<ViewConfig>> { let (_, views) = trace_err!(self.session.locate_views( view_configuration_type, display_time, &self.reference_space ))?; Ok(views .into_iter() .map(|view| ViewConfig { orientation: convert::from_xr_orientation(view.pose.orientation), position: convert::from_xr_vec3(view.pose.position), fov: convert::from_xr_fov(view.fov), }) .collect()) } fn get_motion(location: xr::SpaceLocation, velocity: xr::SpaceVelocity) -> MotionData { MotionData { orientation: convert::from_xr_orientation(location.pose.orientation), position: convert::from_xr_vec3(location.pose.position), linear_velocity: velocity.linear_velocity.map(convert::from_xr_vec3), angular_velocity: velocity.angular_velocity.map(convert::from_xr_vec3), } } pub fn get_poses( &self, hand_interaction: &HandInteractionContext, display_time: xr::Time, ) -> StrResult<XrHandPoseInput> { let (grip_location, grip_velocity) = trace_err!(hand_interaction .grip_space .relate(&self.reference_space, display_time))?; let grip_motion = Self::get_motion(grip_location, grip_velocity); let hand_tracking_input = if let Some(ctx) = &hand_interaction.hand_tracking_context { let (target_ray_location, target_ray_velocity) = trace_err!(ctx .target_ray_space .relate(&self.reference_space, display_time))?; let target_ray_motion = Self::get_motion(target_ray_location, target_ray_velocity); if let Some((joint_locations, joint_velocities)) = trace_err!(self .reference_space .relate_hand_joints(&ctx.tracker, display_time))? 
{ let skeleton_motion = joint_locations .iter() .zip(joint_velocities.iter()) .map(|(joint_location, joint_velocity)| MotionData { orientation: convert::from_xr_orientation(joint_location.pose.orientation), position: convert::from_xr_vec3(joint_location.pose.position), linear_velocity: joint_velocity .velocity_flags .contains(xr::SpaceVelocityFlags::LINEAR_VALID) .then(|| convert::from_xr_vec3(joint_velocity.linear_velocity)), angular_velocity: joint_velocity .velocity_flags .contains(xr::SpaceVelocityFlags::ANGULAR_VALID) .then(|| convert::from_xr_vec3(joint_velocity.angular_velocity)), }) .collect(); Some(XrHandTrackingInput { target_ray_motion, skeleton_motion, }) } else { None } } else { None }; Ok(XrHandPoseInput { grip_motion, hand_tracking_input, }) } pub fn get_scene_buttons(&self) -> StrResult<SceneButtons> { let select_state = trace_err!(self .scene_select_action .state(&self.session, xr::Path::NULL))?; let menu_state = trace_err!(self.scene_menu_action.state(&self.session, xr::Path::NULL))?; Ok(SceneButtons { select: select_state.current_state, menu: menu_state.current_state, }) } pub fn get_streming_buttons(&self) -> StrResult<HashMap<String, XrActionValue>> { let mut values = HashMap::new(); for (name, action) in &self.streaming_button_actions { match action { OpenxrButtonAction::Binary(action) => { values.insert( name.clone(), XrActionValue::Boolean( trace_err!(action.state(&self.session, xr::Path::NULL))?.current_state, ), ); } OpenxrButtonAction::Scalar(action) => { values.insert( name.clone(), XrActionValue::Scalar( trace_err!(action.state(&self.session, xr::Path::NULL))?.current_state, ), ); } } } Ok(values) } }
use super::{convert, SceneButtons, XrContext, XrHandPoseInput}; use crate::{ xr::{XrActionType, XrActionValue, XrHandTrackingInput, XrProfileDesc}, ViewConfig, }; use alvr_common::{prelude::*, MotionData}; use alvr_session::TrackingSpace; use openxr as xr; use std::collections::HashMap; const OCULUS_PROFILE: &str = "/interaction_profiles/oculus/touch_controller"; const SELECT_ACTION_NAME: &str = "alvr_scene_select"; const OCULUS_SELECT_PATHS: &[&str] = &[ "/user/hand/left/input/x/click", "/user/hand/right/input/a/click", "/user/hand/left/input/trigger", "/user/hand/right/input/trigger", ]; const MENU_ACTION_NAME: &str = "alvr_scene_menu"; const OCULUS_MENU_PATHS: &[&str] = &["/user/hand/left/input/menu/click"]; enum OpenxrButtonAction { Binary(xr::Action<bool>), Scalar(xr::Action<f32>), } struct HandTrackingContext { tracker: xr::HandTracker, target_ray_action: xr::Action<xr::Posef>, target_ray_space: xr::Space, } pub struct HandInteractionContext { grip_action: xr::Action<xr::Posef>, grip_space: xr::Space, hand_tracking_context: Option<HandTrackingContext>, vibration_action: xr::Action<xr::Haptic>, } pub struct OpenxrInteractionContext { session: xr::Session<xr::Vulkan>, action_set: xr::ActionSet, scene_select_action: xr::Action<bool>, scene_menu_action: xr::Action<bool>, streaming_button_actions: HashMap<String, OpenxrButtonAction>, pub reference_space: xr::Space, pub left_hand_interaction: HandInteractionContext, pub right_hand_interaction: HandInteractionContext, } impl OpenxrInteractionContext { fn get_hand_interaction( xr_context: &XrContext, session: xr::Session<xr::Vulkan>, action_set: &xr::ActionSet, hand: xr::Hand, ) -> StrResult<HandInteractionContext> { let hand_str = if hand == xr::Hand::LEFT { "alvr_left" } else { "alvr_right" }; let grip_action_name = format!("{}_grip", hand_str); let grip_action = trace_err!(action_set.create_action(&grip_action_name, &grip_action_name, &[]))?; let grip_space = trace_err!(grip_action.create_space( session.clone(), 
xr::Path::NULL, xr::Posef::IDENTITY ))?; let hand_tracking_context = if trace_err!(xr_context .instance .supports_hand_tracking(xr_context.system))? { let tracker = trace_err!(session.create_hand_tracker(hand))?; let target_ray_action_name = format!("{}_aim", hand_str);
let target_ray_space = trace_err!(target_ray_action.create_space( session, xr::Path::NULL, xr::Posef::IDENTITY ))?; Some(HandTrackingContext { tracker, target_ray_action, target_ray_space, }) } else { None }; let vibration_action_name = format!("{}_haptics", hand_str); let vibration_action = trace_err!(action_set.create_action( &vibration_action_name, &vibration_action_name, &[] ))?; Ok(HandInteractionContext { grip_action, grip_space, hand_tracking_context, vibration_action, }) } pub fn new( xr_context: &XrContext, session: xr::Session<xr::Vulkan>, stream_action_types: &[(String, XrActionType)], stream_profile_descs: Vec<XrProfileDesc>, ) -> StrResult<Self> { let action_set = trace_err!(xr_context .instance .create_action_set("alvr_bindings", "ALVR bindings", 0))?; let mut button_actions = HashMap::new(); button_actions.insert( SELECT_ACTION_NAME.to_owned(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( SELECT_ACTION_NAME, SELECT_ACTION_NAME, &[] ))?), ); button_actions.insert( MENU_ACTION_NAME.to_owned(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( MENU_ACTION_NAME, MENU_ACTION_NAME, &[] ))?), ); for (name, action_type) in stream_action_types { match action_type { XrActionType::Binary => button_actions.insert( name.clone(), OpenxrButtonAction::Binary(trace_err!(action_set.create_action( name, name, &[] ))?), ), XrActionType::Scalar => button_actions.insert( name.clone(), OpenxrButtonAction::Scalar(trace_err!(action_set.create_action( name, name, &[] ))?), ), }; } let left_hand_interaction = Self::get_hand_interaction(xr_context, session.clone(), &action_set, xr::Hand::LEFT)?; let right_hand_interaction = Self::get_hand_interaction(xr_context, session.clone(), &action_set, xr::Hand::RIGHT)?; let mut profile_descs = vec![]; for mut profile in stream_profile_descs { if profile.profile == OCULUS_PROFILE { profile.tracked = true; profile.has_haptics = true; for path in OCULUS_SELECT_PATHS { profile .button_bindings 
.push((SELECT_ACTION_NAME.to_owned(), (*path).to_owned())); } for path in OCULUS_MENU_PATHS { profile .button_bindings .push((MENU_ACTION_NAME.to_owned(), (*path).to_owned())); } } profile_descs.push(profile); } if profile_descs .iter() .any(|profile| profile.profile == OCULUS_PROFILE) { let mut button_bindings = vec![]; for path in OCULUS_SELECT_PATHS { button_bindings.push((SELECT_ACTION_NAME.to_owned(), (*path).to_owned())); } for path in OCULUS_MENU_PATHS { button_bindings.push((MENU_ACTION_NAME.to_owned(), (*path).to_owned())); } profile_descs.push(XrProfileDesc { profile: OCULUS_PROFILE.to_owned(), button_bindings, tracked: true, has_haptics: true, }) } for profile in profile_descs { let profile_path = trace_err!(xr_context.instance.string_to_path(&profile.profile))?; let mut bindings = vec![]; for (action_name, path_string) in &profile.button_bindings { let action = if let Some(res) = button_actions.get(action_name) { res } else { return fmt_e!("Action {} not defined", action_name); }; let path = trace_err!(xr_context.instance.string_to_path(path_string))?; match action { OpenxrButtonAction::Binary(action) => { bindings.push(xr::Binding::new(action, path)) } OpenxrButtonAction::Scalar(action) => { bindings.push(xr::Binding::new(action, path)) } } } if profile.tracked { bindings.push(xr::Binding::new( &left_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/input/grip/pose"))?, )); if let Some(hand_tracking_context) = &left_hand_interaction.hand_tracking_context { bindings.push(xr::Binding::new( &hand_tracking_context.target_ray_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/input/aim/pose"))?, )); } bindings.push(xr::Binding::new( &right_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/input/grip/pose"))?, )); if let Some(hand_tracking_context) = &right_hand_interaction.hand_tracking_context { bindings.push(xr::Binding::new( 
&hand_tracking_context.target_ray_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/input/aim/pose"))?, )); } } if profile.has_haptics { bindings.push(xr::Binding::new( &left_hand_interaction.vibration_action, trace_err!(xr_context .instance .string_to_path("/user/hand/left/output/haptic"))?, )); bindings.push(xr::Binding::new( &right_hand_interaction.grip_action, trace_err!(xr_context .instance .string_to_path("/user/hand/right/output/haptic"))?, )); } xr_context .instance .suggest_interaction_profile_bindings(profile_path, &bindings) .ok(); } trace_err!(session.attach_action_sets(&[&action_set]))?; let reference_space = trace_err!(session .create_reference_space(xr::ReferenceSpaceType::STAGE, xr::Posef::IDENTITY) .or_else(|_| { session.create_reference_space( xr::ReferenceSpaceType::LOCAL, xr::Posef { orientation: xr::Quaternionf::IDENTITY, position: xr::Vector3f { x: 0.0, y: -1.5, z: 0.0, }, }, ) }))?; let scene_select_action = match button_actions.remove(SELECT_ACTION_NAME).unwrap() { OpenxrButtonAction::Binary(action) => action, _ => unreachable!(), }; let scene_menu_action = match button_actions.remove(MENU_ACTION_NAME).unwrap() { OpenxrButtonAction::Binary(action) => action, _ => unreachable!(), }; Ok(Self { session, action_set, scene_select_action, scene_menu_action, streaming_button_actions: button_actions, reference_space, left_hand_interaction, right_hand_interaction, }) } pub fn sync_input(&self) -> StrResult { trace_err!(self.session.sync_actions(&[(&self.action_set).into()])) } pub fn get_views( &self, view_configuration_type: xr::ViewConfigurationType, display_time: xr::Time, ) -> StrResult<Vec<ViewConfig>> { let (_, views) = trace_err!(self.session.locate_views( view_configuration_type, display_time, &self.reference_space ))?; Ok(views .into_iter() .map(|view| ViewConfig { orientation: convert::from_xr_orientation(view.pose.orientation), position: convert::from_xr_vec3(view.pose.position), fov: convert::from_xr_fov(view.fov), 
}) .collect()) } fn get_motion(location: xr::SpaceLocation, velocity: xr::SpaceVelocity) -> MotionData { MotionData { orientation: convert::from_xr_orientation(location.pose.orientation), position: convert::from_xr_vec3(location.pose.position), linear_velocity: velocity.linear_velocity.map(convert::from_xr_vec3), angular_velocity: velocity.angular_velocity.map(convert::from_xr_vec3), } } pub fn get_poses( &self, hand_interaction: &HandInteractionContext, display_time: xr::Time, ) -> StrResult<XrHandPoseInput> { let (grip_location, grip_velocity) = trace_err!(hand_interaction .grip_space .relate(&self.reference_space, display_time))?; let grip_motion = Self::get_motion(grip_location, grip_velocity); let hand_tracking_input = if let Some(ctx) = &hand_interaction.hand_tracking_context { let (target_ray_location, target_ray_velocity) = trace_err!(ctx .target_ray_space .relate(&self.reference_space, display_time))?; let target_ray_motion = Self::get_motion(target_ray_location, target_ray_velocity); if let Some((joint_locations, joint_velocities)) = trace_err!(self .reference_space .relate_hand_joints(&ctx.tracker, display_time))? 
{ let skeleton_motion = joint_locations .iter() .zip(joint_velocities.iter()) .map(|(joint_location, joint_velocity)| MotionData { orientation: convert::from_xr_orientation(joint_location.pose.orientation), position: convert::from_xr_vec3(joint_location.pose.position), linear_velocity: joint_velocity .velocity_flags .contains(xr::SpaceVelocityFlags::LINEAR_VALID) .then(|| convert::from_xr_vec3(joint_velocity.linear_velocity)), angular_velocity: joint_velocity .velocity_flags .contains(xr::SpaceVelocityFlags::ANGULAR_VALID) .then(|| convert::from_xr_vec3(joint_velocity.angular_velocity)), }) .collect(); Some(XrHandTrackingInput { target_ray_motion, skeleton_motion, }) } else { None } } else { None }; Ok(XrHandPoseInput { grip_motion, hand_tracking_input, }) } pub fn get_scene_buttons(&self) -> StrResult<SceneButtons> { let select_state = trace_err!(self .scene_select_action .state(&self.session, xr::Path::NULL))?; let menu_state = trace_err!(self.scene_menu_action.state(&self.session, xr::Path::NULL))?; Ok(SceneButtons { select: select_state.current_state, menu: menu_state.current_state, }) } pub fn get_streming_buttons(&self) -> StrResult<HashMap<String, XrActionValue>> { let mut values = HashMap::new(); for (name, action) in &self.streaming_button_actions { match action { OpenxrButtonAction::Binary(action) => { values.insert( name.clone(), XrActionValue::Boolean( trace_err!(action.state(&self.session, xr::Path::NULL))?.current_state, ), ); } OpenxrButtonAction::Scalar(action) => { values.insert( name.clone(), XrActionValue::Scalar( trace_err!(action.state(&self.session, xr::Path::NULL))?.current_state, ), ); } } } Ok(values) } }
let target_ray_action = trace_err!(action_set.create_action( &target_ray_action_name, &target_ray_action_name, &[] ))?;
assignment_statement
[ { "content": "pub fn create_graphics_context(xr_context: &XrContext) -> StrResult<GraphicsContext> {\n\n let entry = unsafe { ash::Entry::load().unwrap() };\n\n\n\n let raw_instance = unsafe {\n\n let extensions_ptrs =\n\n convert::get_vulkan_instance_extensions(&entry, TARGET_VULKAN_VERSION)?\n\n .iter()\n\n .map(|x| x.as_ptr())\n\n .collect::<Vec<_>>();\n\n let layers = vec![CStr::from_bytes_with_nul(b\"VK_LAYER_KHRONOS_validation\\0\").unwrap()];\n\n let layers_ptrs = layers.iter().map(|x| x.as_ptr()).collect::<Vec<_>>();\n\n\n\n let raw_instance_ptr =\n\n trace_err!(trace_err!(xr_context.instance.create_vulkan_instance(\n\n xr_context.system,\n\n mem::transmute(entry.static_fn().get_instance_proc_addr),\n\n &vk::InstanceCreateInfo::builder()\n\n .application_info(\n\n &vk::ApplicationInfo::builder().api_version(TARGET_VULKAN_VERSION),\n\n )\n", "file_path": "alvr/experiments/client/src/xr/openxr/graphics_interop.rs", "rank": 0, "score": 280584.2430260204 }, { "content": "pub fn to_xr_vec3(v: Vec3) -> xr::Vector3f {\n\n xr::Vector3f {\n\n x: v.x,\n\n y: v.y,\n\n z: v.z,\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 2, "score": 253754.75690579953 }, { "content": "pub fn from_xr_vec3(v: xr::Vector3f) -> Vec3 {\n\n Vec3::new(v.x, v.y, v.z)\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 3, "score": 253754.7569057995 }, { "content": "pub fn to_xr_fov(fov: Fov) -> xr::Fovf {\n\n xr::Fovf {\n\n angle_left: fov.left,\n\n angle_right: fov.right,\n\n angle_up: fov.top,\n\n angle_down: fov.bottom,\n\n }\n\n}\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 4, "score": 249422.79094371284 }, { "content": "pub fn from_xr_orientation(quat: xr::Quaternionf) -> Quat {\n\n Quat::from_xyzw(quat.x, quat.y, quat.z, quat.w)\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 5, "score": 249422.7909437128 }, { "content": "pub fn 
from_xr_fov(fov: xr::Fovf) -> Fov {\n\n Fov {\n\n left: fov.angle_left,\n\n right: fov.angle_right,\n\n top: fov.angle_up,\n\n bottom: fov.angle_down,\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 6, "score": 249422.79094371284 }, { "content": "pub fn to_xr_orientation(quat: Quat) -> xr::Quaternionf {\n\n xr::Quaternionf {\n\n x: quat.x,\n\n y: quat.y,\n\n z: quat.z,\n\n w: quat.w,\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/xr/openxr/convert.rs", "rank": 7, "score": 249422.79094371284 }, { "content": "pub fn create_swapchain(\n\n device: &Device,\n\n session: &xr::Session<xr::Vulkan>,\n\n size: UVec2,\n\n) -> OpenxrSwapchain {\n\n const FORMAT: vk::Format = vk::Format::R8G8B8A8_SRGB;\n\n\n\n let usage = xr::SwapchainUsageFlags::COLOR_ATTACHMENT | xr::SwapchainUsageFlags::SAMPLED;\n\n // This corresponds to USAGE\n\n let hal_usage = hal::TextureUses::COLOR_TARGET | hal::TextureUses::RESOURCE;\n\n\n\n let swapchain = session\n\n .create_swapchain(&xr::SwapchainCreateInfo {\n\n create_flags: xr::SwapchainCreateFlags::EMPTY,\n\n usage_flags: usage,\n\n format: FORMAT.as_raw() as _,\n\n sample_count: 1,\n\n width: size.x,\n\n height: size.y,\n\n face_count: 1,\n", "file_path": "alvr/experiments/client/src/xr/openxr/graphics_interop.rs", "rank": 8, "score": 248623.82005538698 }, { "content": "pub fn load_session(path: &Path) -> StrResult<SessionDesc> {\n\n trace_err!(json::from_str(&trace_err!(fs::read_to_string(path))?))\n\n}\n\n\n", "file_path": "alvr/session/src/lib.rs", "rank": 9, "score": 240173.97132615972 }, { "content": "pub fn save_session(session_desc: &SessionDesc, path: &Path) -> StrResult {\n\n trace_err!(fs::write(\n\n path,\n\n trace_err!(json::to_string_pretty(session_desc))?\n\n ))\n\n}\n\n\n\n// This structure is used to store the minimum configuration data that ALVR driver needs to\n\n// initialize OpenVR before having the chance to communicate with a client. 
When a client is\n\n// connected, a new OpenvrConfig instance is generated, then the connection is accepted only if that\n\n// instance is equivalent to the one stored in the session, otherwise SteamVR is restarted.\n\n// Other components (like the encoder, audio recorder) don't need this treatment and are initialized\n\n// dynamically.\n\n// todo: properties that can be set after the OpenVR initialization should be removed and set with\n\n// UpdateForStream.\n\n#[derive(Serialize, Deserialize, PartialEq, Default, Clone, Debug)]\n\npub struct OpenvrConfig {\n\n pub universe_id: u64,\n\n pub headset_serial_number: String,\n\n pub headset_tracking_system_name: String,\n", "file_path": "alvr/session/src/lib.rs", "rank": 10, "score": 234202.8487906429 }, { "content": "pub fn session_settings_default() -> SettingsDefault {\n\n SettingsDefault {\n\n video: VideoDescDefault {\n\n adapter_index: 0,\n\n render_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n\n recommended_target_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n", "file_path": "alvr/session/src/settings.rs", "rank": 11, "score": 210135.34924311642 }, { "content": "// Create wgpu-compatible Vulkan instance. 
Corresponds to xrCreateVulkanInstanceKHR\n\npub fn create_vulkan_instance(\n\n entry: &ash::Entry,\n\n info: &vk::InstanceCreateInfo,\n\n) -> StrResult<ash::Instance> {\n\n let mut extensions_ptrs =\n\n get_vulkan_instance_extensions(entry, unsafe { (*info.p_application_info).api_version })?\n\n .iter()\n\n .map(|x| x.as_ptr())\n\n .collect::<Vec<_>>();\n\n\n\n extensions_ptrs.extend_from_slice(unsafe {\n\n slice::from_raw_parts(\n\n info.pp_enabled_extension_names,\n\n info.enabled_extension_count as _,\n\n )\n\n });\n\n\n\n let layers = vec![CStr::from_bytes_with_nul(b\"VK_LAYER_KHRONOS_validation\\0\").unwrap()];\n\n let layers_ptrs = layers.iter().map(|x| x.as_ptr()).collect::<Vec<_>>();\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 12, "score": 207324.5799377233 }, { "content": "pub fn build_deps(target_os: &str) {\n\n if target_os == \"android\" {\n\n command::run(\"rustup target add aarch64-linux-android\").unwrap();\n\n command::run(\"cargo install cargo-apk\").unwrap();\n\n\n\n build_rust_android_gradle();\n\n get_oculus_openxr_mobile_loader();\n\n } else {\n\n println!(\"Nothing to do for {}!\", target_os)\n\n }\n\n}\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 13, "score": 205499.85768081108 }, { "content": "// Get extensions needed by wgpu. 
Corresponds to xrGetVulkanInstanceExtensionsKHR\n\npub fn get_vulkan_instance_extensions(\n\n entry: &ash::Entry,\n\n version: u32,\n\n) -> StrResult<Vec<&'static CStr>> {\n\n let mut flags = hal::InstanceFlags::empty();\n\n if cfg!(debug_assertions) {\n\n flags |= hal::InstanceFlags::VALIDATION;\n\n flags |= hal::InstanceFlags::DEBUG;\n\n }\n\n\n\n trace_err!(<hal::api::Vulkan as hal::Api>::Instance::required_extensions(entry, flags))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 14, "score": 203102.1163806538 }, { "content": "#[cfg(windows)]\n\npub fn exec_fname(name: &str) -> String {\n\n format!(\"{}.exe\", name)\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 16, "score": 198561.03166860362 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn dynlib_fname(name: &str) -> String {\n\n format!(\"lib{}.dylib\", name)\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 17, "score": 198561.03166860362 }, { "content": "#[cfg(feature = \"new-dashboard\")]\n\npub fn ui_thread() -> StrResult {\n\n let mut engine = rhai::Engine::new();\n\n\n\n let mut scope = rhai::Scope::new();\n\n engine.register_fn(\"load_session\", load_session);\n\n engine.register_fn(\"store_session\", store_session);\n\n engine.register_fn(\"add_client\", add_client);\n\n engine.register_fn(\"trust_client\", trust_client);\n\n engine.register_fn(\"remove_client\", remove_client);\n\n\n\n let dashboard = Arc::new(Dashboard::new());\n\n\n\n *MAYBE_NEW_DASHBOARD.lock() = Some(Arc::clone(&dashboard));\n\n\n\n dashboard.run(\n\n SESSION_MANAGER.lock().get().clone(),\n\n Box::new(move |command| {\n\n // Each time the handler is invoked, the command might request access to the session.\n\n // Keep the session manager locked during the evaluation of the command to avoid race\n\n // conditions\n", "file_path": "alvr/server/src/dashboard.rs", "rank": 18, "score": 198326.18935951983 }, { "content": "fn bump_cargo_version(crate_dir_name: &str, 
new_version: &str) {\n\n let manifest_path = packages_dir().join(crate_dir_name).join(\"Cargo.toml\");\n\n\n\n let manifest = fs::read_to_string(&manifest_path).unwrap();\n\n\n\n let (file_start, _, file_end) = split_string(&manifest, \"version = \\\"\", '\\\"');\n\n let manifest = format!(\"{}{}{}\", file_start, new_version, file_end);\n\n\n\n fs::write(manifest_path, manifest).unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 19, "score": 195998.6155348557 }, { "content": "pub fn unblock_alvr_addon() -> StrResult {\n\n let config_path = alvr_commands::steam_config_dir()?.join(\"steamvr.vrsettings\");\n\n\n\n let mut fields_ref: json::Map<String, json::Value> = trace_err!(json::from_str(&trace_err!(\n\n fs::read_to_string(&config_path)\n\n )?))?;\n\n\n\n fields_ref.remove(\"driver_alvr_server\");\n\n\n\n trace_err!(fs::write(\n\n config_path,\n\n trace_err!(json::to_string_pretty(&fields_ref))?\n\n ))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 20, "score": 194105.15995353836 }, { "content": "pub fn maybe_register_alvr_driver() -> StrResult {\n\n let alvr_driver_dir = afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap())\n\n .openvr_driver_root_dir;\n\n\n\n let driver_registered = alvr_commands::get_driver_dir_from_registered()\n\n .ok()\n\n .filter(|dir| *dir == alvr_driver_dir)\n\n .is_some();\n\n\n\n if !driver_registered {\n\n let paths_backup = match alvr_commands::get_registered_drivers() {\n\n Ok(paths) => paths,\n\n Err(e) => {\n\n return fmt_e!(\n\n \"{}\\n{}\\n\\n({})\",\n\n \"Failed to load registered drivers.\",\n\n \"Please reset the drivers installation with the apposite button on the launcher.\",\n\n e\n\n )\n\n }\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 21, "score": 190105.45577288605 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn maybe_wrap_vrcompositor_launcher() -> StrResult {\n\n let steamvr_bin_dir = alvr_commands::steamvr_root_dir()?\n\n 
.join(\"bin\")\n\n .join(\"linux64\");\n\n let real_launcher_path = steamvr_bin_dir.join(\"vrcompositor.real\");\n\n let launcher_path = steamvr_bin_dir.join(\"vrcompositor\");\n\n\n\n // In case of SteamVR update, vrcompositor will be restored\n\n match fs::read_link(&launcher_path) {\n\n Err(_) => match fs::metadata(&launcher_path) {\n\n Err(_) => (), //file does not exist, do nothing\n\n Ok(_) => {\n\n trace_err!(fs::rename(&launcher_path, &real_launcher_path))?;\n\n }\n\n },\n\n Ok(_) => trace_err!(fs::remove_file(&launcher_path))?, // recreate the link\n\n };\n\n\n\n trace_err!(std::os::unix::fs::symlink(\n\n afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap())\n\n .vrcompositor_wrapper(),\n\n &launcher_path\n\n ))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 22, "score": 190105.45577288605 }, { "content": "pub fn log_event(id: ServerEvent) {\n\n log::info!(\"#{}#\", serde_json::to_string(&id).unwrap());\n\n}\n", "file_path": "alvr/session/src/events.rs", "rank": 23, "score": 187835.50819210123 }, { "content": "pub fn run_without_shell(cmd: &str, args: &[&str]) -> Result<(), Box<dyn Error>> {\n\n println!(\n\n \"\\n> {}\",\n\n args.iter().fold(String::from(cmd), |s, arg| s + \" \" + arg)\n\n );\n\n let output = Command::new(cmd)\n\n .args(args)\n\n .stdout(Stdio::inherit())\n\n .spawn()?\n\n .wait_with_output()?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n Err(Box::new(StringError(format!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n ))))\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 24, "score": 183413.8622505902 }, { "content": "pub fn load_asset(fname: &str) -> Vec<u8> {\n\n let asset_manager = ndk_glue::native_activity().asset_manager();\n\n let fname_cstring = CString::new(fname).unwrap();\n\n let mut asset = asset_manager.open(fname_cstring.as_c_str()).unwrap();\n\n asset.get_buffer().unwrap().to_vec()\n\n}\n\n\n", 
"file_path": "alvr/experiments/client/src/storage/android.rs", "rank": 25, "score": 181829.11381850878 }, { "content": "pub fn load_asset(fname: &str) -> Vec<u8> {\n\n // todo\n\n vec![]\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/desktop.rs", "rank": 26, "score": 181829.11381850878 }, { "content": "pub fn load_config() -> StrResult<Config> {\n\n let maybe_config = serde_json::from_str(&load_config_string());\n\n\n\n if let Ok(config) = maybe_config {\n\n Ok(config)\n\n } else {\n\n let config = Config::default();\n\n store_config(&config)?;\n\n\n\n Ok(config)\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/mod.rs", "rank": 27, "score": 180043.43858705464 }, { "content": "pub fn steamvr_root_dir() -> StrResult<PathBuf> {\n\n get_single_openvr_path(\"runtime\")\n\n}\n\n\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 28, "score": 180043.43858705467 }, { "content": "pub fn steam_config_dir() -> StrResult<PathBuf> {\n\n get_single_openvr_path(\"config\")\n\n}\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 29, "score": 180043.43858705467 }, { "content": "pub fn split_string(source: &str, start_pattern: &str, end: char) -> (String, String, String) {\n\n let start_idx = source.find(start_pattern).unwrap() + start_pattern.len();\n\n let end_idx = start_idx + source[start_idx..].find(end).unwrap();\n\n\n\n (\n\n source[..start_idx].to_owned(),\n\n source[start_idx..end_idx].to_owned(),\n\n source[end_idx..].to_owned(),\n\n )\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 30, "score": 177985.89687456764 }, { "content": "fn create_layer_views<'a>(\n\n acquired_swapchains: &'a mut [AcquiredXrSwapchain],\n\n view_configs: &'a [ViewConfig],\n\n) -> Vec<xr::CompositionLayerProjectionView<'a, xr::Vulkan>> {\n\n acquired_swapchains\n\n .iter_mut()\n\n .enumerate()\n\n .map(|(index, swapchain)| {\n\n let view_config = view_configs\n\n .get(index)\n\n .cloned()\n\n .unwrap_or_else(|| ViewConfig {\n\n 
orientation: Quat::IDENTITY,\n\n position: Vec3::ZERO,\n\n fov: Fov::default(),\n\n });\n\n\n\n swapchain.handle_lock.release_image().unwrap();\n\n\n\n let rect = xr::Rect2Di {\n", "file_path": "alvr/experiments/client/src/xr/openxr/mod.rs", "rank": 31, "score": 177827.07185290157 }, { "content": "pub fn run(cmd: &str) -> Result<(), Box<dyn Error>> {\n\n run_in(&env::current_dir().unwrap(), cmd)\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 32, "score": 177783.14631203804 }, { "content": "pub fn openvr_source_file_path() -> StrResult<PathBuf> {\n\n let path = trace_none!(if cfg!(windows) {\n\n dirs::cache_dir()\n\n } else {\n\n dirs::config_dir()\n\n })?\n\n .join(\"openvr/openvrpaths.vrpath\");\n\n\n\n if path.exists() {\n\n Ok(path)\n\n } else {\n\n fmt_e!(\"{} does not exist\", path.to_string_lossy())\n\n }\n\n}\n\n\n\npub(crate) fn load_openvr_paths_json() -> StrResult<json::Value> {\n\n let file = trace_err!(File::open(openvr_source_file_path()?))?;\n\n\n\n let mut file_content_decoded = String::new();\n\n trace_err!(DecodeReaderBytes::new(&file).read_to_string(&mut file_content_decoded))?;\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 33, "score": 176437.18691401728 }, { "content": "pub fn get_devices_list() -> StrResult<AudioDevicesList> {\n\n let host = cpal::default_host();\n\n\n\n let output = trace_err!(host.output_devices())?\n\n .filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n let input = trace_err!(host.input_devices())?\n\n .filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n\n\n Ok(AudioDevicesList { output, input })\n\n}\n\n\n\npub enum AudioDeviceType {\n\n Output,\n\n Input,\n\n\n\n // for the virtual microphone devices, input and output labels are swapped\n\n VirtualMicrophoneInput,\n\n VirtualMicrophoneOutput { matching_input_device_name: String },\n", "file_path": "alvr/audio/src/lib.rs", "rank": 34, "score": 176437.18691401728 }, { "content": "pub fn get_driver_dir() -> StrResult<PathBuf> 
{\n\n get_driver_dir_from_registered()\n\n .map_err(|e| format!(\"ALVR driver path not stored and not registered ({})\", e))\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 35, "score": 176437.18691401728 }, { "content": "pub fn restart_steamvr(launcher_path: &Path) -> StrResult {\n\n invoke_launcher(launcher_path, \"--restart-steamvr\")\n\n}\n\n\n", "file_path": "alvr/commands/src/launcher.rs", "rank": 36, "score": 175419.5598186266 }, { "content": "pub fn get_driver_dir_from_registered() -> StrResult<PathBuf> {\n\n for dir in get_registered_drivers()? {\n\n let maybe_driver_name = || -> StrResult<_> {\n\n let manifest_string =\n\n trace_err!(fs::read_to_string(dir.join(\"driver.vrdrivermanifest\")))?;\n\n let mut manifest_map = trace_err!(json::from_str::<HashMap<String, json::Value>>(\n\n &manifest_string\n\n ))?;\n\n\n\n trace_none!(manifest_map.remove(\"name\"))\n\n }();\n\n\n\n if maybe_driver_name == Ok(json::Value::String(\"alvr_server\".to_owned())) {\n\n return Ok(dir);\n\n }\n\n }\n\n fmt_e!(\"ALVR driver path not registered\")\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 37, "score": 173006.23755620507 }, { "content": "pub fn invoke_application_update(launcher_path: &Path) -> StrResult {\n\n invoke_launcher(launcher_path, \"--update\")\n\n}\n", "file_path": "alvr/commands/src/launcher.rs", "rank": 38, "score": 171813.3081455892 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn get_screen_size() -> StrResult<(u32, u32)> {\n\n Ok((0, 0))\n\n}\n", "file_path": "alvr/server/src/graphics_info.rs", "rank": 39, "score": 171813.3081455892 }, { "content": "pub fn store_config(config: &Config) -> StrResult {\n\n store_config_string(trace_err!(serde_json::to_string(config))?);\n\n\n\n Ok(())\n\n}\n", "file_path": "alvr/experiments/client/src/storage/mod.rs", "rank": 40, "score": 171813.3081455892 }, { "content": "pub fn get_registered_drivers() -> StrResult<Vec<PathBuf>> {\n\n 
Ok(crate::from_openvr_paths(trace_none!(\n\n crate::load_openvr_paths_json()?.get_mut(\"external_drivers\")\n\n )?))\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 41, "score": 168382.35878777702 }, { "content": "pub fn download(url: &str, destination: &Path) -> Result<(), Box<dyn Error>> {\n\n run_without_shell(\n\n \"curl\",\n\n &[\"-o\", &destination.to_string_lossy(), \"--url\", url],\n\n )\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 42, "score": 165517.37056439338 }, { "content": "pub fn run_in(workdir: &Path, cmd: &str) -> Result<(), Box<dyn Error>> {\n\n let shell = if cfg!(windows) { \"cmd\" } else { \"bash\" };\n\n let shell_flag = if cfg!(windows) { \"/C\" } else { \"-c\" };\n\n\n\n run_as_shell_in(workdir, shell, shell_flag, cmd)\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 43, "score": 165517.37056439338 }, { "content": "pub fn get_sample_rate(device: &AudioDevice) -> StrResult<u32> {\n\n let maybe_config_range = trace_err!(device.inner.supported_output_configs())?.next();\n\n let config = if let Some(config) = maybe_config_range {\n\n config\n\n } else {\n\n trace_none!(trace_err!(device.inner.supported_input_configs())?.next())?\n\n };\n\n\n\n // Assumption: device is in shared mode: this means that there is one and fixed sample rate,\n\n // format and channel count\n\n Ok(config.min_sample_rate().0)\n\n}\n\n\n\npub async fn record_audio_loop(\n\n device: AudioDevice,\n\n channels_count: u16,\n\n sample_rate: u32,\n\n #[cfg_attr(not(windows), allow(unused_variables))] mute: bool,\n\n mut sender: StreamSender<()>,\n\n) -> StrResult {\n", "file_path": "alvr/audio/src/lib.rs", "rank": 44, "score": 164830.15239474401 }, { "content": "// Bash can be invoked on Windows if WSL is installed\n\npub fn run_as_bash_in(workdir: &Path, cmd: &str) -> Result<(), Box<dyn Error>> {\n\n run_as_shell_in(workdir, \"bash\", \"-c\", cmd)\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 45, 
"score": 162249.24448413312 }, { "content": "pub fn create_identity(hostname: Option<String>) -> StrResult<PrivateIdentity> {\n\n let hostname = hostname.unwrap_or(format!(\"{}.client.alvr\", rand::random::<u16>()));\n\n\n\n let certificate = trace_err!(rcgen::generate_simple_self_signed([hostname.clone()]))?;\n\n\n\n Ok(PrivateIdentity {\n\n hostname,\n\n certificate_pem: trace_err!(certificate.serialize_pem())?,\n\n key_pem: certificate.serialize_private_key_pem(),\n\n })\n\n}\n\n\n\nmod util {\n\n use alvr_common::prelude::*;\n\n use std::future::Future;\n\n use tokio::{sync::oneshot, task};\n\n\n\n // Tokio tasks are not cancelable. This function awaits a cancelable task.\n\n pub async fn spawn_cancelable(\n\n future: impl Future<Output = StrResult> + Send + 'static,\n", "file_path": "alvr/sockets/src/lib.rs", "rank": 46, "score": 162015.73897971178 }, { "content": "pub fn apply_driver_paths_backup(driver_dir: PathBuf) -> StrResult {\n\n if driver_paths_backup_present() {\n\n let backup_path = env::temp_dir().join(DRIVER_PATHS_BACKUP_FNAME);\n\n let driver_paths = trace_err!(json::from_str::<Vec<_>>(&trace_err!(fs::read_to_string(\n\n &backup_path\n\n ))?))?;\n\n trace_err!(fs::remove_file(backup_path))?;\n\n\n\n driver_registration(&[driver_dir], false)?;\n\n\n\n driver_registration(&driver_paths, true).ok();\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 47, "score": 161997.60785402614 }, { "content": "#[cfg(windows)]\n\npub fn get_windows_device_id(device: &AudioDevice) -> StrResult<String> {\n\n unsafe {\n\n let mm_device = get_windows_device(device)?;\n\n\n\n let mut id_str_ptr = ptr::null_mut();\n\n mm_device.GetId(&mut id_str_ptr);\n\n let id_str = trace_err!(U16CStr::from_ptr_str(id_str_ptr).to_string())?;\n\n CoTaskMemFree(id_str_ptr as _);\n\n\n\n Ok(id_str)\n\n }\n\n}\n\n\n\n// device must be an output device\n", "file_path": "alvr/audio/src/lib.rs", "rank": 48, "score": 161562.02631448375 }, { 
"content": "// this will not kill the child process \"ALVR launcher\"\n\npub fn kill_steamvr() {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n // first kill vrmonitor, then kill vrserver if it is hung.\n\n\n\n for process in system.process_by_name(&afs::exec_fname(\"vrmonitor\")) {\n\n #[cfg(not(windows))]\n\n process.kill(sysinfo::Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid());\n\n }\n\n\n\n thread::sleep(Duration::from_secs(1));\n\n\n\n for process in system.process_by_name(&afs::exec_fname(\"vrserver\")) {\n\n #[cfg(not(windows))]\n\n process.kill(sysinfo::Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid());\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 49, "score": 161140.06141156275 }, { "content": "pub fn fix_steamvr() {\n\n // If ALVR driver does not start use a more destructive approach: delete openvrpaths.vrpath then recreate it\n\n if let Ok(path) = alvr_commands::openvr_source_file_path() {\n\n fs::remove_file(path).ok();\n\n\n\n maybe_launch_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n kill_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n }\n\n\n\n unblock_alvr_addon().ok();\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 50, "score": 161140.06141156275 }, { "content": "pub fn shutdown_runtime() {\n\n alvr_session::log_event(ServerEvent::ServerQuitting);\n\n\n\n if let Some(window) = MAYBE_WINDOW.lock().take() {\n\n window.close();\n\n }\n\n\n\n SHUTDOWN_NOTIFIER.notify_waiters();\n\n\n\n if let Some(runtime) = RUNTIME.lock().take() {\n\n runtime.shutdown_background();\n\n // shutdown_background() is non blocking and it does not guarantee that every internal\n\n // thread is terminated in a timely manner. 
Using shutdown_background() instead of just\n\n // dropping the runtime has the benefit of giving SteamVR a chance to clean itself as\n\n // much as possible before the process is killed because of alvr_launcher timeout.\n\n }\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 51, "score": 161140.06141156275 }, { "content": "#[cfg_attr(target_os = \"android\", ndk_glue::main)]\n\npub fn main() {\n\n env_logger::init();\n\n log::error!(\"enter main\");\n\n\n\n show_err(run());\n\n\n\n #[cfg(target_os = \"android\")]\n\n ndk_glue::native_activity().finish();\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/lib.rs", "rank": 52, "score": 161140.06141156275 }, { "content": "pub fn invoke_installer() {\n\n try_close_steamvr_gracefully();\n\n\n\n spawn_no_window(Command::new(afs::installer_path()).arg(\"-q\"));\n\n\n\n // delete crash_log.txt (take advantage of the occasion to do some routine cleaning)\n\n fs::remove_file(\n\n afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap()).crash_log(),\n\n )\n\n .ok();\n\n}\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 53, "score": 161140.06141156275 }, { "content": "pub fn run_as_shell_in(\n\n workdir: &Path,\n\n shell: &str,\n\n shell_flag: &str,\n\n cmd: &str,\n\n) -> Result<(), Box<dyn Error>> {\n\n println!(\"\\n> {}\", cmd);\n\n\n\n let output = Command::new(shell)\n\n .args(&[shell_flag, cmd])\n\n .stdout(Stdio::inherit())\n\n .current_dir(workdir)\n\n .spawn()?\n\n .wait_with_output()?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n Err(Box::new(StringError(format!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n ))))\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 54, "score": 161140.06141156275 }, { "content": "pub fn build_server(\n\n is_release: bool,\n\n experiments: bool,\n\n fetch_crates: bool,\n\n bundle_ffmpeg: bool,\n\n root: Option<String>,\n\n reproducible: bool,\n\n) {\n\n // Always use CustomRoot for 
contructing the build directory. The actual runtime layout is respected\n\n let layout = Layout::new(&afs::server_build_dir());\n\n\n\n let build_type = if is_release { \"release\" } else { \"debug\" };\n\n\n\n let build_flags = format!(\n\n \"{} {}\",\n\n if is_release { \"--release\" } else { \"\" },\n\n if reproducible {\n\n \"--offline --locked\"\n\n } else {\n\n \"\"\n", "file_path": "alvr/xtask/src/main.rs", "rank": 55, "score": 161140.06141156275 }, { "content": "pub fn restart_steamvr() {\n\n try_close_steamvr_gracefully();\n\n\n\n if alvr_common::show_err(maybe_register_alvr_driver()).is_some() {\n\n maybe_launch_steamvr();\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 56, "score": 161140.06141156275 }, { "content": "// Current data extrapolation strategy: match both field name and value type exactly.\n\n// Integer bounds are not validated, if they do not match the schema, deserialization will fail and\n\n// all data is lost.\n\n// Future strategies: check if value respects schema constraints, fuzzy field name matching, accept\n\n// integer to float and float to integer, tree traversal.\n\nfn extrapolate_session_settings_from_session_settings(\n\n old_session_settings: &json::Value,\n\n new_session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section { entries } => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n maybe_data.as_ref().map(|data_schema| {\n\n let value_json =\n\n if let Some(new_value_json) = new_session_settings.get(field_name) {\n\n extrapolate_session_settings_from_session_settings(\n\n &old_session_settings[field_name],\n\n new_value_json,\n\n &data_schema.content,\n\n )\n\n } else {\n\n old_session_settings[field_name].clone()\n", "file_path": "alvr/session/src/lib.rs", "rank": 57, "score": 160194.8175316103 }, { "content": "pub fn maybe_save_driver_paths_backup(paths_backup: &[PathBuf]) -> StrResult {\n\n 
if !driver_paths_backup_present() {\n\n trace_err!(fs::write(\n\n env::temp_dir().join(DRIVER_PATHS_BACKUP_FNAME),\n\n trace_err!(json::to_string_pretty(paths_backup))?,\n\n ))?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 58, "score": 159022.18811422342 }, { "content": "pub fn notify_shutdown_driver() {\n\n thread::spawn(|| {\n\n RESTART_NOTIFIER.notify_waiters();\n\n\n\n // give time to the control loop to send the restart packet (not crucial)\n\n thread::sleep(Duration::from_millis(100));\n\n\n\n shutdown_runtime();\n\n\n\n unsafe { ShutdownSteamvr() };\n\n });\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 59, "score": 157992.3014690029 }, { "content": "// Avoid Oculus link popups when debugging the client\n\npub fn kill_oculus_processes() {\n\n command::run_without_shell(\n\n \"powershell\",\n\n &[\n\n \"Start-Process\",\n\n \"taskkill\",\n\n \"-ArgumentList\",\n\n \"\\\"/F /IM OVR* /T\\\"\",\n\n \"-Verb\",\n\n \"runAs\",\n\n ],\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 60, "score": 157992.3014690029 }, { "content": "pub fn notify_application_update() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::invoke_application_update(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n\npub enum ClientListAction {\n\n AddIfMissing { display_name: String },\n\n TrustAndMaybeAddIp(Option<IpAddr>),\n\n RemoveIpOrEntry(Option<IpAddr>),\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 61, "score": 157992.3014690029 }, { "content": "pub fn init_logging() {\n\n #[cfg(target_os = \"android\")]\n\n android_logger::init_once(\n\n android_logger::Config::default()\n\n .with_tag(\"[ALVR NATIVE-RUST]\")\n\n .with_min_level(log::Level::Info),\n\n );\n\n\n\n alvr_common::set_panic_hook();\n\n}\n", "file_path": "alvr/client/src/logging_backend.rs", "rank": 62, "score": 157992.3014690029 }, { "content": "pub fn set_panic_hook() {\n\n 
std::panic::set_hook(Box::new(|panic_info| {\n\n let message = panic_info\n\n .payload()\n\n .downcast_ref::<&str>()\n\n .unwrap_or(&\"Unavailable\");\n\n let err_str = format!(\n\n \"Message: {:?}\\nBacktrace:\\n{:?}\",\n\n message,\n\n backtrace::Backtrace::new()\n\n );\n\n\n\n log::error!(\"{}\", err_str);\n\n\n\n #[cfg(windows)]\n\n std::thread::spawn(move || {\n\n msgbox::create(\"ALVR panicked\", &err_str, msgbox::IconType::Error).ok();\n\n });\n\n }))\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 63, "score": 157992.3014690029 }, { "content": "pub fn notify_restart_driver() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::restart_steamvr(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 64, "score": 157992.3014690029 }, { "content": "pub fn maybe_launch_steamvr() {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n if system\n\n .process_by_name(&afs::exec_fname(\"vrserver\"))\n\n .is_empty()\n\n {\n\n #[cfg(windows)]\n\n spawn_no_window(Command::new(\"cmd\").args(&[\"/C\", \"start\", \"steam://rungameid/250820\"]));\n\n #[cfg(not(windows))]\n\n spawn_no_window(Command::new(\"steam\").args(&[\"steam://rungameid/250820\"]));\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 65, "score": 157992.3014690029 }, { "content": "pub fn remove_build_dir() {\n\n let build_dir = afs::build_dir();\n\n fs::remove_dir_all(&build_dir).ok();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 66, "score": 157992.3014690029 }, { "content": "fn add_client(hostname: &str, ip: &str) {\n\n println!(\"add_client\");\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/main.rs", "rank": 67, "score": 156921.2038318538 }, { "content": "pub fn driver_registration(driver_paths: &[PathBuf], register: bool) -> StrResult {\n\n let mut openvr_paths_json = crate::load_openvr_paths_json()?;\n\n let paths_json_ref = 
trace_none!(openvr_paths_json.get_mut(\"external_drivers\"))?;\n\n\n\n let mut paths: HashSet<_> = crate::from_openvr_paths(paths_json_ref)\n\n .into_iter()\n\n .collect();\n\n\n\n if register {\n\n paths.extend(driver_paths.iter().cloned());\n\n } else {\n\n for path in driver_paths {\n\n paths.remove(path);\n\n }\n\n }\n\n\n\n // write into openvr_paths_json, the other fields are preserved\n\n *paths_json_ref = crate::to_openvr_paths(paths.into_iter().collect::<Vec<_>>().as_slice());\n\n\n\n crate::save_openvr_paths_json(&openvr_paths_json)\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 68, "score": 155630.9880459609 }, { "content": "// session_settings does not get validated here, it must be already valid\n\nfn json_session_settings_to_settings(\n\n session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section { entries } => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n maybe_data.as_ref().map(|data_schema| {\n\n (\n\n field_name.clone(),\n\n json_session_settings_to_settings(\n\n &session_settings[field_name],\n\n &data_schema.content,\n\n ),\n\n )\n\n })\n\n })\n\n .collect(),\n", "file_path": "alvr/session/src/lib.rs", "rank": 69, "score": 155290.8140038279 }, { "content": "// Create wgpu-compatible Vulkan device. 
Corresponds to xrCreateVulkanDeviceKHR\n\npub fn create_vulkan_device(\n\n entry: ash::Entry,\n\n version: u32,\n\n instance: &ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n create_info: &vk::DeviceCreateInfo,\n\n) -> StrResult<ash::Device> {\n\n let temp_adapter =\n\n get_temporary_hal_adapter(entry, version, instance.clone(), physical_device)?;\n\n\n\n let wgpu_extensions = temp_adapter\n\n .adapter\n\n .required_device_extensions(temp_adapter.features);\n\n let mut extensions_ptrs = wgpu_extensions\n\n .iter()\n\n .map(|x| x.as_ptr())\n\n .collect::<Vec<_>>();\n\n let mut enabled_phd_features = temp_adapter.adapter.physical_device_features(\n\n &wgpu_extensions,\n\n temp_adapter.features,\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 70, "score": 155025.2642475532 }, { "content": "pub fn create_texture_set(\n\n device: &Device,\n\n data: SwapchainCreateData,\n\n info: SwapchainCreateInfo,\n\n) -> Vec<Texture> {\n\n let wgpu_usage = {\n\n let mut wgpu_usage = TextureUsages::empty();\n\n\n\n if info.usage.contains(SwapchainUsageFlags::SAMPLED) {\n\n wgpu_usage |= TextureUsages::TEXTURE_BINDING;\n\n }\n\n if info.usage.contains(SwapchainUsageFlags::COLOR_ATTACHMENT) {\n\n wgpu_usage |= TextureUsages::RENDER_ATTACHMENT;\n\n }\n\n if info\n\n .usage\n\n .contains(SwapchainUsageFlags::DEPTH_STENCIL_ATTACHMENT)\n\n {\n\n wgpu_usage |= TextureUsages::RENDER_ATTACHMENT;\n\n }\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 71, "score": 155018.72407772753 }, { "content": "pub fn execute_default_pass(\n\n encoder: &mut CommandEncoder,\n\n pipeline: &RenderPipeline,\n\n bind_group: &BindGroup,\n\n push_constants: &[u8],\n\n output: &TextureView,\n\n) {\n\n let mut pass = encoder.begin_render_pass(&RenderPassDescriptor {\n\n color_attachments: &[RenderPassColorAttachment {\n\n view: output,\n\n resolve_target: None,\n\n ops: Operations {\n\n load: LoadOp::Clear(Color::BLACK),\n\n store: true,\n\n },\n\n }],\n\n 
..Default::default()\n\n });\n\n\n\n pass.set_pipeline(pipeline);\n\n pass.set_bind_group(0, bind_group, &[]);\n\n pass.set_push_constants(ShaderStages::FRAGMENT, 0, push_constants);\n\n\n\n pass.draw(0..4, 0..1);\n\n\n\n // here the pass is dropped and applied to the command encoder\n\n}\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 72, "score": 155018.72407772753 }, { "content": "#[inline]\n\npub fn get_next_frame_batch(\n\n sample_buffer: &mut VecDeque<f32>,\n\n channels_count: usize,\n\n batch_frames_count: usize,\n\n) -> Vec<f32> {\n\n if sample_buffer.len() / channels_count >= batch_frames_count {\n\n let mut batch = sample_buffer\n\n .drain(0..batch_frames_count * channels_count)\n\n .collect::<Vec<_>>();\n\n\n\n if sample_buffer.len() / channels_count < batch_frames_count {\n\n // Render fade-out. It is completely contained in the current batch\n\n for f in 0..batch_frames_count {\n\n let volume = 1. - f as f32 / batch_frames_count as f32;\n\n for c in 0..channels_count {\n\n batch[f * channels_count + c] *= volume;\n\n }\n\n }\n\n }\n\n // fade-ins and cross-fades are rendered in the receive loop directly inside sample_buffer.\n", "file_path": "alvr/audio/src/lib.rs", "rank": 73, "score": 155018.72407772753 }, { "content": "pub fn split(\n\n graphics_context: Arc<GraphicsContext>,\n\n codec_type: CodecType,\n\n csd_0: &[u8],\n\n extra_options: &[(String, MediacodecDataType)],\n\n output_texture: Arc<Texture>,\n\n output_size: UVec2,\n\n slice_index: u32,\n\n) -> StrResult<(\n\n VideoDecoderEnqueuer,\n\n VideoDecoderDequeuer,\n\n VideoDecoderFrameGrabber,\n\n)> {\n\n let mut swapchain = trace_err!(ImageReader::new_with_usage(\n\n 1,\n\n 1,\n\n ImageFormat::YUV_420_888,\n\n HardwareBufferUsage::GPU_SAMPLED_IMAGE,\n\n 3, // to avoid a deadlock, a triple buffered swapchain is required\n\n ))?;\n", "file_path": "alvr/experiments/client/src/video_decoder/mediacodec.rs", "rank": 74, "score": 155018.72407772753 }, { "content": "fn 
store_session(session: Dynamic) -> String {\n\n match rhai::serde::from_dynamic(&session) {\n\n Ok(res) => {\n\n *SESSION.lock().unwrap() = res;\n\n SESSION_MODIFIED.store(true, Ordering::Relaxed);\n\n\n\n println!(\"store_session\");\n\n\n\n \"\".into()\n\n }\n\n Err(e) => e.to_string(),\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/main.rs", "rank": 75, "score": 153029.25635052024 }, { "content": "// Corresponds to xrGetVulkanGraphicsDeviceKHR\n\npub fn get_vulkan_graphics_device(\n\n instance: &ash::Instance,\n\n adapter_index: Option<usize>,\n\n) -> StrResult<vk::PhysicalDevice> {\n\n let mut physical_devices = unsafe { trace_err!(instance.enumerate_physical_devices())? };\n\n\n\n Ok(physical_devices.remove(adapter_index.unwrap_or(0)))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 76, "score": 152212.03110397284 }, { "content": "// Hal adapter used to get required device extensions and features\n\npub fn get_temporary_hal_adapter(\n\n entry: ash::Entry,\n\n version: u32,\n\n instance: ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n) -> StrResult<hal::ExposedAdapter<hal::api::Vulkan>> {\n\n let instance_extensions = get_vulkan_instance_extensions(&entry, version)?;\n\n\n\n let mut flags = hal::InstanceFlags::empty();\n\n if cfg!(debug_assertions) {\n\n flags |= hal::InstanceFlags::VALIDATION;\n\n flags |= hal::InstanceFlags::DEBUG;\n\n };\n\n\n\n let hal_instance = unsafe {\n\n trace_err!(<hal::api::Vulkan as hal::Api>::Instance::from_raw(\n\n entry,\n\n instance,\n\n version,\n\n instance_extensions,\n\n flags,\n\n false,\n\n None, // <-- the instance is not destroyed on drop\n\n ))?\n\n };\n\n\n\n trace_none!(hal_instance.expose_adapter(physical_device))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 77, "score": 152211.8229194252 }, { "content": "// All bindings map to the bind group 0\n\npub fn create_default_render_pipeline(\n\n label: &str,\n\n device: &Device,\n\n 
fragment_shader: &str,\n\n bindings: Vec<BindingDesc>,\n\n push_constants_size: usize,\n\n) -> (RenderPipeline, BindGroup) {\n\n let quad_shader = quad_shader(device);\n\n\n\n let fragment_shader = device.create_shader_module(&ShaderModuleDescriptor {\n\n label: Some(label),\n\n source: ShaderSource::Wgsl(fragment_shader.into()),\n\n });\n\n\n\n let layout_entries = bindings\n\n .iter()\n\n .map(|binding| BindGroupLayoutEntry {\n\n binding: binding.index,\n\n visibility: ShaderStages::FRAGMENT,\n\n ty: binding.binding_type,\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 78, "score": 152205.2607880035 }, { "content": "pub fn ipc_listen<CS, CR, SR>(name: &str) -> StrResult<(IpcServer<CS, CR>, IpcSseSender<SR>)> {\n\n let request_socket = trace_err!(trace_err!(LocalSocketListener::bind(format!(\n\n \"/tmp/alvr_{}_request.sock\",\n\n name\n\n )))?\n\n .accept())?;\n\n\n\n request_socket.set_nonblocking(true).unwrap();\n\n\n\n // Wait for the client to setup the sse socket listener\n\n thread::sleep(Duration::from_millis(100));\n\n\n\n let sse_sender = trace_err!(LocalSocketStream::connect(format!(\n\n \"/tmp/alvr_{}_sse.sock\",\n\n name\n\n )))?;\n\n\n\n Ok((\n\n IpcServer {\n\n socket: request_socket,\n\n _phantom: PhantomData,\n\n },\n\n IpcSseSender {\n\n socket: sse_sender,\n\n _phantom: PhantomData,\n\n },\n\n ))\n\n}\n", "file_path": "alvr/experiments/ipc/src/lib.rs", "rank": 79, "score": 152168.58686734366 }, { "content": "pub fn ipc_connect<CS, CR, SR>(name: &str) -> StrResult<(IpcClient<CS, CR>, IpcSseReceiver<SR>)> {\n\n let request_socket = trace_err!(LocalSocketStream::connect(format!(\n\n \"/tmp/alvr_{}_request.sock\",\n\n name\n\n )))?;\n\n let sse_socket = trace_err!(trace_err!(LocalSocketListener::bind(format!(\n\n \"/tmp/alvr_{}_sse.sock\",\n\n name\n\n )))?\n\n .accept())?;\n\n\n\n sse_socket.set_nonblocking(true).unwrap();\n\n\n\n Ok((\n\n IpcClient {\n\n socket: request_socket,\n\n _phantom: PhantomData,\n\n },\n\n IpcSseReceiver 
{\n\n socket: sse_socket,\n", "file_path": "alvr/experiments/ipc/src/lib.rs", "rank": 80, "score": 152168.58686734366 }, { "content": "pub fn version() -> String {\n\n let manifest_path = packages_dir().join(\"common\").join(\"Cargo.toml\");\n\n println!(\"cargo:rerun-if-changed={}\", manifest_path.to_string_lossy());\n\n\n\n let manifest = fs::read_to_string(manifest_path).unwrap();\n\n let (_, version, _) = split_string(&manifest, \"version = \\\"\", '\\\"');\n\n\n\n version\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 81, "score": 152165.97573091785 }, { "content": "pub fn is_steamvr_running() -> bool {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n !system\n\n .process_by_name(&afs::exec_fname(\"vrserver\"))\n\n .is_empty()\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 82, "score": 149018.215788358 }, { "content": "fn store_session(session: rhai::Dynamic) -> String {\n\n match rhai::serde::from_dynamic(&session) {\n\n Ok(res) => {\n\n *TEMP_SESSION.lock() = res;\n\n TEMP_SESSION_MODIFIED.store(true, Ordering::Relaxed);\n\n\n\n \"\".into()\n\n }\n\n Err(e) => e.to_string(),\n\n }\n\n}\n\n\n", "file_path": "alvr/server/src/dashboard.rs", "rank": 83, "score": 148402.76030566634 }, { "content": "fn remove_client(hostname: &str) {\n\n println!(\"remove_client\");\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/main.rs", "rank": 84, "score": 146076.68925392322 }, { "content": "fn trust_client(hostname: &str) {\n\n println!(\"trust_client\");\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/main.rs", "rank": 85, "score": 146076.68925392322 }, { "content": "fn log(message: &str) {\n\n let c_string = CString::new(message).unwrap();\n\n unsafe { drv::_log(c_string.as_ptr()) };\n\n}\n\n\n", "file_path": "alvr/experiments/openvr_driver/src/lib.rs", "rank": 86, "score": 146076.68925392322 }, { "content": "// Use this when there is no way of 
determining the current path. The reulting Layout paths will\n\n// be invalid, expect for the ones that disregard the relative path (for example the config dir) and\n\n// the ones that have been overridden.\n\npub fn filesystem_layout_from_invalid() -> Layout {\n\n LAYOUT.clone().unwrap_or_else(|| Layout::new(Path::new(\"\")))\n\n}\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 87, "score": 146050.57886855493 }, { "content": "pub fn date_utc_yyyymmdd() -> String {\n\n let output = if cfg!(windows) {\n\n Command::new(\"powershell\")\n\n .arg(\"(Get-Date).ToUniversalTime().ToString(\\\"yyyy.MM.dd\\\")\")\n\n .output()\n\n .unwrap()\n\n } else {\n\n Command::new(\"date\")\n\n .args(&[\"-u\", \"+%Y.%m.%d\"])\n\n .output()\n\n .unwrap()\n\n };\n\n\n\n String::from_utf8_lossy(&output.stdout)\n\n .as_ref()\n\n .to_owned()\n\n .replace('\\r', \"\")\n\n .replace('\\n', \"\")\n\n}\n", "file_path": "alvr/xtask/src/command.rs", "rank": 88, "score": 146044.63839708266 }, { "content": "pub fn check_steamvr_installation() -> bool {\n\n alvr_commands::openvr_source_file_path().is_ok()\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 89, "score": 146044.63839708266 }, { "content": "pub fn build_dir() -> PathBuf {\n\n workspace_dir().join(\"build\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 90, "score": 146044.63839708266 }, { "content": "pub fn deps_dir() -> PathBuf {\n\n workspace_dir().join(\"deps\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 91, "score": 146044.63839708266 }, { "content": "pub fn installer_path() -> PathBuf {\n\n env::temp_dir().join(exec_fname(\"alvr_installer\"))\n\n}\n\n\n\n// Layout of the ALVR installation. 
All paths are absolute\n\n#[derive(Clone)]\n\npub struct Layout {\n\n // directory containing the launcher executable\n\n pub executables_dir: PathBuf,\n\n // (linux only) directory where alvr_vulkan_layer.so is saved\n\n pub libraries_dir: PathBuf,\n\n // parent directory of resources like the dashboard and presets folders\n\n pub static_resources_dir: PathBuf,\n\n // directory for storing configuration files (session.json)\n\n pub config_dir: PathBuf,\n\n // directory for storing log\n\n pub log_dir: PathBuf,\n\n // directory to register in openVR driver path\n\n pub openvr_driver_root_dir: PathBuf,\n\n // (linux only) parent directory of the executable to wrap vrcompositor\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 92, "score": 146044.63839708266 }, { "content": "pub fn workspace_dir() -> PathBuf {\n\n Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"../..\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 93, "score": 146044.63839708266 }, { "content": "pub fn target_dir() -> PathBuf {\n\n Path::new(env!(\"OUT_DIR\")).join(\"../../../..\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 94, "score": 146044.63839708266 }, { "content": " pub version: u32,\n\n pub raw_instance: ash::Instance,\n\n pub raw_physical_device: vk::PhysicalDevice,\n\n pub raw_device: ash::Device,\n\n pub queue_family_index: u32,\n\n pub queue_index: u32,\n\n pub drop_guard: Option<Box<dyn Any + Send + Sync>>,\n\n}\n\n\n\nimpl GraphicsContext {\n\n // This constructor is used primarily for the vulkan layer. It corresponds to xrCreateSession\n\n // with GraphicsBindingVulkanKHR. 
If owned == false, this Context must be dropped before\n\n // destroying vk_instance and vk_device.\n\n pub fn from_vulkan(desc: GraphicsContextVulkanInitDesc) -> StrResult<Self> {\n\n let mut flags = hal::InstanceFlags::empty();\n\n if cfg!(debug_assertions) {\n\n flags |= hal::InstanceFlags::VALIDATION;\n\n flags |= hal::InstanceFlags::DEBUG;\n\n };\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 99, "score": 34.65559628069361 } ]
Rust
src/message/commands/clearchat.rs
Chronophylos/twitch-irc-rs
bf9792ebc085c08bb2e7e49e158ef2ae3e4412c6
use crate::message::commands::IRCMessageParseExt; use crate::message::{IRCMessage, ServerMessageParseError}; use chrono::{DateTime, Utc}; use std::convert::TryFrom; use std::str::FromStr; use std::time::Duration; #[derive(Debug, Clone, PartialEq)] pub struct ClearChatMessage { pub channel_login: String, pub channel_id: String, pub action: ClearChatAction, pub server_timestamp: DateTime<Utc>, pub source: IRCMessage, } #[derive(Debug, Clone, PartialEq)] pub enum ClearChatAction { ChatCleared, UserBanned { user_login: String, user_id: String, }, UserTimedOut { user_login: String, user_id: String, timeout_length: Duration, }, } impl TryFrom<IRCMessage> for ClearChatMessage { type Error = ServerMessageParseError; fn try_from(source: IRCMessage) -> Result<ClearChatMessage, ServerMessageParseError> { if source.command != "CLEARCHAT" { return Err(ServerMessageParseError::MismatchedCommand(source)); } let action = match source.params.get(1) { Some(user_login) => { let user_id = source.try_get_nonempty_tag_value("target-user-id")?; let ban_duration = source.try_get_optional_nonempty_tag_value("ban-duration")?; match ban_duration { Some(ban_duration) => { let ban_duration = u64::from_str(ban_duration).map_err(|_| { ServerMessageParseError::MalformedTagValue( source.to_owned(), "ban-duration", ban_duration.to_owned(), ) })?; ClearChatAction::UserTimedOut { user_login: user_login.to_owned(), user_id: user_id.to_owned(), timeout_length: Duration::from_secs(ban_duration), } } None => ClearChatAction::UserBanned { user_login: user_login.to_owned(), user_id: user_id.to_owned(), }, } } None => ClearChatAction::ChatCleared, }; Ok(ClearChatMessage { channel_login: source.try_get_channel_login()?.to_owned(), channel_id: source.try_get_nonempty_tag_value("room-id")?.to_owned(), action, server_timestamp: source.try_get_timestamp("tmi-sent-ts")?, source, }) } } impl From<ClearChatMessage> for IRCMessage { fn from(msg: ClearChatMessage) -> IRCMessage { msg.source } } #[cfg(test)] mod tests 
{ use crate::message::commands::clearchat::ClearChatAction; use crate::message::{ClearChatMessage, IRCMessage}; use chrono::{TimeZone, Utc}; use std::convert::TryFrom; use std::time::Duration; #[test] pub fn test_timeout() { let src = "@ban-duration=1;room-id=11148817;target-user-id=148973258;tmi-sent-ts=1594553828245 :tmi.twitch.tv CLEARCHAT #pajlada :fabzeef"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "pajlada".to_owned(), channel_id: "11148817".to_owned(), action: ClearChatAction::UserTimedOut { user_login: "fabzeef".to_owned(), user_id: "148973258".to_owned(), timeout_length: Duration::from_secs(1) }, server_timestamp: Utc.timestamp_millis(1594553828245), source: irc_message } ) } #[test] pub fn test_permaban() { let src = "@room-id=11148817;target-user-id=70948394;tmi-sent-ts=1594561360331 :tmi.twitch.tv CLEARCHAT #pajlada :weeb123"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "pajlada".to_owned(), channel_id: "11148817".to_owned(), action: ClearChatAction::UserBanned { user_login: "weeb123".to_owned(), user_id: "70948394".to_owned(), }, server_timestamp: Utc.timestamp_millis(1594561360331), source: irc_message } ) } #[test] pub fn test_chat_clear() { let src = "@room-id=40286300;tmi-sent-ts=1594561392337 :tmi.twitch.tv CLEARCHAT #randers"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "randers".to_owned(), channel_id: "40286300".to_owned(), action: ClearChatAction::ChatCleared, server_timestamp: Utc.timestamp_millis(1594561392337), source: irc_message } ) } }
use crate::message::commands::IRCMessageParseExt; use crate::message::{IRCMessage, ServerMessageParseError}; use chrono::{DateTime, Utc}; use std::convert::TryFrom; use std::str::FromStr; use std::time::Duration; #[derive(Debug, Clone, PartialEq)] pub struct ClearChatMessage { pub channel_login: String, pub channel_id: String, pub action: ClearChatAction, pub server_timestamp: DateTime<Utc>, pub source: IRCMessage, } #[derive(Debug, Clone, PartialEq)] pub enum ClearChatAction { ChatCleared, UserBanned { user_login: String, user_id: String, }, UserTimedOut { user_login: String, user_id: String, timeout_length: Duration, }, } impl TryFrom<IRCMessage> for ClearChatMessage { type Error = ServerMessageParseError; fn try_from(source: IRCMessage) -> Result<ClearChatMessage, ServerMessageParseError> { if source.command != "CLEARCHAT" { return Err(ServerMessageParseError::MismatchedCommand(source)); } let action = match source.params.get(1) { Some(user_login) => { let user_id = source.try_get_nonempty_tag_value("target-user-id")?; let ban_duration = source.try_get_optional_nonempty_tag_value("ban-duration")?; match ban_duration { Some(ban_duration) => { let ban_duration = u64::from_str(ban_duration).map_err(|_| { ServerMessageParseError::MalformedTagValue( source.to_owned(), "ban-duration", ban_duration.to_owned(), ) })?; ClearChatAction::UserTimedOut { user_login: user_login.to_owned(), user_id: user_id.to_owned(), timeout_length: Duration::from_secs(ban_duration), } } None => ClearChatAction::UserBanned { user_login: user_login.to_owned(), user_id: user_id.to_owned(), }, } } None => ClearChatAction::ChatCleared, }; Ok(ClearChatMessage { channel_login: source.try_get_channel_login()?.to_owned(), channel_id: source.try_get_nonempty_tag_value("room-id")?.to_owned(), action, server_timestamp: source.try_get_timestamp("tmi-sent-ts")?, source, }) } } impl From<ClearChatMessage> for IRCMessage { fn from(msg: ClearChatMessage) -> IRCMessage { msg.source } } #[cfg(test)] mod tests 
{ use crate::message::commands::clearchat::ClearChatAction; use crate::message::{ClearChatMessage, IRCMessage}; use chrono::{TimeZone, Utc}; use std::convert::TryFrom; use std::time::Duration; #[test]
#[test] pub fn test_permaban() { let src = "@room-id=11148817;target-user-id=70948394;tmi-sent-ts=1594561360331 :tmi.twitch.tv CLEARCHAT #pajlada :weeb123"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "pajlada".to_owned(), channel_id: "11148817".to_owned(), action: ClearChatAction::UserBanned { user_login: "weeb123".to_owned(), user_id: "70948394".to_owned(), }, server_timestamp: Utc.timestamp_millis(1594561360331), source: irc_message } ) } #[test] pub fn test_chat_clear() { let src = "@room-id=40286300;tmi-sent-ts=1594561392337 :tmi.twitch.tv CLEARCHAT #randers"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "randers".to_owned(), channel_id: "40286300".to_owned(), action: ClearChatAction::ChatCleared, server_timestamp: Utc.timestamp_millis(1594561392337), source: irc_message } ) } }
pub fn test_timeout() { let src = "@ban-duration=1;room-id=11148817;target-user-id=148973258;tmi-sent-ts=1594553828245 :tmi.twitch.tv CLEARCHAT #pajlada :fabzeef"; let irc_message = IRCMessage::parse(src).unwrap(); let msg = ClearChatMessage::try_from(irc_message.clone()).unwrap(); assert_eq!( msg, ClearChatMessage { channel_login: "pajlada".to_owned(), channel_id: "11148817".to_owned(), action: ClearChatAction::UserTimedOut { user_login: "fabzeef".to_owned(), user_id: "148973258".to_owned(), timeout_length: Duration::from_secs(1) }, server_timestamp: Utc.timestamp_millis(1594553828245), source: irc_message } ) }
function_block-full_function
[ { "content": "fn encode_tag_value(raw: &str) -> String {\n\n let mut output = String::with_capacity((raw.len() as f64 * 1.2) as usize);\n\n\n\n for c in raw.chars() {\n\n match c {\n\n ';' => output.push_str(\"\\\\:\"),\n\n ' ' => output.push_str(\"\\\\s\"),\n\n '\\\\' => output.push_str(\"\\\\\\\\\"),\n\n '\\r' => output.push_str(\"\\\\r\"),\n\n '\\n' => output.push_str(\"\\\\n\"),\n\n c => output.push(c),\n\n };\n\n }\n\n\n\n output\n\n}\n\n\n\n/// A map of key-value [IRCv3 tags](https://ircv3.net/specs/extensions/message-tags.html).\n\n///\n\n/// # Examples\n", "file_path": "src/message/tags.rs", "rank": 0, "score": 76830.00017543783 }, { "content": "fn decode_tag_value(raw: &str) -> String {\n\n let mut output = String::with_capacity(raw.len());\n\n\n\n let mut iter = raw.chars();\n\n while let Some(c) = iter.next() {\n\n if c == '\\\\' {\n\n let next_char = iter.next();\n\n match next_char {\n\n Some(':') => output.push(';'), // \\: escapes to ;\n\n Some('s') => output.push(' '), // \\s decodes to a space\n\n Some('\\\\') => output.push('\\\\'), // \\\\ decodes to \\\n\n Some('r') => output.push('\\r'), // \\r decodes to CR\n\n Some('n') => output.push('\\n'), // \\n decodes to LF\n\n Some(c) => output.push(c), // E.g. 
a\\bc escapes to abc\n\n None => {} // Dangling \\ at the end of the string\n\n }\n\n } else {\n\n // No escape sequence here\n\n output.push(c);\n\n }\n\n }\n\n output\n\n}\n\n\n", "file_path": "src/message/tags.rs", "rank": 1, "score": 76830.00017543783 }, { "content": "/// Anything that can be converted into the raw IRC wire format.\n\npub trait AsRawIRC {\n\n /// Writes the raw IRC message to the given formatter.\n\n fn format_as_raw_irc(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result;\n\n /// Creates a new string with the raw IRC message.\n\n ///\n\n /// The resulting output string is guaranteed to parse to the same value it was created from,\n\n /// but due to protocol ambiguity it is not guaranteed to be identical to the input\n\n /// the value was parsed from (if it was parsed at all).\n\n ///\n\n /// For example, the order of tags might differ, or the use of trailing parameters\n\n /// might be different.\n\n fn as_raw_irc(&self) -> String\n\n where\n\n Self: Sized,\n\n {\n\n format!(\"{}\", RawIRCDisplay(self))\n\n }\n\n}\n\n\n\n/// A protocol-level IRC message, with arbitrary command, parameters, tags and prefix.\n", "file_path": "src/message/mod.rs", "rank": 2, "score": 70855.91333688803 }, { "content": "struct RawIRCDisplay<'a, T: AsRawIRC>(&'a T);\n\n\n\nimpl<'a, T: AsRawIRC> fmt::Display for RawIRCDisplay<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.0.format_as_raw_irc(f)\n\n }\n\n}\n\n\n", "file_path": "src/message/mod.rs", "rank": 3, "score": 58183.89021760483 }, { "content": "#[async_trait]\n\npub trait Transport: Sized + Send + Sync + Debug + 'static {\n\n /// Error type for creating a new connection via `new()`\n\n type ConnectError: Send + Sync + Debug + Display;\n\n /// Error type returned from the `Self::Incoming` stream type.\n\n type IncomingError: Send + Sync + Debug + Display;\n\n /// Error type returned from the `Self::Outgoing` sink type.\n\n type OutgoingError: Send + Sync + Debug + 
Display;\n\n\n\n /// Type of stream of incoming messages.\n\n type Incoming: FusedStream<Item = Result<IRCMessage, Either<Self::IncomingError, IRCParseError>>>\n\n + Unpin\n\n + Send\n\n + Sync;\n\n /// Type of outgoing messages sink.\n\n type Outgoing: Sink<IRCMessage, Error = Self::OutgoingError> + Unpin + Send + Sync;\n\n\n\n /// Try to create and connect a new `Transport` of this type. Returns `Ok(Self)` after\n\n /// the connection was established successfully.\n\n async fn new() -> Result<Self, Self::ConnectError>;\n\n /// Split this transport into its incoming and outgoing halves (streams).\n\n fn split(self) -> (Self::Incoming, Self::Outgoing);\n\n}\n", "file_path": "src/transport/mod.rs", "rank": 4, "score": 57505.14533679337 }, { "content": "type CommandQueue<T, L> = VecDeque<(IRCMessage, Option<oneshot::Sender<Result<(), Error<T, L>>>>)>;\n", "file_path": "src/connection/event_loop.rs", "rank": 5, "score": 52216.19280797691 }, { "content": "type MessageSender<T, L> =\n\n mpsc::UnboundedSender<(IRCMessage, Option<Sender<Result<(), Error<T, L>>>>)>;\n\n\n", "file_path": "src/connection/event_loop.rs", "rank": 6, "score": 40220.121123954304 }, { "content": "type MessageReceiver<T, L> =\n\n mpsc::UnboundedReceiver<(IRCMessage, Option<Sender<Result<(), Error<T, L>>>>)>;\n", "file_path": "src/connection/event_loop.rs", "rank": 7, "score": 40220.121123954304 }, { "content": "#[cfg(feature = \"refreshing-token\")]\n\n#[async_trait]\n\npub trait TokenStorage: Debug + Send + 'static {\n\n /// Possible error type when trying to load the token from this storage.\n\n type LoadError: Send + Sync + Debug + Display;\n\n /// Possible error type when trying to update the token in this storage.\n\n type UpdateError: Send + Sync + Debug + Display;\n\n\n\n /// Load the currently stored token from the storage.\n\n async fn load_token(&mut self) -> Result<UserAccessToken, Self::LoadError>;\n\n /// Called after the token was updated successfully, to save the new token.\n\n /// 
After `update_token()` completes, the `load_token()` method should then return\n\n /// that token for future invocations\n\n async fn update_token(&mut self, token: &UserAccessToken) -> Result<(), Self::UpdateError>;\n\n}\n\n\n\n/// Login credentials backed by a token storage and using OAuth refresh tokens, allowing use of OAuth tokens that expire\n\n#[cfg(feature = \"refreshing-token\")]\n\n#[derive(Debug)]\n\npub struct RefreshingLoginCredentials<S: TokenStorage> {\n\n http_client: reqwest::Client,\n\n // TODO we could fetch this using the API, based on the token provided.\n", "file_path": "src/login.rs", "rank": 8, "score": 37906.06976975539 }, { "content": "#[async_trait]\n\npub trait LoginCredentials: Debug + Send + Sync + 'static {\n\n /// Error type that can occur when trying to fetch the credentials.\n\n type Error: Send + Sync + Debug + Display;\n\n\n\n /// Get a fresh set of credentials to be used right-away.\n\n async fn get_credentials(&self) -> Result<CredentialsPair, Self::Error>;\n\n}\n\n\n\n/// Simple `LoginCredentials` implementation that always returns the same `CredentialsPair`\n\n/// and never fails.\n\n#[derive(Debug, Clone)]\n\npub struct StaticLoginCredentials {\n\n /// The credentials that are always returned.\n\n pub credentials: CredentialsPair,\n\n}\n\n\n\nimpl StaticLoginCredentials {\n\n /// Create new static login credentials from the given Twitch login name and OAuth access token.\n\n /// The `token` should be without the `oauth:` prefix.\n\n pub fn new(login: String, token: Option<String>) -> StaticLoginCredentials {\n", "file_path": "src/login.rs", "rank": 9, "score": 36066.704587266024 }, { "content": "use crate::login::LoginCredentials;\n\nuse crate::message::IRCParseError;\n\nuse crate::transport::Transport;\n\nuse std::sync::Arc;\n\nuse thiserror::Error;\n\n\n\n/// Errors that can occur while trying to execute some action on a `TwitchIRCClient`.\n\n#[derive(Error, Debug)]\n\npub enum Error<T: Transport, L: LoginCredentials> 
{\n\n /// Underlying transport failed to connect\n\n #[error(\"Underlying transport failed to connect: {0}\")]\n\n ConnectError(Arc<T::ConnectError>),\n\n /// Underlying transport failed to connect in time\n\n #[error(\"Underlying transport failed to connect: Connect timed out\")]\n\n ConnectTimeout,\n\n /// Error received from incoming stream of messages\n\n #[error(\"Error received from incoming stream of messages: {0}\")]\n\n IncomingError(Arc<T::IncomingError>),\n\n /// Error received while trying to send message(s) out\n\n #[error(\"Error received while trying to send message(s) out: {0}\")]\n", "file_path": "src/error.rs", "rank": 10, "score": 35195.91850956438 }, { "content": " match self {\n\n Error::ConnectError(e) => Error::ConnectError(Arc::clone(e)),\n\n Error::ConnectTimeout => Error::ConnectTimeout,\n\n Error::IncomingError(e) => Error::IncomingError(Arc::clone(e)),\n\n Error::OutgoingError(e) => Error::OutgoingError(Arc::clone(e)),\n\n Error::IRCParseError(e) => Error::IRCParseError(*e),\n\n Error::LoginError(e) => Error::LoginError(Arc::clone(e)),\n\n Error::ReconnectCmd => Error::ReconnectCmd,\n\n Error::PingTimeout => Error::PingTimeout,\n\n Error::RemoteUnexpectedlyClosedConnection => Error::RemoteUnexpectedlyClosedConnection,\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 11, "score": 35194.82043808886 }, { "content": " OutgoingError(Arc<T::OutgoingError>),\n\n /// Incoming message was not valid IRC\n\n #[error(\"Incoming message was not valid IRC: {0}\")]\n\n IRCParseError(IRCParseError),\n\n /// Failed to get login credentials to log in with\n\n #[error(\"Failed to get login credentials to log in with: {0}\")]\n\n LoginError(Arc<L::Error>),\n\n /// Received RECONNECT command by IRC server\n\n #[error(\"Received RECONNECT command by IRC server\")]\n\n ReconnectCmd,\n\n /// Did not receive a PONG back after sending PING\n\n #[error(\"Did not receive a PONG back after sending PING\")]\n\n PingTimeout,\n\n /// Remote server unexpectedly 
closed connection\n\n #[error(\"Remote server unexpectedly closed connection\")]\n\n RemoteUnexpectedlyClosedConnection,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> Clone for Error<T, L> {\n\n fn clone(&self) -> Self {\n", "file_path": "src/error.rs", "rank": 12, "score": 35192.870297919704 }, { "content": "#[enum_dispatch(ConnectionLoopStateMethods<T, L>)]\n\nenum ConnectionLoopState<T: Transport, L: LoginCredentials> {\n\n Initializing(ConnectionLoopInitializingState<T, L>),\n\n Open(ConnectionLoopOpenState<T, L>),\n\n Closed(ConnectionLoopClosedState<T, L>),\n\n}\n\n\n\npub(crate) struct ConnectionLoopWorker<T: Transport, L: LoginCredentials> {\n\n connection_loop_rx: mpsc::UnboundedReceiver<ConnectionLoopCommand<T, L>>,\n\n state: ConnectionLoopState<T, L>,\n\n #[cfg(feature = \"metrics-collection\")]\n\n config: Arc<ClientConfig<L>>,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> ConnectionLoopWorker<T, L> {\n\n pub fn spawn(\n\n config: Arc<ClientConfig<L>>,\n\n connection_incoming_tx: mpsc::UnboundedSender<ConnectionIncomingMessage<T, L>>,\n\n connection_loop_tx: Weak<mpsc::UnboundedSender<ConnectionLoopCommand<T, L>>>,\n\n connection_loop_rx: mpsc::UnboundedReceiver<ConnectionLoopCommand<T, L>>,\n\n ) {\n", "file_path": "src/connection/event_loop.rs", "rank": 13, "score": 35053.09681698024 }, { "content": "//\n\n// INITIALIZING STATE\n\n//\n\nstruct ConnectionLoopInitializingState<T: Transport, L: LoginCredentials> {\n\n // a list of queued up ConnectionLoopCommand::SendMessage messages\n\n commands_queue: CommandQueue<T, L>,\n\n connection_loop_tx: Weak<mpsc::UnboundedSender<ConnectionLoopCommand<T, L>>>,\n\n connection_incoming_tx: mpsc::UnboundedSender<ConnectionIncomingMessage<T, L>>,\n\n #[cfg(feature = \"metrics-collection\")]\n\n config: Arc<ClientConfig<L>>,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> ConnectionLoopInitializingState<T, L> {\n\n fn transition_to_closed(self, err: Error<T, L>) -> ConnectionLoopState<T, L> {\n\n 
log::info!(\"Closing connection, reason: {}\", err);\n\n\n\n for (_message, return_sender) in self.commands_queue.into_iter() {\n\n if let Some(return_sender) = return_sender {\n\n return_sender.send(Err(err.clone())).ok();\n\n }\n\n }\n\n\n\n self.connection_incoming_tx\n", "file_path": "src/connection/event_loop.rs", "rank": 14, "score": 33329.17829378869 }, { "content": "//\n\n// CLOSED STATE.\n\n//\n\nstruct ConnectionLoopClosedState<T: Transport, L: LoginCredentials> {\n\n reason_for_closure: Error<T, L>,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> ConnectionLoopStateMethods<T, L>\n\n for ConnectionLoopClosedState<T, L>\n\n{\n\n fn send_message(\n\n &mut self,\n\n _message: IRCMessage,\n\n reply_sender: Option<Sender<Result<(), Error<T, L>>>>,\n\n ) {\n\n if let Some(reply_sender) = reply_sender {\n\n reply_sender.send(Err(self.reason_for_closure.clone())).ok();\n\n }\n\n }\n\n\n\n fn on_transport_init_finished(\n\n self,\n\n _init_result: Result<(T, CredentialsPair), Error<T, L>>,\n", "file_path": "src/connection/event_loop.rs", "rank": 15, "score": 33329.17829378869 }, { "content": "//\n\n// OPEN STATE\n\n//\n\nstruct ConnectionLoopOpenState<T: Transport, L: LoginCredentials> {\n\n connection_incoming_tx: mpsc::UnboundedSender<ConnectionIncomingMessage<T, L>>,\n\n outgoing_messages_tx: MessageSender<T, L>,\n\n pong_received: bool,\n\n /// To kill the background pinger and forward tasks when this gets dropped.\n\n /// These fields are wrapped in `Option` so we can use `take()` in the Drop implementation.\n\n kill_incoming_loop_tx: Option<oneshot::Sender<()>>,\n\n kill_pinger_tx: Option<oneshot::Sender<()>>,\n\n #[cfg(feature = \"metrics-collection\")]\n\n config: Arc<ClientConfig<L>>,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> ConnectionLoopOpenState<T, L> {\n\n fn transition_to_closed(self, cause: Error<T, L>) -> ConnectionLoopState<T, L> {\n\n log::info!(\"Closing connection, cause: {}\", cause);\n\n\n\n self.connection_incoming_tx\n\n 
.send(ConnectionIncomingMessage::StateClosed {\n\n cause: cause.clone(),\n\n })\n", "file_path": "src/connection/event_loop.rs", "rank": 16, "score": 33329.17829378869 }, { "content": "pub use commands::userstate::UserStateMessage;\n\npub use commands::whisper::WhisperMessage;\n\npub use commands::{ServerMessage, ServerMessageParseError};\n\npub use prefix::IRCPrefix;\n\npub use tags::IRCTags;\n\npub use twitch::*;\n\n\n\nuse itertools::Itertools;\n\nuse std::fmt;\n\nuse std::fmt::Write;\n\nuse thiserror::Error;\n\n\n\n/// Error while parsing a string into an `IRCMessage`.\n\n#[derive(Debug, Clone, Copy, PartialEq, Error)]\n\npub enum IRCParseError {\n\n /// No space found after tags (no command/prefix)\n\n #[error(\"No space found after tags (no command/prefix)\")]\n\n NoSpaceAfterTags,\n\n /// No tags after @ sign\n\n #[error(\"No tags after @ sign\")]\n", "file_path": "src/message/mod.rs", "rank": 17, "score": 31758.297399961888 }, { "content": "//! Generic and Twitch-specific IRC messages.\n\n\n\npub(crate) mod commands;\n\npub(crate) mod prefix;\n\npub(crate) mod tags;\n\npub(crate) mod twitch;\n\n\n\npub use commands::clearchat::{ClearChatAction, ClearChatMessage};\n\npub use commands::clearmsg::ClearMsgMessage;\n\npub use commands::globaluserstate::GlobalUserStateMessage;\n\npub use commands::hosttarget::{HostTargetAction, HostTargetMessage};\n\npub use commands::join::JoinMessage;\n\npub use commands::notice::NoticeMessage;\n\npub use commands::part::PartMessage;\n\npub use commands::ping::PingMessage;\n\npub use commands::pong::PongMessage;\n\npub use commands::privmsg::PrivmsgMessage;\n\npub use commands::reconnect::ReconnectMessage;\n\npub use commands::roomstate::RoomStateMessage;\n\npub use commands::usernotice::{SubGiftPromo, UserNoticeEvent, UserNoticeMessage};\n", "file_path": "src/message/mod.rs", "rank": 18, "score": 31758.05641554961 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use 
maplit::hashmap;\n\n\n\n #[test]\n\n fn test_privmsg() {\n\n let source = \"@rm-received-ts=1577040815136;historical=1;badge-info=subscriber/16;badges=moderator/1,subscriber/12;color=#19E6E6;display-name=randers;emotes=;flags=;id=6e2ccb1f-01ed-44d0-85b6-edf762524475;mod=1;room-id=11148817;subscriber=1;tmi-sent-ts=1577040814959;turbo=0;user-id=40286300;user-type=mod :randers!randers@randers.tmi.twitch.tv PRIVMSG #pajlada :Pajapains\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {\n\n \"display-name\".to_owned() => Some(\"randers\".to_owned()),\n", "file_path": "src/message/mod.rs", "rank": 19, "score": 31754.145062188712 }, { "content": "pub mod event_loop;\n\n\n\nuse crate::config::ClientConfig;\n\nuse crate::connection::event_loop::{ConnectionLoopCommand, ConnectionLoopWorker};\n\nuse crate::error::Error;\n\nuse crate::login::LoginCredentials;\n\nuse crate::message::commands::ServerMessage;\n\nuse crate::transport::Transport;\n\nuse std::sync::Arc;\n\nuse tokio::sync::mpsc;\n\n\n\n#[derive(Debug)]\n\npub enum ConnectionIncomingMessage<T: Transport, L: LoginCredentials> {\n\n IncomingMessage(ServerMessage),\n\n #[cfg(feature = \"metrics-collection\")]\n\n StateOpen,\n\n StateClosed {\n\n cause: Error<T, L>,\n\n },\n\n}\n", "file_path": "src/connection/mod.rs", "rank": 20, "score": 31753.999121395947 }, { "content": "#[cfg(feature = \"transport-tcp\")]\n\npub mod tcp;\n\n#[cfg(feature = \"transport-wss\")]\n\npub mod websocket;\n\n\n\nuse crate::message::{IRCMessage, IRCParseError};\n\nuse async_trait::async_trait;\n\nuse futures_util::{sink::Sink, stream::FusedStream};\n\nuse itertools::Either;\n\nuse std::fmt::{Debug, Display};\n\n\n\n/// Abstracts over different ways of connecting to Twitch Chat, which are currently\n\n/// plain IRC (TCP), and the Twitch-specific WebSocket extension.\n\n#[async_trait]\n", "file_path": "src/transport/mod.rs", "rank": 21, "score": 
31753.926387246123 }, { "content": " command: \"PING\".to_owned(),\n\n params: vec![\"\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_numeric_command() {\n\n let source = \"500 :Internal Server Error\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: None,\n\n command: \"500\".to_owned(),\n\n params: vec![\"Internal Server Error\".to_owned()],\n\n }\n\n );\n", "file_path": "src/message/mod.rs", "rank": 22, "score": 31753.64754481889 }, { "content": "\n\nimpl IRCMessage {\n\n /// Create a new `IRCMessage` with just a command and parameters, similar to the\n\n /// `irc!` macro.\n\n pub fn new_simple(command: String, params: Vec<String>) -> IRCMessage {\n\n IRCMessage {\n\n tags: IRCTags::new(),\n\n prefix: None,\n\n command,\n\n params,\n\n }\n\n }\n\n\n\n /// Create a new `IRCMessage` by specifying all fields.\n\n pub fn new(\n\n tags: IRCTags,\n\n prefix: Option<IRCPrefix>,\n\n command: String,\n\n params: Vec<String>,\n\n ) -> IRCMessage {\n", "file_path": "src/message/mod.rs", "rank": 23, "score": 31753.212728376737 }, { "content": "mod event_loop;\n\nmod pool_connection;\n\n\n\nuse crate::client::event_loop::{ClientLoopCommand, ClientLoopWorker};\n\nuse crate::config::ClientConfig;\n\nuse crate::error::Error;\n\nuse crate::irc;\n\nuse crate::login::LoginCredentials;\n\nuse crate::message::commands::ServerMessage;\n\nuse crate::message::IRCMessage;\n\nuse crate::message::{IRCTags, PrivmsgMessage};\n\nuse crate::transport::Transport;\n\nuse std::collections::HashSet;\n\nuse std::sync::Arc;\n\nuse tokio::sync::{mpsc, oneshot};\n\n\n\n/// A send-only handle to control the Twitch IRC Client.\n\n#[derive(Debug)]\n\npub struct TwitchIRCClient<T: Transport, L: LoginCredentials> {\n\n // we use an Arc<>.\n", "file_path": "src/client/mod.rs", "rank": 24, "score": 31753.133133548803 
}, { "content": " /// Behaves the same as `say()` when `reply_to_id` is None, but tags the original message and it's sender if specified.\n\n pub async fn say_in_response(\n\n &self,\n\n channel_login: String,\n\n message: String,\n\n reply_to_id: Option<String>,\n\n ) -> Result<(), Error<T, L>> {\n\n let mut tags = IRCTags::new();\n\n\n\n if let Some(id) = reply_to_id {\n\n tags.0.insert(\"reply-parent-msg-id\".to_string(), Some(id));\n\n }\n\n\n\n let irc_message = IRCMessage::new(\n\n tags,\n\n None,\n\n \"PRIVMSG\".to_string(),\n\n vec![format!(\"#{}\", channel_login), format!(\". {}\", message)], // The prefixed \".\" prevents commands from being executed\n\n );\n\n self.send_message(irc_message).await\n", "file_path": "src/client/mod.rs", "rank": 25, "score": 31752.62246233603 }, { "content": " pub async fn privmsg(&self, channel_login: String, message: String) -> Result<(), Error<T, L>> {\n\n self.send_message(irc![\"PRIVMSG\", format!(\"#{}\", channel_login), message])\n\n .await\n\n }\n\n\n\n /// Say a chat message in the given Twitch channel.\n\n ///\n\n /// This method automatically prevents commands from being executed. For example\n\n /// `say(\"a_channel\", \"/ban a_user\") would not actually ban a user, instead it would\n\n /// send that exact message as a normal chat message instead.\n\n ///\n\n /// No particular filtering is performed on the message. 
If the message is too long for chat,\n\n /// it will not be cut short or split into multiple messages (what happens is determined\n\n /// by the behaviour of the Twitch IRC server).\n\n pub async fn say(&self, channel_login: String, message: String) -> Result<(), Error<T, L>> {\n\n self.say_in_response(channel_login, message, None).await\n\n }\n\n\n\n /// Say a chat message in the given Twitch channel, but send it as a response to another message if `reply_to_id` is specified.\n\n ///\n", "file_path": "src/client/mod.rs", "rank": 26, "score": 31751.622658225293 }, { "content": " }\n\n\n\n /// Replies to a given `PrivmsgMessage`, tagging the original message and it's sender.\n\n ///\n\n /// Similarly to `say()`, this method strips the message of executing commands, but does not filter out messages which are too long.\n\n /// Refer to `say()` for the exact behaviour.\n\n pub async fn reply_to_privmsg(\n\n &self,\n\n message: String,\n\n reply_to: &PrivmsgMessage,\n\n ) -> Result<(), Error<T, L>> {\n\n self.say_in_response(\n\n reply_to.channel_login.clone(),\n\n message,\n\n Some(reply_to.message_id.clone()),\n\n )\n\n .await\n\n }\n\n\n\n /// Join the given Twitch channel (When a channel is joined, the client will receive messages\n", "file_path": "src/client/mod.rs", "rank": 27, "score": 31751.102860979972 }, { "content": " assert_eq!(result, Err(IRCParseError::TooManySpacesInMiddleParams))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_middle_params_too_many_spaces_after_command() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PING asd def\");\n\n assert_eq!(result, Err(IRCParseError::TooManySpacesInMiddleParams))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_middle_params_trailing_space() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PING asd def \");\n\n assert_eq!(result, Err(IRCParseError::TooManySpacesInMiddleParams))\n\n }\n\n\n\n #[test]\n\n fn test_empty_trailing_param_1() {\n\n let source = \"PING asd def :\";\n\n let message = 
IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n", "file_path": "src/message/mod.rs", "rank": 28, "score": 31751.089049464932 }, { "content": " message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: None,\n\n command: \"PING\".to_owned(),\n\n params: vec![\"asd\".to_owned(), \"def\".to_owned(), \"\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_empty_trailing_param_2() {\n\n let source = \"PING :\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: None,\n", "file_path": "src/message/mod.rs", "rank": 29, "score": 31750.91337836874 }, { "content": " IRCMessage {\n\n tags,\n\n prefix,\n\n command,\n\n params,\n\n }\n\n }\n\n\n\n /// Parse a raw IRC wire-format message into an `IRCMessage`. `source` should be specified\n\n /// without trailing newline character(s).\n\n pub fn parse(mut source: &str) -> Result<IRCMessage, IRCParseError> {\n\n if source.chars().any(|c| c == '\\r' || c == '\\n') {\n\n return Err(IRCParseError::NewlinesInMessage);\n\n }\n\n\n\n let tags = if source.starts_with('@') {\n\n // str[1..] 
removes the leading @ sign\n\n let (tags_part, remainder) = source[1..]\n\n .splitn(2, ' ')\n\n .next_tuple()\n", "file_path": "src/message/mod.rs", "rank": 30, "score": 31750.72438128307 }, { "content": " assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_stringify_pass() {\n\n assert_eq!(\n\n irc![\"PASS\", \"oauth:9892879487293847\"].as_raw_irc(),\n\n \"PASS oauth:9892879487293847\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_newline_in_source() {\n\n assert_eq!(\n\n IRCMessage::parse(\"abc\\ndef\"),\n\n Err(IRCParseError::NewlinesInMessage)\n\n );\n\n assert_eq!(\n\n IRCMessage::parse(\"abc\\rdef\"),\n\n Err(IRCParseError::NewlinesInMessage)\n", "file_path": "src/message/mod.rs", "rank": 31, "score": 31750.610965431624 }, { "content": " let source = \"@a=b;c=32;k;rt=ql7 foo\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {\n\n \"a\".to_owned() => Some(\"b\".to_owned()),\n\n \"c\".to_owned() => Some(\"32\".to_owned()),\n\n \"k\".to_owned() => None,\n\n \"rt\".to_owned() => Some(\"ql7\".to_owned())\n\n }),\n\n prefix: None,\n\n command: \"FOO\".to_owned(),\n\n params: vec![],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n", "file_path": "src/message/mod.rs", "rank": 32, "score": 31750.259175030784 }, { "content": " prefix: Some(IRCPrefix::HostOnly {\n\n host: \"tmi.twitch.tv\".to_owned()\n\n }),\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"#pajlada\".to_owned(), \"test\".to_owned(),],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_ping_1() {\n\n let source = \"PING :tmi.twitch.tv\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: None,\n\n command: \"PING\".to_owned(),\n", "file_path": "src/message/mod.rs", "rank": 33, "score": 31750.226273999 }, { "content": " );\n\n assert_eq!(\n\n IRCMessage::parse(\"abc\\n\\rdef\"),\n\n Err(IRCParseError::NewlinesInMessage)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_lowercase_command() {\n\n assert_eq!(IRCMessage::parse(\"ping\").unwrap().command, \"PING\")\n\n }\n\n\n\n #[test]\n\n fn test_irc_macro() {\n\n assert_eq!(\n\n irc![\"PRIVMSG\"],\n\n IRCMessage {\n\n tags: IRCTags::new(),\n\n prefix: None,\n\n command: \"PRIVMSG\".to_owned(),\n", "file_path": "src/message/mod.rs", "rank": 34, "score": 31750.21362008463 }, { "content": " fn test_pure_irc_7() {\n\n let source = \"@a=b\\\\\\\\and\\\\nk;c=72\\\\s45;d=gh\\\\:764 foo\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {\n\n \"a\".to_owned() => Some(\"b\\\\and\\nk\".to_owned()),\n\n \"c\".to_owned() => Some(\"72 45\".to_owned()),\n\n \"d\".to_owned() => Some(\"gh;764\".to_owned()),\n\n }),\n\n prefix: None,\n\n command: \"FOO\".to_owned(),\n\n params: vec![],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n", "file_path": "src/message/mod.rs", "rank": 35, "score": 31750.1204254522 }, { "content": " assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_vendor_tags() {\n\n let source = \"@tag1=value1;tag2;vendor1/tag3=value2;vendor2/tag4 :irc.example.com COMMAND param1 param2 :param3 param3\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{\n\n \"tag1\".to_owned() => Some(\"value1\".to_owned()),\n\n \"tag2\".to_owned() => None,\n\n \"vendor1/tag3\".to_owned() => Some(\"value2\".to_owned()),\n\n \"vendor2/tag4\".to_owned() => None\n\n }),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"irc.example.com\".to_owned()\n\n }),\n\n command: \"COMMAND\".to_owned(),\n", "file_path": "src/message/mod.rs", "rank": 36, "score": 31749.81315742317 }, { "content": " if prefix_part.is_empty() {\n\n return Err(IRCParseError::EmptyPrefixDeclaration);\n\n }\n\n\n\n Some(IRCPrefix::parse(prefix_part))\n\n } else {\n\n None\n\n };\n\n\n\n let mut command_split = source.splitn(2, ' ');\n\n let mut command = command_split.next().unwrap().to_owned();\n\n command.make_ascii_uppercase();\n\n\n\n if command.is_empty()\n\n || !command.chars().all(|c| c.is_ascii_alphabetic())\n\n && !command.chars().all(|c| c.is_ascii() && c.is_numeric())\n\n {\n\n return Err(IRCParseError::MalformedCommand);\n\n }\n\n\n", "file_path": "src/message/mod.rs", "rank": 37, "score": 31749.269170000473 }, { "content": " params: vec![\"bar\".to_owned(), \"baz\".to_owned(), \"asdf\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_pure_irc_1() {\n\n let source = \"foo bar baz ::asdf\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: None,\n\n command: \"FOO\".to_owned(),\n\n params: vec![\"bar\".to_owned(), \"baz\".to_owned(), \":asdf\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n", "file_path": "src/message/mod.rs", "rank": 38, "score": 31749.25863867424 }, { "content": "///\n\n/// See [RFC 2812, section 2.3.1](https://tools.ietf.org/html/rfc2812#section-2.3.1)\n\n/// for the message format that this is based on.\n\n/// Further, this implements [IRCv3 tags](https://ircv3.net/specs/extensions/message-tags.html).\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct IRCMessage {\n\n /// A map of additional key-value tags on this message.\n\n pub tags: IRCTags,\n\n /// The \"prefix\" of this message, as defined by RFC 2812. Typically specifies the sending\n\n /// server and/or user.\n\n pub prefix: Option<IRCPrefix>,\n\n /// A command like `PRIVMSG` or `001` (see RFC 2812 for the definition).\n\n pub command: String,\n\n /// A list of parameters on this IRC message. See RFC 2812 for the definition.\n\n ///\n\n /// Middle parameters and trailing parameters are treated the same here, and as long as\n\n /// there are no spaces in the last parameter, there is no way to tell if that parameter\n\n /// was a middle or trailing parameter when it was parsed.\n\n pub params: Vec<String>,\n\n}\n", "file_path": "src/message/mod.rs", "rank": 39, "score": 31749.23107769766 }, { "content": " fn test_pure_irc_8() {\n\n let source = \"@c;h=;a=b :quux ab cd\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{\n\n \"c\".to_owned() => None,\n\n \"h\".to_owned() => Some(\"\".to_owned()),\n\n \"a\".to_owned() => Some(\"b\".to_owned()),\n\n }),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"quux\".to_owned()\n\n }),\n\n command: \"AB\".to_owned(),\n\n params: vec![\"cd\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n", "file_path": "src/message/mod.rs", "rank": 40, "score": 31748.997408818952 }, { "content": " #[test]\n\n fn test_invalid_empty_command_2() {\n\n let result = IRCMessage::parse(\"\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_command_1() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PING\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_command_2() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv P!NG\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_command_3() {\n", "file_path": "src/message/mod.rs", "rank": 41, "score": 31748.77611342594 }, { "content": " .ok_or(IRCParseError::NoSpaceAfterTags)?;\n\n source = remainder;\n\n\n\n if tags_part.is_empty() {\n\n return Err(IRCParseError::EmptyTagsDeclaration);\n\n }\n\n\n\n IRCTags::parse(tags_part)\n\n } else {\n\n IRCTags::new()\n\n };\n\n\n\n let prefix = if source.starts_with(':') {\n\n // str[1..] 
removes the leading : sign\n\n let (prefix_part, remainder) = source[1..]\n\n .splitn(2, ' ')\n\n .next_tuple()\n\n .ok_or(IRCParseError::NoSpaceAfterPrefix)?;\n\n source = remainder;\n\n\n", "file_path": "src/message/mod.rs", "rank": 42, "score": 31748.67481346023 }, { "content": " );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_invalid_empty_tags() {\n\n let result = IRCMessage::parse(\"@ :tmi.twitch.tv TEST\");\n\n assert_eq!(result, Err(IRCParseError::EmptyTagsDeclaration))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_nothing_after_tags() {\n\n let result = IRCMessage::parse(\"@key=value\");\n\n assert_eq!(result, Err(IRCParseError::NoSpaceAfterTags))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_empty_prefix() {\n\n let result = IRCMessage::parse(\"@key=value : TEST\");\n\n assert_eq!(result, Err(IRCParseError::EmptyPrefixDeclaration))\n", "file_path": "src/message/mod.rs", "rank": 43, "score": 31748.64465108888 }, { "content": " }\n\n\n\n #[test]\n\n fn test_invalid_nothing_after_prefix() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv\");\n\n assert_eq!(result, Err(IRCParseError::NoSpaceAfterPrefix))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_spaces_at_start_of_line() {\n\n let result = IRCMessage::parse(\" @key=value :tmi.twitch.tv PING\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_empty_command_1() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv \");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n", "file_path": "src/message/mod.rs", "rank": 44, "score": 31748.497931511734 }, { "content": "\n\n /// Part (leave) a channel, to stop receiving messages sent to that channel.\n\n ///\n\n /// This has the same semantics as `join()`. 
Similarly, a `part()` call will have no effect\n\n /// if the channel is not currently joined.\n\n pub fn part(&self, channel_login: String) {\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::Part { channel_login })\n\n .unwrap();\n\n }\n\n\n\n /// Ping a random connection. This does not await the `PONG` response from Twitch.\n\n /// The future resolves once the `PING` command is sent to the wire.\n\n /// An error is returned in case the message could not be sent over the picked connection.\n\n pub async fn ping(&self) -> Result<(), Error<T, L>> {\n\n let (return_tx, return_rx) = oneshot::channel();\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::Ping {\n\n return_sender: return_tx,\n\n })\n\n .unwrap();\n\n // unwrap: ClientLoopWorker should not die before all sender handles have been dropped\n\n return_rx.await.unwrap()\n\n }\n\n}\n", "file_path": "src/client/mod.rs", "rank": 45, "score": 31748.202762751345 }, { "content": " #[test]\n\n fn test_pure_irc_5() {\n\n let source = \":coolguy foo bar baz : \";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"coolguy\".to_owned()\n\n }),\n\n command: \"FOO\".to_owned(),\n\n params: vec![\"bar\".to_owned(), \"baz\".to_owned(), \" \".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_pure_irc_6() {\n", "file_path": "src/message/mod.rs", "rank": 46, "score": 31748.028825254838 }, { "content": "\n\n #[test]\n\n fn test_join_1() {\n\n let source = \":src JOIN #chan\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"src\".to_owned()\n\n }),\n\n command: \"JOIN\".to_owned(),\n\n params: vec![\"#chan\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n", "file_path": "src/message/mod.rs", "rank": 47, "score": 31747.997628000834 }, { "content": " let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PØNG\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_command_4() {\n\n // mix of ascii numeric and ascii alphabetic\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv P1NG\");\n\n assert_eq!(result, Err(IRCParseError::MalformedCommand))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_middle_params_space_after_command() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PING \");\n\n assert_eq!(result, Err(IRCParseError::TooManySpacesInMiddleParams))\n\n }\n\n\n\n #[test]\n\n fn test_invalid_middle_params_too_many_spaces_between_params() {\n\n let result = IRCMessage::parse(\"@key=value :tmi.twitch.tv PING asd def\");\n", "file_path": "src/message/mod.rs", "rank": 48, "score": 31747.947157744034 }, { "content": " fn test_join_2() {\n\n assert_eq!(\n\n IRCMessage::parse(\":src JOIN #chan\"),\n\n IRCMessage::parse(\":src JOIN :#chan\"),\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_away_1() {\n\n let source = \":src AWAY\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"src\".to_owned()\n\n }),\n\n command: \"AWAY\".to_owned(),\n\n params: vec![],\n", "file_path": "src/message/mod.rs", "rank": 49, "score": 31747.9261679371 }, { "content": " params: vec![\n\n \"param1\".to_owned(),\n\n \"param2\".to_owned(),\n\n \"param3 param3\".to_owned()\n\n ],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_asian_characters_display_name() {\n\n let source = \"@display-name=테스트계정420 :tmi.twitch.tv PRIVMSG #pajlada :test\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {\n\n \"display-name\".to_owned() => Some(\"테스트계정420\".to_owned()),\n\n }),\n", "file_path": "src/message/mod.rs", "rank": 50, "score": 31747.787255871972 }, { "content": "\n\n/// Allows quick creation of simple IRC messages using a command and optional parameters.\n\n///\n\n/// The given command and parameters have to implement `From<T> for String` if they are not\n\n/// already of type `String`.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use twitch_irc::irc;\n\n/// use twitch_irc::message::AsRawIRC;\n\n///\n\n/// # fn main() {\n\n/// let msg = irc![\"PRIVMSG\", \"#sodapoppin\", \"Hello guys!\"];\n\n///\n\n/// assert_eq!(msg.command, \"PRIVMSG\");\n\n/// assert_eq!(msg.params, vec![\"#sodapoppin\".to_owned(), \"Hello guys!\".to_owned()]);\n\n/// assert_eq!(msg.as_raw_irc(), \"PRIVMSG #sodapoppin :Hello guys!\");\n\n/// # }\n\n/// ```\n", "file_path": "src/message/mod.rs", "rank": 51, "score": 31747.68060418155 }, { "content": " }\n\n\n\n #[test]\n\n fn test_pure_irc_2() {\n\n let source = \":coolguy foo bar baz : asdf quux \";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"coolguy\".to_owned()\n\n }),\n\n command: \"FOO\".to_owned(),\n\n params: vec![\n\n \"bar\".to_owned(),\n\n \"baz\".to_owned(),\n\n \" asdf quux \".to_owned()\n\n ],\n\n }\n", "file_path": "src/message/mod.rs", "rank": 52, "score": 31747.595674425684 }, { "content": " }\n\n\n\n #[test]\n\n fn test_pure_irc_4() {\n\n let source = \":coolguy foo bar baz :\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"coolguy\".to_owned()\n\n }),\n\n command: \"FOO\".to_owned(),\n\n params: vec![\"bar\".to_owned(), \"baz\".to_owned(), \"\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n", "file_path": "src/message/mod.rs", "rank": 53, "score": 31747.364733508588 }, { "content": " }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_away_2() {\n\n let source = \":cool\\tguy foo bar baz\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"cool\\tguy\".to_owned()\n\n }),\n\n command: \"FOO\".to_owned(),\n\n params: vec![\"bar\".to_owned(), \"baz\".to_owned()],\n\n }\n\n );\n", "file_path": "src/message/mod.rs", "rank": 54, "score": 31747.364733508588 }, { "content": " params: vec![\"tmi.twitch.tv\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_ping_2() {\n\n let source = \":tmi.twitch.tv PING\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"tmi.twitch.tv\".to_owned()\n\n }),\n\n command: \"PING\".to_owned(),\n\n params: vec![],\n\n }\n", "file_path": "src/message/mod.rs", "rank": 55, "score": 31747.290991913716 }, { "content": " }),\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"#pajlada\".to_owned(), \"Pajapains\".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_confusing_prefix_trailing_param() {\n\n let source = \":coolguy foo bar baz asdf\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! {}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"coolguy\".to_owned()\n\n }),\n\n command: \"FOO\".to_owned(),\n", "file_path": "src/message/mod.rs", "rank": 56, "score": 31747.183242894764 }, { "content": " );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_pure_irc_3() {\n\n let source = \":coolguy PRIVMSG bar :lol :) \";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::HostOnly {\n\n host: \"coolguy\".to_owned()\n\n }),\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"bar\".to_owned(), \"lol :) \".to_owned()],\n\n }\n\n );\n\n assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n", "file_path": "src/message/mod.rs", "rank": 57, "score": 31747.044678499988 }, { "content": " \"twitch_irc_messages_received\",\n\n \"Counts all incoming messages\",\n\n \"client\" => metrics_identifier.clone().into_owned()\n\n );\n\n metrics::register_counter!(\n\n \"twitch_irc_messages_sent\",\n\n \"Counts all outgoing messages\",\n\n \"client\" => metrics_identifier.clone().into_owned()\n\n );\n\n metrics::register_gauge!(\n\n \"twitch_irc_channels\",\n\n \"Number of joined channels\",\n\n \"client\" => metrics_identifier.clone().into_owned()\n\n );\n\n metrics::register_gauge!(\n\n \"twitch_irc_connections\",\n\n \"Number of connections in use by this client\",\n\n \"client\" => metrics_identifier.clone().into_owned()\n\n );\n\n metrics::register_counter!(\n", "file_path": "src/client/mod.rs", "rank": 58, "score": 31747.031309628073 }, { "content": " assert_eq!(IRCMessage::parse(&message.as_raw_irc()).unwrap(), message);\n\n }\n\n\n\n #[test]\n\n fn test_complex_prefix() {\n\n let source = \":coolguy!~ag@n\\u{0002}et\\u{0003}05w\\u{000f}ork.admin PRIVMSG foo :bar baz\";\n\n let message = IRCMessage::parse(source).unwrap();\n\n assert_eq!(\n\n message,\n\n IRCMessage {\n\n tags: IRCTags::from(hashmap! 
{}),\n\n prefix: Some(IRCPrefix::Full {\n\n nick: \"coolguy\".to_owned(),\n\n user: Some(\"~ag\".to_owned()),\n\n host: Some(\"n\\u{0002}et\\u{0003}05w\\u{000f}ork.admin\".to_owned())\n\n }),\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"foo\".to_owned(), \"bar baz\".to_owned()],\n\n }\n\n );\n", "file_path": "src/message/mod.rs", "rank": 59, "score": 31746.911624980014 }, { "content": " pub fn set_wanted_channels(&self, channels: HashSet<String>) {\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::SetWantedChannels { channels })\n\n .unwrap();\n\n }\n\n\n\n /// Query the client for what status a certain channel is in.\n\n ///\n\n /// Returns two booleans: The first indicates whether a channel is `wanted`. This is true\n\n /// if the last operation for this channel was a `join()` method, or alternatively whether\n\n /// it was included in the set of channels in a `set_wanted_channels` call.\n\n ///\n\n /// The second boolean indicates whether this channel is currently joined server-side.\n\n /// (This is purely based on `JOIN` and `PART` messages being received from the server).\n\n ///\n\n /// Note that any combination of `true` and `false` is possible here.\n\n ///\n\n /// For example, `(true, false)` could indicate that the `JOIN` message to join this channel is currently\n\n /// being sent or already sent, but no response confirming the `JOIN` has been received yet.\n\n /// **Note this status can also mean that the server did not answer the `JOIN` request because\n", "file_path": "src/client/mod.rs", "rank": 60, "score": 31746.238292458893 }, { "content": " // the client loop has to also hold a handle to this sender to be able to feed itself\n\n // with commands as well. (e.g. to rejoin channels)\n\n // the client loop gets a Weak<> (a weak reference) and this client holds strong\n\n // references. 
That means when the last client handle is dropped, the client loop\n\n // exits, because the underlying mpsc::UnboundedSender will be dropped.\n\n // The client will then also no longer be able to send \"itself\" messages, because\n\n // it always only holds a Weak<> and has to check whether the weak reference is still\n\n // valid before sending itself messages.\n\n client_loop_tx: Arc<mpsc::UnboundedSender<ClientLoopCommand<T, L>>>,\n\n}\n\n\n\n// we have to implement Debug and Clone manually, the derive macro places\n\n// the requirement `T: Clone` which we cannot currently satisfy and don't need\n\nimpl<T: Transport, L: LoginCredentials> Clone for TwitchIRCClient<T, L> {\n\n fn clone(&self) -> Self {\n\n TwitchIRCClient {\n\n client_loop_tx: self.client_loop_tx.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/client/mod.rs", "rank": 61, "score": 31746.22686170972 }, { "content": "\n\npub(crate) struct Connection<T: Transport, L: LoginCredentials> {\n\n /// sends commands to the this connection's event loop.\n\n pub connection_loop_tx: Arc<mpsc::UnboundedSender<ConnectionLoopCommand<T, L>>>,\n\n}\n\n\n\nimpl<T: Transport, L: LoginCredentials> Connection<T, L> {\n\n /// makes a tuple with the incoming messages and the `Connection` handle for outgoing\n\n /// messages.\n\n pub fn new(\n\n config: Arc<ClientConfig<L>>,\n\n ) -> (\n\n mpsc::UnboundedReceiver<ConnectionIncomingMessage<T, L>>,\n\n Connection<T, L>,\n\n ) {\n\n let (connection_loop_tx, connection_loop_rx) = mpsc::unbounded_channel();\n\n let (connection_incoming_tx, connection_incoming_rx) = mpsc::unbounded_channel();\n\n let connection_loop_tx = Arc::new(connection_loop_tx);\n\n\n\n ConnectionLoopWorker::spawn(\n", "file_path": "src/connection/mod.rs", "rank": 62, "score": 31745.77663905316 }, { "content": " /// Send an arbitrary IRC message to one of the connections in the connection pool.\n\n ///\n\n /// An error is returned in case the message could not be sent over the picked connection.\n\n pub 
async fn send_message(&self, message: IRCMessage) -> Result<(), Error<T, L>> {\n\n let (return_tx, return_rx) = oneshot::channel();\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::SendMessage {\n\n message,\n\n return_sender: return_tx,\n\n })\n\n .unwrap();\n\n // unwrap: ClientLoopWorker should not die before all sender handles have been dropped\n\n return_rx.await.unwrap()\n\n }\n\n\n\n /// Send a `PRIVMSG`-type IRC message to a Twitch channel. The `message` can be a normal\n\n /// chat message or a chat command like `/ban` or similar.\n\n ///\n\n /// If you want to just send a normal chat message, `say()` should be preferred since it\n\n /// prevents commands like `/ban` from accidentally being executed.\n", "file_path": "src/client/mod.rs", "rank": 63, "score": 31745.30617475969 }, { "content": " let mut params;\n\n if let Some(params_part) = command_split.next() {\n\n params = vec![];\n\n\n\n let mut rest = Some(params_part);\n\n while let Some(rest_str) = rest {\n\n if let Some(sub_str) = rest_str.strip_prefix(':') {\n\n // trailing param, remove : and consume the rest of the input\n\n params.push(sub_str.to_owned());\n\n rest = None;\n\n } else {\n\n let mut split = rest_str.splitn(2, ' ');\n\n let param = split.next().unwrap();\n\n rest = split.next();\n\n\n\n if param.is_empty() {\n\n return Err(IRCParseError::TooManySpacesInMiddleParams);\n\n }\n\n params.push(param.to_owned());\n\n }\n", "file_path": "src/message/mod.rs", "rank": 64, "score": 31745.171865879758 }, { "content": "#[macro_export]\n\nmacro_rules! 
irc {\n\n (@replace_expr $_t:tt $sub:expr) => {\n\n $sub\n\n };\n\n (@count_exprs $($expression:expr),*) => {\n\n 0usize $(+ irc!(@replace_expr $expression 1usize))*\n\n };\n\n ($command:expr $(, $argument:expr )* ) => {\n\n {\n\n let capacity = irc!(@count_exprs $($argument),*);\n\n #[allow(unused_mut)]\n\n let mut temp_vec: ::std::vec::Vec<String> = ::std::vec::Vec::with_capacity(capacity);\n\n $(\n\n temp_vec.push(::std::string::String::from($argument));\n\n )*\n\n $crate::message::IRCMessage::new_simple(::std::string::String::from($command), temp_vec)\n\n }\n\n };\n\n}\n", "file_path": "src/message/mod.rs", "rank": 65, "score": 31744.817988401162 }, { "content": " /// the channel did not exist/was suspended or similar conditions.**\n\n ///\n\n /// `(false, true)` might on the other hand (similarly) that a `PART` message is sent but not\n\n /// answered yet by the server.\n\n ///\n\n /// `(true, true)` confirms that the channel is currently successfully joined in a normal fashion.\n\n ///\n\n /// `(false, false)` is returned for a channel that has not been joined previously at all\n\n /// or where a previous `PART` command has completed.\n\n pub async fn get_channel_status(&self, channel_login: String) -> (bool, bool) {\n\n let (return_tx, return_rx) = oneshot::channel();\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::GetChannelStatus {\n\n channel_login,\n\n return_sender: return_tx,\n\n })\n\n .unwrap();\n\n // unwrap: ClientLoopWorker should not die before all sender handles have been dropped\n\n return_rx.await.unwrap()\n\n }\n", "file_path": "src/client/mod.rs", "rank": 66, "score": 31744.706328480697 }, { "content": " EmptyTagsDeclaration,\n\n /// No space found after prefix (no command)\n\n #[error(\"No space found after prefix (no command)\")]\n\n NoSpaceAfterPrefix,\n\n /// No tags after : sign\n\n #[error(\"No tags after : sign\")]\n\n EmptyPrefixDeclaration,\n\n /// Expected command to only consist of alphabetic or numeric characters\n\n 
#[error(\"Expected command to only consist of alphabetic or numeric characters\")]\n\n MalformedCommand,\n\n /// Expected only single spaces between middle parameters\n\n #[error(\"Expected only single spaces between middle parameters\")]\n\n TooManySpacesInMiddleParams,\n\n /// Newlines are not permitted in raw IRC messages\n\n #[error(\"Newlines are not permitted in raw IRC messages\")]\n\n NewlinesInMessage,\n\n}\n\n\n", "file_path": "src/message/mod.rs", "rank": 67, "score": 31744.661488499034 }, { "content": "\n\nimpl<T: Transport, L: LoginCredentials> TwitchIRCClient<T, L> {\n\n /// Connect to Twitch IRC without joining any channels.\n\n ///\n\n /// **You typically do not need to call this method.** This is only provided for the rare\n\n /// case that one would only want to receive incoming whispers without joining channels\n\n /// or ever sending messages out. If your application joins channels during startup,\n\n /// calling `.connect()` is superfluous, as the client will automatically open the necessary\n\n /// connections when you join channels or send messages.\n\n pub async fn connect(&self) {\n\n let (return_tx, return_rx) = oneshot::channel();\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::Connect {\n\n return_sender: return_tx,\n\n })\n\n .unwrap();\n\n // unwrap: ClientLoopWorker should not die before all sender handles have been dropped\n\n return_rx.await.unwrap()\n\n }\n\n\n", "file_path": "src/client/mod.rs", "rank": 68, "score": 31744.544115455228 }, { "content": "\n\nimpl<T: Transport, L: LoginCredentials> TwitchIRCClient<T, L> {\n\n /// Create a new client from the given configuration.\n\n ///\n\n /// Note this method is not side-effect-free - a background task will be spawned\n\n /// as a result of calling this function.\n\n pub fn new(\n\n config: ClientConfig<L>,\n\n ) -> (\n\n mpsc::UnboundedReceiver<ServerMessage>,\n\n TwitchIRCClient<T, L>,\n\n ) {\n\n let config = Arc::new(config);\n\n let (client_loop_tx, client_loop_rx) = 
mpsc::unbounded_channel();\n\n let client_loop_tx = Arc::new(client_loop_tx);\n\n let (client_incoming_messages_tx, client_incoming_messages_rx) = mpsc::unbounded_channel();\n\n\n\n #[cfg(feature = \"metrics-collection\")]\n\n if let Some(ref metrics_identifier) = config.metrics_identifier {\n\n metrics::register_counter!(\n", "file_path": "src/client/mod.rs", "rank": 69, "score": 31744.342799765527 }, { "content": " /// joining to freshly created channels or freshly renamed channels is a concern in your application.\n\n ///\n\n /// Another note on Twitch behaviour: If a channel gets suspended, the `JOIN` membership stays\n\n /// active as long as the connection with that `JOIN` membership stays active. For this reason,\n\n /// there is no special logic or handling required for when a channel gets suspended.\n\n /// (The `JOIN` membership in that channel will continue to count as confirmed for as long\n\n /// as the connection stays alive. If the connection fails, the \"confirmed\" status for that\n\n /// channel is reset, and the client will automatically attempt to re-join that channel on a\n\n /// different or new connection.\n\n /// Unless an answer is again received by the server, the `join()` will then make attempts again\n\n /// to join that channel.\n\n pub fn join(&self, channel_login: String) {\n\n self.client_loop_tx\n\n .send(ClientLoopCommand::Join { channel_login })\n\n .unwrap();\n\n }\n\n\n\n /// Instruct the client to only be connected to these channels. 
Channels currently joined\n\n /// but not in the given set are parted, and channels in the set that are not currently\n\n /// joined are joined.\n", "file_path": "src/client/mod.rs", "rank": 70, "score": 31744.072407499632 }, { "content": " params: vec![],\n\n }\n\n );\n\n assert_eq!(\n\n irc![\"PRIVMSG\", \"#pajlada\"],\n\n IRCMessage {\n\n tags: IRCTags::new(),\n\n prefix: None,\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"#pajlada\".to_owned()],\n\n }\n\n );\n\n assert_eq!(\n\n irc![\"PRIVMSG\", \"#pajlada\", \"LUL xD\"],\n\n IRCMessage {\n\n tags: IRCTags::new(),\n\n prefix: None,\n\n command: \"PRIVMSG\".to_owned(),\n\n params: vec![\"#pajlada\".to_owned(), \"LUL xD\".to_owned()],\n\n }\n\n );\n\n }\n\n}\n", "file_path": "src/message/mod.rs", "rank": 71, "score": 31744.036802115632 }, { "content": " \"tmi-sent-ts\" .to_owned() => Some(\"1577040814959\".to_owned()),\n\n \"historical\".to_owned() => Some(\"1\".to_owned()),\n\n \"room-id\".to_owned() => Some(\"11148817\".to_owned()),\n\n \"emotes\".to_owned() => Some(\"\".to_owned()),\n\n \"color\".to_owned() => Some(\"#19E6E6\".to_owned()),\n\n \"id\".to_owned() => Some(\"6e2ccb1f-01ed-44d0-85b6-edf762524475\".to_owned()),\n\n \"turbo\".to_owned() => Some(\"0\".to_owned()),\n\n \"flags\".to_owned() => Some(\"\".to_owned()),\n\n \"user-id\".to_owned() => Some(\"40286300\".to_owned()),\n\n \"rm-received-ts\".to_owned() => Some(\"1577040815136\".to_owned()),\n\n \"user-type\".to_owned() => Some(\"mod\".to_owned()),\n\n \"subscriber\".to_owned() => Some(\"1\".to_owned()),\n\n \"badges\".to_owned() => Some(\"moderator/1,subscriber/12\".to_owned()),\n\n \"badge-info\".to_owned() => Some(\"subscriber/16\".to_owned()),\n\n \"mod\".to_owned() => Some(\"1\".to_owned()),\n\n }),\n\n prefix: Some(IRCPrefix::Full {\n\n nick: \"randers\".to_owned(),\n\n user: Some(\"randers\".to_owned()),\n\n host: Some(\"randers.tmi.twitch.tv\".to_owned()),\n", "file_path": "src/message/mod.rs", "rank": 72, "score": 
31743.52068734879 }, { "content": " }\n\n } else {\n\n params = vec![];\n\n };\n\n\n\n Ok(IRCMessage {\n\n tags,\n\n prefix,\n\n command,\n\n params,\n\n })\n\n }\n\n}\n\n\n\nimpl AsRawIRC for IRCMessage {\n\n fn format_as_raw_irc(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n if !self.tags.0.is_empty() {\n\n f.write_char('@')?;\n\n self.tags.format_as_raw_irc(f)?;\n\n f.write_char(' ')?;\n", "file_path": "src/message/mod.rs", "rank": 73, "score": 31743.455663116114 }, { "content": " /// sent to it).\n\n ///\n\n /// The client will internally ensure that there has always been at least _an attempt_ to join\n\n /// this channel. However this does not necessarily mean the join is always successful.\n\n ///\n\n /// If the given `channel_login` does not exist (or is suspended) then the IRC server\n\n /// will ignore the `JOIN` and you will not be joined to the given channel (what channel would\n\n /// you even expect to join if the channel does not exist?).\n\n ///\n\n /// However, the client listens for a server-side confirmation to this `JOIN` command.\n\n /// If the server confirms that the `JOIN` was successful, then the client saves this information.\n\n /// This information can be queried using `get_channel_status()`.\n\n ///\n\n /// If you later issue another `join()` call, and the server previously confirmed the successful\n\n /// joining of `channel_login`, then no message will be sent out.\n\n ///\n\n /// However if the server *did not* confirm the successful `JOIN` command previously, then the\n\n /// `JOIN` is attempted again.\n\n ///\n\n /// You can use this mechanism to e.g. 
periodically re-try `JOIN`ing a given channel if\n", "file_path": "src/client/mod.rs", "rank": 74, "score": 31743.230634631884 }, { "content": " \"twitch_irc_reconnects\",\n\n \"Counts up every time a connection in the connection pool fails unexpectedly\",\n\n \"client\" => metrics_identifier.clone().into_owned()\n\n );\n\n }\n\n\n\n ClientLoopWorker::spawn(\n\n config,\n\n // the worker gets only a weak reference\n\n Arc::downgrade(&client_loop_tx),\n\n client_loop_rx,\n\n client_incoming_messages_tx,\n\n );\n\n\n\n (\n\n client_incoming_messages_rx,\n\n TwitchIRCClient { client_loop_tx },\n\n )\n\n }\n\n}\n", "file_path": "src/client/mod.rs", "rank": 75, "score": 31743.040033081645 }, { "content": " }\n\n\n\n if let Some(prefix) = &self.prefix {\n\n f.write_char(':')?;\n\n prefix.format_as_raw_irc(f)?;\n\n f.write_char(' ')?;\n\n }\n\n\n\n f.write_str(&self.command)?;\n\n\n\n for param in self.params.iter() {\n\n if !param.contains(' ') && !param.is_empty() && !param.starts_with(':') {\n\n // middle parameter\n\n write!(f, \" {}\", param)?;\n\n } else {\n\n // trailing parameter\n\n write!(f, \" :{}\", param)?;\n\n // TODO should there be a panic if this is not the last parameter?\n\n break;\n\n }\n", "file_path": "src/message/mod.rs", "rank": 76, "score": 31740.353733368498 }, { "content": " config,\n\n connection_incoming_tx,\n\n Arc::downgrade(&connection_loop_tx),\n\n connection_loop_rx,\n\n );\n\n\n\n (connection_incoming_rx, Connection { connection_loop_tx })\n\n }\n\n}\n", "file_path": "src/connection/mod.rs", "rank": 77, "score": 31740.353733368498 }, { "content": "pub mod clearchat;\n\npub mod clearmsg;\n\npub mod globaluserstate;\n\npub mod hosttarget;\n\npub mod join;\n\npub mod notice;\n\npub mod part;\n\npub mod ping;\n\npub mod pong;\n\npub mod privmsg;\n\npub mod reconnect;\n\npub mod roomstate;\n\npub mod usernotice;\n\npub mod userstate;\n\npub mod whisper;\n\n// TODO types: CLEARMSG, ROOMSTATE, USERSTATE, GLOBALUSERSTATE, WHISPER, HOSTTARGET, 
NOTICE, USERNOTICE\n\n\n\nuse self::ServerMessageParseError::*;\n\nuse crate::message::commands::clearmsg::ClearMsgMessage;\n\nuse crate::message::commands::join::JoinMessage;\n", "file_path": "src/message/commands/mod.rs", "rank": 87, "score": 30148.17064275206 }, { "content": " let is_action =\n\n message_text.starts_with(\"\\u{0001}ACTION \") && message_text.ends_with('\\u{0001}');\n\n if is_action {\n\n // remove the prefix and suffix\n\n message_text = &message_text[8..message_text.len() - 1]\n\n }\n\n\n\n Ok((message_text, is_action))\n\n }\n\n\n\n fn try_get_tag_value(\n\n &self,\n\n key: &'static str,\n\n ) -> Result<Option<&str>, ServerMessageParseError> {\n\n match self.tags.0.get(key) {\n\n Some(Some(value)) => Ok(Some(value)),\n\n Some(None) => Ok(None),\n\n None => Err(MissingTag(self.to_owned(), key)),\n\n }\n\n }\n", "file_path": "src/message/commands/mod.rs", "rank": 88, "score": 30143.728948816373 }, { "content": " type Error = ServerMessageParseError;\n\n\n\n fn try_from(source: IRCMessage) -> Result<ServerMessage, ServerMessageParseError> {\n\n use ServerMessage::*;\n\n\n\n Ok(match source.command.as_str() {\n\n \"CLEARCHAT\" => ClearChat(ClearChatMessage::try_from(source)?),\n\n \"CLEARMSG\" => ClearMsg(ClearMsgMessage::try_from(source)?),\n\n \"GLOBALUSERSTATE\" => GlobalUserState(GlobalUserStateMessage::try_from(source)?),\n\n \"HOSTTARGET\" => HostTarget(HostTargetMessage::try_from(source)?),\n\n \"JOIN\" => Join(JoinMessage::try_from(source)?),\n\n \"NOTICE\" => Notice(NoticeMessage::try_from(source)?),\n\n \"PART\" => Part(PartMessage::try_from(source)?),\n\n \"PING\" => Ping(PingMessage::try_from(source)?),\n\n \"PONG\" => Pong(PongMessage::try_from(source)?),\n\n \"PRIVMSG\" => Privmsg(PrivmsgMessage::try_from(source)?),\n\n \"RECONNECT\" => Reconnect(ReconnectMessage::try_from(source)?),\n\n \"ROOMSTATE\" => RoomState(RoomStateMessage::try_from(source)?),\n\n \"USERNOTICE\" => UserNotice(UserNoticeMessage::try_from(source)?),\n\n 
\"USERSTATE\" => UserState(UserStateMessage::try_from(source)?),\n", "file_path": "src/message/commands/mod.rs", "rank": 89, "score": 30143.446858829422 }, { "content": "\n\n fn try_get_nonempty_tag_value(\n\n &self,\n\n key: &'static str,\n\n ) -> Result<&str, ServerMessageParseError> {\n\n match self.tags.0.get(key) {\n\n Some(Some(value)) => Ok(value),\n\n Some(None) => Err(MissingTagValue(self.to_owned(), key)),\n\n None => Err(MissingTag(self.to_owned(), key)),\n\n }\n\n }\n\n\n\n fn try_get_optional_nonempty_tag_value(\n\n &self,\n\n key: &'static str,\n\n ) -> Result<Option<&str>, ServerMessageParseError> {\n\n match self.tags.0.get(key) {\n\n Some(Some(value)) => Ok(Some(value)),\n\n Some(None) => Err(MissingTagValue(self.to_owned(), key)),\n\n None => Ok(None),\n", "file_path": "src/message/commands/mod.rs", "rank": 90, "score": 30140.993624510964 }, { "content": "// that way (which would break their implementations if there is an enum variant added and they\n\n// expect certain commands to be emitted under Generic)\n\n// that means the only way to get the IRCMessage is via IRCMessage::from()/.into()\n\n// which combined with #[non_exhaustive] allows us to add enum variants\n\n// without making a major release\n\n#[derive(Debug, PartialEq, Clone)]\n\n#[doc(hidden)]\n\npub struct HiddenIRCMessage(pub(self) IRCMessage);\n\n\n\n/// An IRCMessage that has been parsed into a more concrete type based on its command.\n\n///\n\n/// This type is non-exhausive, because more types of commands exist and can be added.\n\n///\n\n/// If you wish to (manually) parse a type of command that is not already parsed by this library,\n\n/// use `IRCMessage::from` to convert the `ServerMessage` back to an `IRCMessage`, then\n\n/// check the message's `command` and perform your parsing.\n\n///\n\n/// There is intentionally no generic `Unparsed` variant here. 
If there was, and the library\n\n/// added parsing for the command you were trying to catch by matching against the `Unparsed`\n\n/// variant, your code would be broken without any compiler error.\n", "file_path": "src/message/commands/mod.rs", "rank": 91, "score": 30140.616688686994 }, { "content": " ServerMessage::RoomState(msg) => msg.source,\n\n ServerMessage::UserNotice(msg) => msg.source,\n\n ServerMessage::UserState(msg) => msg.source,\n\n ServerMessage::Whisper(msg) => msg.source,\n\n ServerMessage::Generic(msg) => msg.0,\n\n }\n\n }\n\n}\n\n\n\n// borrowed variant of the above\n\nimpl ServerMessage {\n\n /// Get a reference to the `IRCMessage` this `ServerMessage` was parsed from.\n\n pub fn source(&self) -> &IRCMessage {\n\n match self {\n\n ServerMessage::ClearChat(msg) => &msg.source,\n\n ServerMessage::ClearMsg(msg) => &msg.source,\n\n ServerMessage::GlobalUserState(msg) => &msg.source,\n\n ServerMessage::HostTarget(msg) => &msg.source,\n\n ServerMessage::Join(msg) => &msg.source,\n\n ServerMessage::Notice(msg) => &msg.source,\n", "file_path": "src/message/commands/mod.rs", "rank": 92, "score": 30140.612709438174 }, { "content": "use crate::message::commands::part::PartMessage;\n\nuse crate::message::commands::ping::PingMessage;\n\nuse crate::message::commands::pong::PongMessage;\n\nuse crate::message::commands::reconnect::ReconnectMessage;\n\nuse crate::message::commands::userstate::UserStateMessage;\n\nuse crate::message::prefix::IRCPrefix;\n\nuse crate::message::twitch::{Badge, Emote, RGBColor};\n\nuse crate::message::{\n\n AsRawIRC, ClearChatMessage, GlobalUserStateMessage, HostTargetMessage, IRCMessage,\n\n NoticeMessage, PrivmsgMessage, RoomStateMessage, UserNoticeMessage, WhisperMessage,\n\n};\n\nuse chrono::{DateTime, TimeZone, Utc};\n\nuse itertools::Itertools;\n\nuse std::collections::HashSet;\n\nuse std::convert::TryFrom;\n\nuse std::ops::Range;\n\nuse std::str::FromStr;\n\nuse thiserror::Error;\n\n\n\n/// Errors encountered while trying to 
parse an IRC message as a more specialized \"server message\",\n", "file_path": "src/message/commands/mod.rs", "rank": 93, "score": 30139.613473274203 }, { "content": "/// based on its IRC command.\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum ServerMessageParseError {\n\n /// That command's data is not parsed by this implementation\n\n ///\n\n /// This type of error is only returned if you use `try_from` directly on a special\n\n /// server message implementation, instead of the general `ServerMessage::try_from`\n\n /// which covers all implementations and does not emit this type of error.\n\n #[error(\"Could not parse IRC message {} as ServerMessage: That command's data is not parsed by this implementation\", .0.as_raw_irc())]\n\n MismatchedCommand(IRCMessage),\n\n /// No tag present under key `key`\n\n #[error(\"Could not parse IRC message {} as ServerMessage: No tag present under key `{1}`\", .0.as_raw_irc())]\n\n MissingTag(IRCMessage, &'static str),\n\n /// No tag value present under key `key`\n\n #[error(\"Could not parse IRC message {} as ServerMessage: No tag value present under key `{1}`\", .0.as_raw_irc())]\n\n MissingTagValue(IRCMessage, &'static str),\n\n /// Malformed tag value for tag `key`, value was `value`\n\n #[error(\"Could not parse IRC message {} as ServerMessage: Malformed tag value for tag `{1}`, value was `{2}`\", .0.as_raw_irc())]\n\n MalformedTagValue(IRCMessage, &'static str, String),\n\n /// No parameter found at index `n`\n", "file_path": "src/message/commands/mod.rs", "rank": 94, "score": 30138.99615390306 }, { "content": " \"WHISPER\" => Whisper(WhisperMessage::try_from(source)?),\n\n _ => Generic(HiddenIRCMessage(source)),\n\n })\n\n }\n\n}\n\n\n\nimpl From<ServerMessage> for IRCMessage {\n\n fn from(msg: ServerMessage) -> IRCMessage {\n\n match msg {\n\n ServerMessage::ClearChat(msg) => msg.source,\n\n ServerMessage::ClearMsg(msg) => msg.source,\n\n ServerMessage::GlobalUserState(msg) => msg.source,\n\n 
ServerMessage::HostTarget(msg) => msg.source,\n\n ServerMessage::Join(msg) => msg.source,\n\n ServerMessage::Notice(msg) => msg.source,\n\n ServerMessage::Part(msg) => msg.source,\n\n ServerMessage::Ping(msg) => msg.source,\n\n ServerMessage::Pong(msg) => msg.source,\n\n ServerMessage::Privmsg(msg) => msg.source,\n\n ServerMessage::Reconnect(msg) => msg.source,\n", "file_path": "src/message/commands/mod.rs", "rank": 95, "score": 30138.691703275887 }, { "content": " &self,\n\n tag_key: &'static str,\n\n ) -> Result<Option<bool>, ServerMessageParseError>;\n\n fn try_get_timestamp(\n\n &self,\n\n tag_key: &'static str,\n\n ) -> Result<DateTime<Utc>, ServerMessageParseError>;\n\n}\n\n\n\nimpl IRCMessageParseExt for IRCMessage {\n\n fn try_get_param(&self, index: usize) -> Result<&str, ServerMessageParseError> {\n\n Ok(self\n\n .params\n\n .get(index)\n\n .ok_or_else(|| MissingParameter(self.to_owned(), index))?)\n\n }\n\n\n\n fn try_get_message_text(&self) -> Result<(&str, bool), ServerMessageParseError> {\n\n let mut message_text = self.try_get_param(1)?;\n\n\n", "file_path": "src/message/commands/mod.rs", "rank": 96, "score": 30138.680946215445 }, { "content": " Ok(self.try_get_number::<u8>(tag_key)? 
> 0)\n\n }\n\n\n\n fn try_get_optional_number<N: FromStr>(\n\n &self,\n\n tag_key: &'static str,\n\n ) -> Result<Option<N>, ServerMessageParseError> {\n\n let tag_value = match self.tags.0.get(tag_key) {\n\n Some(Some(value)) => value,\n\n Some(None) => return Err(MissingTagValue(self.to_owned(), tag_key)),\n\n None => return Ok(None),\n\n };\n\n\n\n let number = N::from_str(tag_value)\n\n .map_err(|_| MalformedTagValue(self.to_owned(), tag_key, tag_value.to_owned()))?;\n\n Ok(Some(number))\n\n }\n\n\n\n fn try_get_optional_bool(\n\n &self,\n", "file_path": "src/message/commands/mod.rs", "rank": 97, "score": 30138.586861891952 }, { "content": " ServerMessage::Part(msg) => &msg.source,\n\n ServerMessage::Ping(msg) => &msg.source,\n\n ServerMessage::Pong(msg) => &msg.source,\n\n ServerMessage::Privmsg(msg) => &msg.source,\n\n ServerMessage::Reconnect(msg) => &msg.source,\n\n ServerMessage::RoomState(msg) => &msg.source,\n\n ServerMessage::UserNotice(msg) => &msg.source,\n\n ServerMessage::UserState(msg) => &msg.source,\n\n ServerMessage::Whisper(msg) => &msg.source,\n\n ServerMessage::Generic(msg) => &msg.0,\n\n }\n\n }\n\n\n\n pub(crate) fn new_generic(message: IRCMessage) -> ServerMessage {\n\n ServerMessage::Generic(HiddenIRCMessage(message))\n\n }\n\n}\n\n\n\nimpl AsRawIRC for ServerMessage {\n\n fn format_as_raw_irc(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.source().format_as_raw_irc(f)\n\n }\n\n}\n", "file_path": "src/message/commands/mod.rs", "rank": 98, "score": 30138.27592477951 }, { "content": " tag_key: &'static str,\n\n ) -> Result<Option<bool>, ServerMessageParseError> {\n\n Ok(self.try_get_optional_number::<u8>(tag_key)?.map(|n| n > 0))\n\n }\n\n\n\n fn try_get_timestamp(\n\n &self,\n\n tag_key: &'static str,\n\n ) -> Result<DateTime<Utc>, ServerMessageParseError> {\n\n // e.g. 
tmi-sent-ts.\n\n let tag_value = self.try_get_nonempty_tag_value(tag_key)?;\n\n let milliseconds_since_epoch = i64::from_str(tag_value)\n\n .map_err(|_| MalformedTagValue(self.to_owned(), tag_key, tag_value.to_owned()))?;\n\n Utc.timestamp_millis_opt(milliseconds_since_epoch)\n\n .single()\n\n .ok_or_else(|| MalformedTagValue(self.to_owned(), tag_key, tag_value.to_owned()))\n\n }\n\n}\n\n\n\n// makes it so users cannot match against Generic and get the underlying IRCMessage\n", "file_path": "src/message/commands/mod.rs", "rank": 99, "score": 30137.958239359617 } ]
Rust
src/test_framework/incremental_interface.rs
vlmutolo/orion
e2af271cc3b7ce763591e02a5e6808579c1e3504
use crate::errors::UnknownCryptoError; use core::marker::PhantomData; pub trait TestableStreamingContext<T: PartialEq> { fn reset(&mut self) -> Result<(), UnknownCryptoError>; fn update(&mut self, input: &[u8]) -> Result<(), UnknownCryptoError>; fn finalize(&mut self) -> Result<T, UnknownCryptoError>; fn one_shot(input: &[u8]) -> Result<T, UnknownCryptoError>; fn verify_result(expected: &T, input: &[u8]) -> Result<(), UnknownCryptoError>; fn compare_states(state_1: &Self, state_2: &Self); } #[allow(dead_code)] pub struct StreamingContextConsistencyTester<R, T> { _return_type: PhantomData<R>, _initial_context: T, blocksize: usize, } impl<R, T> StreamingContextConsistencyTester<R, T> where R: PartialEq + core::fmt::Debug, T: TestableStreamingContext<R> + Clone, { pub fn new(streaming_context: T, blocksize: usize) -> Self { Self { _return_type: PhantomData, _initial_context: streaming_context, blocksize, } } const DEFAULT_INPUT: [u8; 37] = [255u8; 37]; #[cfg(feature = "safe_api")] pub fn run_all_tests_property(&self, data: &[u8]) { self.consistency(data); self.consistency(&[0u8; 0]); self.produces_same_state(data); self.incremental_and_one_shot(data); self.double_finalize_with_reset_no_update_ok(data); self.double_finalize_with_reset_ok(data); self.double_finalize_err(data); self.update_after_finalize_with_reset_ok(data); self.update_after_finalize_err(data); self.double_reset_ok(data); self.immediate_finalize(); Self::verify_same_input_ok(data); Self::verify_diff_input_err(data); } #[cfg(feature = "safe_api")] pub fn run_all_tests(&self) { self.consistency(&Self::DEFAULT_INPUT); self.consistency(&[0u8; 0]); self.produces_same_state(&Self::DEFAULT_INPUT); self.incremental_processing_with_leftover(); self.incremental_and_one_shot(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_no_update_ok(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.double_finalize_err(&Self::DEFAULT_INPUT); 
self.update_after_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.update_after_finalize_err(&Self::DEFAULT_INPUT); self.double_reset_ok(&Self::DEFAULT_INPUT); self.immediate_finalize(); Self::verify_same_input_ok(&Self::DEFAULT_INPUT); Self::verify_diff_input_err(&Self::DEFAULT_INPUT); } #[cfg(not(feature = "safe_api"))] pub fn run_all_tests(&self) { self.consistency(&Self::DEFAULT_INPUT); self.consistency(&[0u8; 0]); self.produces_same_state(&Self::DEFAULT_INPUT); self.incremental_and_one_shot(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_no_update_ok(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.double_finalize_err(&Self::DEFAULT_INPUT); self.update_after_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.update_after_finalize_err(&Self::DEFAULT_INPUT); self.double_reset_ok(&Self::DEFAULT_INPUT); self.immediate_finalize(); Self::verify_same_input_ok(&Self::DEFAULT_INPUT); Self::verify_diff_input_err(&Self::DEFAULT_INPUT); } fn consistency(&self, data: &[u8]) { let mut state_1 = self._initial_context.clone(); state_1.update(data).unwrap(); let res_1 = state_1.finalize().unwrap(); let mut state_2 = self._initial_context.clone(); state_2.reset().unwrap(); state_2.update(data).unwrap(); let res_2 = state_2.finalize().unwrap(); let mut state_3 = self._initial_context.clone(); state_3.update(data).unwrap(); state_3.reset().unwrap(); state_3.update(data).unwrap(); let res_3 = state_3.finalize().unwrap(); let mut state_4 = self._initial_context.clone(); state_4.update(data).unwrap(); let _ = state_4.finalize().unwrap(); state_4.reset().unwrap(); state_4.update(data).unwrap(); let res_4 = state_4.finalize().unwrap(); assert_eq!(res_1, res_2); assert_eq!(res_2, res_3); assert_eq!(res_3, res_4); if data.is_empty() { let mut state_5 = self._initial_context.clone(); let res_5 = state_5.finalize().unwrap(); let mut state_6 = self._initial_context.clone(); state_6.reset().unwrap(); let res_6 = state_6.finalize().unwrap(); let 
mut state_7 = self._initial_context.clone(); state_7.update(b"WRONG DATA").unwrap(); state_7.reset().unwrap(); let res_7 = state_7.finalize().unwrap(); assert_eq!(res_4, res_5); assert_eq!(res_5, res_6); assert_eq!(res_6, res_7); } } fn produces_same_state(&self, data: &[u8]) { let state_1 = self._initial_context.clone(); let mut state_2 = self._initial_context.clone(); state_2.reset().unwrap(); let mut state_3 = self._initial_context.clone(); state_3.update(data).unwrap(); state_3.reset().unwrap(); let mut state_4 = self._initial_context.clone(); state_4.update(data).unwrap(); let _ = state_4.finalize().unwrap(); state_4.reset().unwrap(); T::compare_states(&state_1, &state_2); T::compare_states(&state_2, &state_3); T::compare_states(&state_3, &state_4); } #[cfg(feature = "safe_api")] fn incremental_processing_with_leftover(&self) { for len in 0..self.blocksize * 4 { let data = vec![0u8; len]; let mut state = self._initial_context.clone(); let mut other_data: Vec<u8> = Vec::new(); other_data.extend_from_slice(&data); state.update(&data).unwrap(); if data.len() > self.blocksize { other_data.extend_from_slice(b""); state.update(b"").unwrap(); } if data.len() > self.blocksize * 2 { other_data.extend_from_slice(b"Extra"); state.update(b"Extra").unwrap(); } if data.len() > self.blocksize * 3 { other_data.extend_from_slice(&[0u8; 256]); state.update(&[0u8; 256]).unwrap(); } let streaming_result = state.finalize().unwrap(); let one_shot_result = T::one_shot(&other_data).unwrap(); assert_eq!(streaming_result, one_shot_result); } } fn incremental_and_one_shot(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let streaming_result = state.finalize().unwrap(); let one_shot_result = T::one_shot(data).unwrap(); assert_eq!(streaming_result, one_shot_result); } fn double_finalize_with_reset_no_update_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); 
state.reset().unwrap(); assert!(state.finalize().is_ok()); } fn double_finalize_with_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); state.update(data).unwrap(); assert!(state.finalize().is_ok()); } fn double_finalize_err(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); assert!(state.finalize().is_err()); } fn update_after_finalize_with_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); assert!(state.update(data).is_ok()); } fn update_after_finalize_err(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); assert!(state.update(data).is_err()); } fn double_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); assert!(state.reset().is_ok()); } fn immediate_finalize(&self) { let mut state = self._initial_context.clone(); assert!(state.finalize().is_ok()); } pub fn verify_same_input_ok(data: &[u8]) { let expected = T::one_shot(data).unwrap(); assert!(T::verify_result(&expected, data).is_ok()); } pub fn verify_diff_input_err(data: &[u8]) { let expected = T::one_shot(data).unwrap(); assert!(T::verify_result(&expected, b"Bad data").is_err()); } }
use crate::errors::UnknownCryptoError; use core::marker::PhantomData; pub trait TestableStreamingContext<T: PartialEq> { fn reset(&mut self) -> Result<(), UnknownCryptoError>; fn update(&mut self, input: &[u8]) -> Result<(), UnknownCryptoError>; fn finalize(&mut self) -> Result<T, UnknownCryptoError>; fn one_shot(input: &[u8]) -> Result<T, UnknownCryptoError>; fn verify_result(expected: &T, input: &[u8]) -> Result<(), UnknownCryptoError>; fn compare_states(state_1: &Self, state_2: &Self); } #[allow(dead_code)] pub struct StreamingContextConsistencyTester<R, T> { _return_type: PhantomData<R>, _initial_context: T, blocksize: usize, } impl<R, T> StreamingContextConsistencyTester<R, T> where R: PartialEq + core::fmt::Debug, T: TestableStreamingContext<R> + Clone, { pub fn new(streaming_context: T, blocksize: usize) -> Self { Self { _return_type: PhantomData, _initial_context: streaming_context, blocksize, } } const DEFAULT_INPUT: [u8; 37] = [255u8; 37]; #[cfg(feature = "safe_api")] pub fn run_all_tests_property(&self, data: &[u8]) { self.consistency(data); self.consistency(&[0u8; 0]); self.produces_same_state(data); self.incremental_and_one_shot(data); self.double_finalize_with_reset_no_update_ok(data); self.double_finalize_with_reset_ok(data); self.double_finalize_err(data); self.update_after_finalize_with_reset_ok(data); self.update_after_finalize_err(data); self.double_reset_ok(data); self.immediate_finalize(); Self::verify_same_input_ok(data); Self::verify_diff_input_err(data); } #[cfg(feature = "safe_api")] pub fn run_all_tests(&self) { self.consistency(&Self::DEFAULT_INPUT); self.consistency(&[0u8; 0]); self.produces_same_state(&Self::DEFAULT_INPUT); self.incremental_processing_with_leftover(); self.incremental_and_one_shot(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_no_update_ok(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.double_finalize_err(&Self::DEFAULT_INPUT); 
self.update_after_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.update_after_finalize_err(&Self::DEFAULT_INPUT); self.double_reset_ok(&Self::DEFAULT_INPUT); self.immediate_finalize(); Self::verify_same_input_ok(&Self::DEFAULT_INPUT); Self::verify_diff_input_err(&Self::DEFAULT_INPUT); } #[cfg(not(feature = "safe_api"))] pub fn run_all_tests(&self) { self.consistency(&Self::DEFAULT_INPUT); self.consistency(&[0u8; 0]); self.produces_same_state(&Self::DEFAULT_INPUT); self.incremental_and_one_shot(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_no_update_ok(&Self::DEFAULT_INPUT); self.double_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.double_finalize_err(&Self::DEFAULT_INPUT); self.update_after_finalize_with_reset_ok(&Self::DEFAULT_INPUT); self.update_after_finalize_err(&Self::DEFAULT_INPUT); self.double_reset_ok(&Self::DEFAULT_INPUT); self.immediate_finalize(); Self::verify_same_input_ok(&Self::DEFAULT_INPUT); Self::verify_diff_input_err(&Self::DEFAULT_INPUT); } fn consistency(&self, data: &[u8]) { let mut state_1 = self._initial_context.clone(); state_1.update(data).unwrap(); let res_1 = state_1.finalize().unwrap(); let mut state_2 = self._initial_context.clone(); state_2.reset().unwrap(); state_2.update(data).unwrap(); let res_2 = state_2.finalize().unwrap(); let mut state_3 = self._initial_context.clone(); state_3.update(data).unwrap(); state_3.reset().unwrap(); state_3.update(data).unwrap(); let res_3 = state_3.finalize().unwrap(); let mut state_4 = self._initial_context.clone(); state_4.update(data).unwrap(); let _ = state_4.finalize().unwrap(); state_4.reset().unwrap(); state_4.update(data).unwrap(); let res_4 = state_4.finalize().unwrap(); assert_eq!(res_1, res_2); assert_eq!(res_2, res_3); assert_eq!(res_3, res_4); if data.is_empty() { let mut state_5 = self._initial_context.clone(); let res_5 = state_5.finalize().unwrap(); let mut state_6 = self._initial_context.clone(); state_6.reset().unwrap(); let res_6 = state_6.finalize().unwrap(); let 
mut state_7 = self._initial_context.clone(); state_7.update(b"WRONG DATA").unwrap(); state_7.reset().unwrap(); let res_7 = state_7.finalize().unwrap(); assert_eq!(res_4, res_5); assert_eq!(res_5, res_6); assert_eq!(res_6, res_7); } } fn produces_same_state(&self, data: &[u8]) { let state_1 = self._initial_context.clone(); let mut state_2 = self._initial_context.clone(); state_2.reset().unwrap(); let mut state_3 = self._initial_context.clone(); state_3.update(data).unwrap(); state_3.reset().unwrap(); let mut state_4 = self._initial_context.clone(); state_4.update(data).unwrap(); let _ = state_4.finalize().unwrap(); state_4.reset().unwrap(); T::compare_states(&state_1, &state_2); T::compare_states(&state_2, &state_3); T::compare_states(&state_3, &state_4); } #[cfg(feature = "safe_api")] fn incremental_processing_with_leftover(&self) { for len in 0..self.blocksize * 4 { let data = vec![0u8; len]; let mut state = self._initial_context.clone(); let mut other_data: Vec<u8> = Vec::new(); other_data.extend_from_slice(&data); state.update(&data).unwrap(); if data.len() > self.blocksize { other_data.extend_from_slice(b""); state.update(b"").unwrap(); } if data.len() > self.blocksize * 2 { other_data.extend_from_slice(b"Extra"); state.update(b"Extra").unwrap(); } if data.len() > self.blocksize * 3 { other_data.extend_from_slice(&[0u8; 256]); state.update(&[0u8; 256]).unwrap(); } let streaming_result = state.finalize().unwrap(); let one_shot_result = T::one_shot(&other_data).unwrap(); assert_eq!(streaming_result, one_shot_result); } } fn incremental_and_one_shot(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let streaming_result = state.finalize().unwrap(); let one_shot_result = T::one_shot(data).unwrap(); assert_eq!(streaming_result, one_shot_result); } fn double_finalize_with_reset_no_update_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); 
state.reset().unwrap(); assert!(state.finalize().is_ok()); } fn double_finalize_with_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); state.update(data).unwrap(); assert!(state.finalize().is_ok()); } fn double_finalize_err(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); assert!(state.finalize().is_err()); }
fn update_after_finalize_err(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); assert!(state.update(data).is_err()); } fn double_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); assert!(state.reset().is_ok()); } fn immediate_finalize(&self) { let mut state = self._initial_context.clone(); assert!(state.finalize().is_ok()); } pub fn verify_same_input_ok(data: &[u8]) { let expected = T::one_shot(data).unwrap(); assert!(T::verify_result(&expected, data).is_ok()); } pub fn verify_diff_input_err(data: &[u8]) { let expected = T::one_shot(data).unwrap(); assert!(T::verify_result(&expected, b"Bad data").is_err()); } }
fn update_after_finalize_with_reset_ok(&self, data: &[u8]) { let mut state = self._initial_context.clone(); state.update(data).unwrap(); let _ = state.finalize().unwrap(); state.reset().unwrap(); assert!(state.update(data).is_ok()); }
function_block-full_function
[ { "content": "/// H' as defined in the specification.\n\nfn extended_hash(input: &[u8], dst: &mut [u8]) -> Result<(), UnknownCryptoError> {\n\n if dst.is_empty() {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let outlen = dst.len() as u32;\n\n\n\n if dst.len() <= BLAKE2B_OUTSIZE {\n\n let mut ctx = Blake2b::new(dst.len())?;\n\n ctx.update(&outlen.to_le_bytes())?;\n\n ctx.update(input)?;\n\n dst.copy_from_slice(ctx.finalize()?.as_ref());\n\n } else {\n\n let mut ctx = Blake2b::new(BLAKE2B_OUTSIZE)?;\n\n ctx.update(&outlen.to_le_bytes())?;\n\n ctx.update(input)?;\n\n\n\n let mut tmp = ctx.finalize()?;\n\n dst[..BLAKE2B_OUTSIZE].copy_from_slice(tmp.as_ref());\n\n\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 0, "score": 265017.1695460619 }, { "content": "/// Hashing using BLAKE2b-256.\n\npub fn digest(data: &[u8]) -> Result<Digest, UnknownCryptoError> {\n\n blake2b::Hasher::Blake2b256.digest(data)\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(feature = \"safe_api\")]\n\n#[cfg(test)]\n\nmod public {\n\n use super::*;\n\n\n\n #[quickcheck]\n\n /// Hashing twice with same input should always produce same output.\n\n fn prop_digest_same_result(input: Vec<u8>) -> bool {\n\n digest(&input[..]).unwrap() == digest(&input[..]).unwrap()\n\n }\n\n\n\n #[quickcheck]\n\n /// Hashing twice with different input should never produce same output.\n\n fn prop_digest_diff_result(input: Vec<u8>) -> bool {\n\n digest(&input[..]).unwrap() != digest(b\"Completely wrong input\").unwrap()\n\n }\n\n}\n", "file_path": "src/high_level/hash.rs", "rank": 1, "score": 258399.9161343361 }, { "content": "/// Generate random bytes using a CSPRNG. Not available in `no_std` context.\n\n///\n\n/// # About:\n\n/// This function can be used to generate cryptographic keys, salts or other\n\n/// values that rely on strong randomness. 
Please note that most keys and other\n\n/// types used throughout orion, implement their own `generate()` function and\n\n/// it is strongly preferred to use those, compared to [`secure_rand_bytes()`].\n\n///\n\n/// This uses [`getrandom`].\n\n///\n\n/// # Parameters:\n\n/// - `dst`: Destination buffer for the randomly generated bytes. The amount of\n\n/// bytes to be generated is\n\n/// implied by the length of `dst`.\n\n///\n\n/// # Errors:\n\n/// An error will be returned if:\n\n/// - `dst` is empty.\n\n///\n\n/// # Panics:\n\n/// A panic will occur if:\n\n/// - Failure to generate random bytes securely.\n\n/// - The platform is not supported by [`getrandom`].\n\n///\n\n/// # Example:\n\n/// ```rust\n\n/// use orion::util;\n\n///\n\n/// let mut salt = [0u8; 64];\n\n/// util::secure_rand_bytes(&mut salt)?;\n\n/// # Ok::<(), orion::errors::UnknownCryptoError>(())\n\n/// ```\n\n/// [`getrandom`]: https://github.com/rust-random/getrandom\n\npub fn secure_rand_bytes(dst: &mut [u8]) -> Result<(), errors::UnknownCryptoError> {\n\n if dst.is_empty() {\n\n return Err(errors::UnknownCryptoError);\n\n }\n\n\n\n getrandom::getrandom(dst).unwrap();\n\n\n\n Ok(())\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/util/mod.rs", "rank": 2, "score": 243358.4442091347 }, { "content": "/// Authenticate a message using BLAKE2b-256 in keyed mode.\n\npub fn authenticate(secret_key: &SecretKey, data: &[u8]) -> Result<Tag, UnknownCryptoError> {\n\n if secret_key.len() < BLAKE2B_MIN_KEY_SIZE {\n\n return Err(UnknownCryptoError);\n\n }\n\n let blake2b_secret_key = blake2b::SecretKey::from_slice(secret_key.unprotected_as_bytes())?;\n\n let mut state = Blake2b::new(&blake2b_secret_key, BLAKE2B_TAG_SIZE)?;\n\n state.update(data)?;\n\n state.finalize()\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/high_level/auth.rs", "rank": 3, "score": 
230050.3865760789 }, { "content": "/// Compare two equal length slices in constant time.\n\n///\n\n/// # About:\n\n/// Compare two equal length slices, in constant time, using the\n\n/// [subtle](https://github.com/dalek-cryptography/subtle) crate.\n\n///\n\n/// # Parameters:\n\n/// - `a`: The first slice used in the comparison.\n\n/// - `b`: The second slice used in the comparison.\n\n///\n\n/// # Errors:\n\n/// An error will be returned if:\n\n/// - `a` and `b` do not have the same length.\n\n/// - `a` is not equal to `b`.\n\n///\n\n/// # Example:\n\n/// ```rust\n\n/// use orion::util;\n\n///\n\n/// let mut rnd_bytes = [0u8; 64];\n\n/// assert!(util::secure_cmp(&rnd_bytes, &[0u8; 64]).is_ok());\n\n///\n\n/// util::secure_rand_bytes(&mut rnd_bytes)?;\n\n/// assert!(util::secure_cmp(&rnd_bytes, &[0u8; 64]).is_err());\n\n/// # Ok::<(), orion::errors::UnknownCryptoError>(())\n\n/// ```\n\npub fn secure_cmp(a: &[u8], b: &[u8]) -> Result<(), errors::UnknownCryptoError> {\n\n if a.ct_eq(b).into() {\n\n Ok(())\n\n } else {\n\n Err(errors::UnknownCryptoError)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[cfg(feature = \"safe_api\")]\n\n #[test]\n\n fn rand_key_len_ok() {\n\n let mut dst = [0u8; 64];\n\n secure_rand_bytes(&mut dst).unwrap();\n\n }\n\n\n\n #[cfg(feature = \"safe_api\")]\n", "file_path": "src/util/mod.rs", "rank": 4, "score": 219607.12523101608 }, { "content": "/// Authenticated encryption using XChaCha20Poly1305.\n\npub fn seal(secret_key: &SecretKey, plaintext: &[u8]) -> Result<Vec<u8>, UnknownCryptoError> {\n\n if plaintext.is_empty() {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let out_len = match plaintext\n\n .len()\n\n .checked_add(XCHACHA_NONCESIZE + POLY1305_OUTSIZE)\n\n {\n\n Some(min_out_len) => min_out_len,\n\n None => return Err(UnknownCryptoError),\n\n };\n\n\n\n let mut dst_out = vec![0u8; out_len];\n\n let nonce = Nonce::generate();\n\n dst_out[..XCHACHA_NONCESIZE].copy_from_slice(nonce.as_ref());\n\n\n\n 
aead::xchacha20poly1305::seal(\n\n &chacha20::SecretKey::from_slice(secret_key.unprotected_as_bytes())?,\n\n &nonce,\n\n plaintext,\n\n None,\n\n &mut dst_out[XCHACHA_NONCESIZE..],\n\n )?;\n\n\n\n Ok(dst_out)\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/high_level/aead.rs", "rank": 5, "score": 199487.14795867875 }, { "content": "fn sha512_test_runner(data: &[u8], output: &[u8]) {\n\n let mut state = sha512::Sha512::new();\n\n state.update(data).unwrap();\n\n let digest = state.finalize().unwrap();\n\n\n\n let digest_one_shot = sha512::Sha512::digest(data).unwrap();\n\n\n\n assert_eq!(digest.as_ref(), digest_one_shot.as_ref());\n\n assert_eq!(digest.as_ref(), output);\n\n}\n\n\n", "file_path": "tests/hash/mod.rs", "rank": 6, "score": 179930.87813581905 }, { "content": "fn sha256_test_runner(data: &[u8], output: &[u8]) {\n\n let mut state = sha256::Sha256::new();\n\n state.update(data).unwrap();\n\n let digest = state.finalize().unwrap();\n\n\n\n let digest_one_shot = sha256::Sha256::digest(data).unwrap();\n\n\n\n assert_eq!(digest.as_ref(), digest_one_shot.as_ref());\n\n assert_eq!(digest.as_ref(), output);\n\n}\n\n\n", "file_path": "tests/hash/mod.rs", "rank": 7, "score": 179930.87813581905 }, { "content": "fn sha384_test_runner(data: &[u8], output: &[u8]) {\n\n let mut state = sha384::Sha384::new();\n\n state.update(data).unwrap();\n\n let digest = state.finalize().unwrap();\n\n\n\n let digest_one_shot = sha384::Sha384::digest(data).unwrap();\n\n\n\n assert_eq!(digest.as_ref(), digest_one_shot.as_ref());\n\n assert_eq!(digest.as_ref(), output);\n\n}\n\n\n", "file_path": "tests/hash/mod.rs", "rank": 8, "score": 179930.87813581905 }, { "content": "fn blake2b_test_runner(input: &[u8], key: &[u8], output: &[u8]) {\n\n // Only make SecretKey if test case key value is not empty.\n\n if key.is_empty() {\n\n let mut state = blake2::blake2b::Blake2b::new(output.len()).unwrap();\n\n 
state.update(input).unwrap();\n\n let digest = state.finalize().unwrap();\n\n assert_eq!(digest.len(), output.len());\n\n assert_eq!(digest.as_ref(), &output[..]);\n\n } else {\n\n let secret_key = mac::blake2b::SecretKey::from_slice(key).unwrap();\n\n let mut state = mac::blake2b::Blake2b::new(&secret_key, output.len()).unwrap();\n\n state.update(input).unwrap();\n\n let tag = state.finalize().unwrap();\n\n assert_eq!(tag.len(), output.len());\n\n assert_eq!(tag.unprotected_as_bytes(), &output[..]);\n\n }\n\n}\n\n\n", "file_path": "tests/hash/mod.rs", "rank": 9, "score": 177548.21599469057 }, { "content": "fn poly1305_test_runner(key: &[u8], input: &[u8], output: &[u8]) {\n\n let sk = OneTimeKey::from_slice(key).unwrap();\n\n\n\n let mut state = poly1305::Poly1305::new(&sk);\n\n state.update(input).unwrap();\n\n let tag_stream = state.finalize().unwrap();\n\n\n\n let tag_one_shot = poly1305::Poly1305::poly1305(&sk, input).unwrap();\n\n\n\n assert_eq!(tag_stream, output);\n\n assert_eq!(tag_one_shot, output);\n\n assert!(poly1305::Poly1305::verify(&Tag::from_slice(&output).unwrap(), &sk, input).is_ok());\n\n}\n", "file_path": "tests/mac/mod.rs", "rank": 10, "score": 177548.21599469057 }, { "content": "#[inline]\n\npub fn fiat_25519_from_bytes(out1: &mut fiat_25519_tight_field_element, arg1: &[u8; 32]) -> () {\n\n let x1: u64 = (((arg1[31]) as u64) << 44);\n\n let x2: u64 = (((arg1[30]) as u64) << 36);\n\n let x3: u64 = (((arg1[29]) as u64) << 28);\n\n let x4: u64 = (((arg1[28]) as u64) << 20);\n\n let x5: u64 = (((arg1[27]) as u64) << 12);\n\n let x6: u64 = (((arg1[26]) as u64) << 4);\n\n let x7: u64 = (((arg1[25]) as u64) << 47);\n\n let x8: u64 = (((arg1[24]) as u64) << 39);\n\n let x9: u64 = (((arg1[23]) as u64) << 31);\n\n let x10: u64 = (((arg1[22]) as u64) << 23);\n\n let x11: u64 = (((arg1[21]) as u64) << 15);\n\n let x12: u64 = (((arg1[20]) as u64) << 7);\n\n let x13: u64 = (((arg1[19]) as u64) << 50);\n\n let x14: u64 = (((arg1[18]) as u64) << 42);\n\n let 
x15: u64 = (((arg1[17]) as u64) << 34);\n\n let x16: u64 = (((arg1[16]) as u64) << 26);\n\n let x17: u64 = (((arg1[15]) as u64) << 18);\n\n let x18: u64 = (((arg1[14]) as u64) << 10);\n\n let x19: u64 = (((arg1[13]) as u64) << 2);\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 11, "score": 173631.59091546843 }, { "content": "#[inline]\n\npub fn fiat_25519_to_bytes(out1: &mut [u8; 32], arg1: &fiat_25519_tight_field_element) -> () {\n\n let mut x1: u64 = 0;\n\n let mut x2: fiat_25519_u1 = 0;\n\n fiat_25519_subborrowx_u51(&mut x1, &mut x2, 0x0, (arg1[0]), 0x7ffffffffffed);\n\n let mut x3: u64 = 0;\n\n let mut x4: fiat_25519_u1 = 0;\n\n fiat_25519_subborrowx_u51(&mut x3, &mut x4, x2, (arg1[1]), 0x7ffffffffffff);\n\n let mut x5: u64 = 0;\n\n let mut x6: fiat_25519_u1 = 0;\n\n fiat_25519_subborrowx_u51(&mut x5, &mut x6, x4, (arg1[2]), 0x7ffffffffffff);\n\n let mut x7: u64 = 0;\n\n let mut x8: fiat_25519_u1 = 0;\n\n fiat_25519_subborrowx_u51(&mut x7, &mut x8, x6, (arg1[3]), 0x7ffffffffffff);\n\n let mut x9: u64 = 0;\n\n let mut x10: fiat_25519_u1 = 0;\n\n fiat_25519_subborrowx_u51(&mut x9, &mut x10, x8, (arg1[4]), 0x7ffffffffffff);\n\n let mut x11: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x11, x10, (0x0 as u64), 0xffffffffffffffff);\n\n let mut x12: u64 = 0;\n\n let mut x13: fiat_25519_u1 = 0;\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 12, "score": 173631.59091546843 }, { "content": "/// Padding size that gives the needed bytes to pad `input` to an integral\n\n/// multiple of 16.\n\nfn padding(input: usize) -> usize {\n\n if input == 0 {\n\n return 0;\n\n }\n\n\n\n let rem = input % 16;\n\n\n\n if rem != 0 {\n\n 16 - rem\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\n/// Streaming XChaCha20Poly1305 state.\n\npub struct StreamXChaCha20Poly1305 {\n\n key: SecretKey,\n\n counter: u32,\n\n inonce: [u8; INONCEBYTES],\n\n}\n", "file_path": "src/hazardous/aead/streaming.rs", "rank": 13, "score": 160290.7989033301 }, { "content": "fn 
x25519_test_runner(expected_result: &[u8; 32], k: &[u8; 32], u: &[u8; 32], valid_result: bool) {\n\n let private = PrivateKey::from_slice(k).unwrap();\n\n let public = PublicKey::from_slice(u).unwrap();\n\n\n\n if valid_result {\n\n let actual_result = key_agreement(&private, &public).unwrap();\n\n\n\n assert_eq!(&actual_result, &expected_result.as_ref());\n\n } else {\n\n assert!(key_agreement(&private, &public).is_err());\n\n }\n\n}\n", "file_path": "tests/ecc/mod.rs", "rank": 14, "score": 157561.15784989702 }, { "content": "///\n\n///\n\n/// NOTE: Hmac has the output size of the hash function defined,\n\n/// but the array initialization with the size cannot depend on a generic parameter,\n\n/// because we don't have full support for const generics yet.\n\nfn _verify<Hmac, const OUTSIZE: usize>(\n\n expected: &[u8],\n\n padded_password: &[u8],\n\n salt: &[u8],\n\n iterations: usize,\n\n dest: &mut [u8],\n\n) -> Result<(), UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n debug_assert_eq!(OUTSIZE, Hmac::HASH_FUNC_OUTSIZE);\n\n _derive_key::<Hmac, { OUTSIZE }>(padded_password, salt, iterations, dest)?;\n\n crate::util::secure_cmp(expected, dest)\n\n}\n\n\n\n/// PBKDF2-HMAC-SHA256 (Password-Based Key Derivation Function 2) as specified in the [RFC 8018](https://tools.ietf.org/html/rfc8018).\n\npub mod sha256 {\n\n use super::*;\n\n use crate::hazardous::hash::sha2::sha256::{self, Sha256};\n\n\n", "file_path": "src/hazardous/kdf/pbkdf2.rs", "rank": 15, "score": 141709.63716584974 }, { "content": "/// The HKDF extract step.\n\n///\n\n/// NOTE: Hmac has the output size of the hash function defined,\n\n/// but the array initialization with the size cannot depend on a generic parameter,\n\n/// because we don't have full support for const generics yet.\n\nfn _extract<Hmac, const OUTSIZE: usize>(\n\n salt: &[u8],\n\n ikm: &[u8],\n\n) -> Result<[u8; OUTSIZE], UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n debug_assert_eq!(OUTSIZE, 
Hmac::HASH_FUNC_OUTSIZE);\n\n let mut dest = [0u8; OUTSIZE];\n\n\n\n let mut ctx = Hmac::_new(salt)?;\n\n ctx._update(ikm)?;\n\n ctx._finalize(&mut dest)?;\n\n\n\n Ok(dest)\n\n}\n\n\n", "file_path": "src/hazardous/kdf/hkdf.rs", "rank": 16, "score": 141709.51479478192 }, { "content": "/// The HKDF expand step.\n\nfn _expand<Hmac, const OUTSIZE: usize>(\n\n prk: &[u8],\n\n info: Option<&[u8]>,\n\n dest: &mut [u8],\n\n) -> Result<(), UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n debug_assert_eq!(OUTSIZE, Hmac::HASH_FUNC_OUTSIZE);\n\n debug_assert_eq!(prk.len(), Hmac::HASH_FUNC_OUTSIZE);\n\n if dest.is_empty() || dest.len() > 255 * Hmac::HASH_FUNC_OUTSIZE {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let optional_info = info.unwrap_or(&[0u8; 0]);\n\n let mut ctx = Hmac::_new(prk)?;\n\n\n\n // We require a temporary buffer in case the requested bytes\n\n // to derive are lower than the HMAC functions output size.\n\n let mut tmp = [0u8; OUTSIZE];\n", "file_path": "src/hazardous/kdf/hkdf.rs", "rank": 17, "score": 141705.17062758328 }, { "content": "/// Combine `extract` and `expand` to return a derived key.\n\n///\n\n/// NOTE: See comment about const param at _extract function.\n\nfn _derive_key<Hmac, const OUTSIZE: usize>(\n\n salt: &[u8],\n\n ikm: &[u8],\n\n info: Option<&[u8]>,\n\n dest: &mut [u8],\n\n) -> Result<(), UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n _expand::<Hmac, { OUTSIZE }>(&_extract::<Hmac, { OUTSIZE }>(salt, ikm)?, info, dest)\n\n}\n\n\n\n/// HKDF-HMAC-SHA256 (HMAC-based Extract-and-Expand Key Derivation Function) as specified in the [RFC 5869](https://tools.ietf.org/html/rfc5869).\n\npub mod sha256 {\n\n use super::*;\n\n use crate::hazardous::hash::sha2::sha256::SHA256_OUTSIZE;\n\n pub use crate::hazardous::mac::hmac::sha256::Tag;\n\n\n\n #[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n\n /// The HKDF extract step.\n", "file_path": 
"src/hazardous/kdf/hkdf.rs", "rank": 18, "score": 138354.03437804588 }, { "content": "///\n\n///\n\n/// NOTE: Hmac has the output size of the hash function defined,\n\n/// but the array initialization with the size cannot depend on a generic parameter,\n\n/// because we don't have full support for const generics yet.\n\nfn _derive_key<Hmac, const OUTSIZE: usize>(\n\n padded_password: &[u8],\n\n salt: &[u8],\n\n iterations: usize,\n\n dest: &mut [u8],\n\n) -> Result<(), UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n debug_assert_eq!(OUTSIZE, Hmac::HASH_FUNC_OUTSIZE);\n\n if dest.is_empty() || iterations < 1 {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let mut u_step = [0u8; OUTSIZE];\n\n let mut hmac = Hmac::_new(padded_password)?;\n\n for (idx, dk_block) in dest.chunks_mut(Hmac::HASH_FUNC_OUTSIZE).enumerate() {\n\n // If this panics, then the size limit for PBKDF2 is reached.\n\n let block_idx: u32 = 1u32.checked_add(idx as u32).unwrap();\n\n\n", "file_path": "src/hazardous/kdf/pbkdf2.rs", "rank": 19, "score": 138353.76771867363 }, { "content": "#[cfg(test)]\n\n#[cfg(feature = \"safe_api\")]\n\npub trait TestingRandom {\n\n /// Randomly generate self.\n\n fn gen() -> Self;\n\n}\n\n\n\n#[cfg(feature = \"safe_api\")]\n", "file_path": "src/test_framework/streamcipher_interface.rs", "rank": 21, "score": 125573.03347906377 }, { "content": "/// BLAKE2 G with 64-bit multiplications.\n\nfn g(a: &mut u64, b: &mut u64, c: &mut u64, d: &mut u64) {\n\n *a = lower_mult_add(*a, *b);\n\n *d = (*d ^ *a).rotate_right(32);\n\n *c = lower_mult_add(*c, *d);\n\n *b = (*b ^ *c).rotate_right(24);\n\n *a = lower_mult_add(*a, *b);\n\n *d = (*d ^ *a).rotate_right(16);\n\n *c = lower_mult_add(*c, *d);\n\n *b = (*b ^ *c).rotate_right(63);\n\n}\n\n\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 22, "score": 125464.75043890336 }, { "content": "/// Authenticated decryption using XChaCha20Poly1305.\n\npub fn open(\n\n secret_key: &SecretKey,\n\n 
ciphertext_with_tag_and_nonce: &[u8],\n\n) -> Result<Vec<u8>, UnknownCryptoError> {\n\n // Avoid empty ciphertexts\n\n if ciphertext_with_tag_and_nonce.len() <= (XCHACHA_NONCESIZE + POLY1305_OUTSIZE) {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let mut dst_out =\n\n vec![0u8; ciphertext_with_tag_and_nonce.len() - (XCHACHA_NONCESIZE + POLY1305_OUTSIZE)];\n\n\n\n aead::xchacha20poly1305::open(\n\n &chacha20::SecretKey::from_slice(secret_key.unprotected_as_bytes())?,\n\n &Nonce::from_slice(&ciphertext_with_tag_and_nonce[..XCHACHA_NONCESIZE])?,\n\n &ciphertext_with_tag_and_nonce[XCHACHA_NONCESIZE..],\n\n None,\n\n &mut dst_out,\n\n )?;\n\n\n", "file_path": "src/high_level/aead.rs", "rank": 23, "score": 124048.6267480017 }, { "content": "/// AEAD XChaCha20Poly1305 decryption as specified in the [draft RFC](https://github.com/bikeshedders/xchacha-rfc).\n\npub fn open(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n ciphertext_with_tag: &[u8],\n\n ad: Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n let (subkey, ietf_nonce) = subkey_and_nonce(secret_key, nonce);\n\n chacha20poly1305::open(&subkey, &ietf_nonce, ciphertext_with_tag, ad, dst_out)\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n\n#[cfg(feature = \"safe_api\")]\n\nmod public {\n\n use super::*;\n\n use crate::hazardous::mac::poly1305::POLY1305_OUTSIZE;\n\n use crate::test_framework::aead_interface::{test_diff_params_err, AeadTestRunner};\n\n\n\n #[quickcheck]\n", "file_path": "src/hazardous/aead/xchacha20poly1305.rs", "rank": 24, "score": 124043.90969882166 }, { "content": "/// IETF ChaCha20 encryption as specified in the [RFC 8439](https://tools.ietf.org/html/rfc8439).\n\npub fn encrypt(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n initial_counter: u32,\n\n plaintext: &[u8],\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n if dst_out.len() < plaintext.len() {\n\n return Err(UnknownCryptoError);\n\n }\n\n if 
plaintext.is_empty() {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n dst_out[..plaintext.len()].copy_from_slice(plaintext);\n\n encrypt_in_place(\n\n secret_key,\n\n nonce,\n\n initial_counter,\n\n &mut dst_out[..plaintext.len()],\n\n )\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/hazardous/stream/chacha20.rs", "rank": 25, "score": 124043.90969882166 }, { "content": "/// IETF ChaCha20 decryption as specified in the [RFC 8439](https://tools.ietf.org/html/rfc8439).\n\npub fn decrypt(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n initial_counter: u32,\n\n ciphertext: &[u8],\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n encrypt(secret_key, nonce, initial_counter, ciphertext, dst_out)\n\n}\n\n\n\n/// HChaCha20 as specified in the [draft-RFC](https://github.com/bikeshedders/xchacha-rfc/blob/master).\n\npub(super) fn hchacha20(\n\n secret_key: &SecretKey,\n\n nonce: &[u8],\n\n) -> Result<[u8; HCHACHA_OUTSIZE], UnknownCryptoError> {\n\n let mut chacha_state = ChaCha20::new(secret_key.unprotected_as_bytes(), nonce, false)?;\n\n let mut keystream_block = [0u8; HCHACHA_OUTSIZE];\n\n chacha_state.keystream_block(0, &mut keystream_block);\n\n\n\n Ok(keystream_block)\n", "file_path": "src/hazardous/stream/chacha20.rs", "rank": 26, "score": 124043.90969882166 }, { "content": "/// XChaCha20 decryption as specified in the [draft RFC](https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-03).\n\npub fn decrypt(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n initial_counter: u32,\n\n ciphertext: &[u8],\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n encrypt(secret_key, nonce, initial_counter, ciphertext, dst_out)\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n\n#[cfg(feature = \"safe_api\")]\n\nmod public {\n\n use super::*;\n\n\n\n mod test_encrypt_decrypt {\n\n use super::*;\n\n use 
crate::test_framework::streamcipher_interface::*;\n\n\n", "file_path": "src/hazardous/stream/xchacha20.rs", "rank": 27, "score": 124043.90969882166 }, { "content": "/// XChaCha20 encryption as specified in the [draft RFC](https://tools.ietf.org/html/draft-irtf-cfrg-xchacha-03).\n\npub fn encrypt(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n initial_counter: u32,\n\n plaintext: &[u8],\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n let (subkey, ietf_nonce) = subkey_and_nonce(secret_key, nonce);\n\n\n\n chacha20::encrypt(&subkey, &ietf_nonce, initial_counter, plaintext, dst_out)\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/hazardous/stream/xchacha20.rs", "rank": 28, "score": 124043.90969882166 }, { "content": "/// AEAD ChaCha20Poly1305 decryption and authentication as specified in the [RFC 8439](https://tools.ietf.org/html/rfc8439).\n\npub fn open(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n ciphertext_with_tag: &[u8],\n\n ad: Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n if ciphertext_with_tag.len() < POLY1305_OUTSIZE {\n\n return Err(UnknownCryptoError);\n\n }\n\n if dst_out.len() < ciphertext_with_tag.len() - POLY1305_OUTSIZE {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let mut dec_ctx =\n\n ChaCha20::new(secret_key.unprotected_as_bytes(), nonce.as_ref(), true).unwrap();\n\n let mut tmp = Zeroizing::new([0u8; CHACHA_BLOCKSIZE]);\n\n let mut auth_ctx = Poly1305::new(&poly1305_key_gen(&mut dec_ctx, &mut tmp));\n\n\n\n let ciphertext_len = ciphertext_with_tag.len() - POLY1305_OUTSIZE;\n", "file_path": "src/hazardous/aead/chacha20poly1305.rs", "rank": 29, "score": 124043.90969882166 }, { "content": "/// Verify Argon2i derived key in constant time.\n\npub fn verify(\n\n expected: &[u8],\n\n password: &[u8],\n\n salt: &[u8],\n\n iterations: u32,\n\n memory: u32,\n\n secret: Option<&[u8]>,\n\n ad: 
Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n derive_key(password, salt, iterations, memory, secret, ad, dst_out)?;\n\n util::secure_cmp(dst_out, expected)\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n\nmod public {\n\n use super::*;\n\n\n\n #[cfg(feature = \"safe_api\")]\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 30, "score": 124043.90969882166 }, { "content": "/// AEAD XChaCha20Poly1305 encryption as specified in the [draft RFC](https://github.com/bikeshedders/xchacha-rfc).\n\npub fn seal(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n plaintext: &[u8],\n\n ad: Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n let (subkey, ietf_nonce) = subkey_and_nonce(secret_key, nonce);\n\n chacha20poly1305::seal(&subkey, &ietf_nonce, plaintext, ad, dst_out)\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/hazardous/aead/xchacha20poly1305.rs", "rank": 31, "score": 124043.90969882166 }, { "content": "/// AEAD ChaCha20Poly1305 encryption and authentication as specified in the [RFC 8439](https://tools.ietf.org/html/rfc8439).\n\npub fn seal(\n\n secret_key: &SecretKey,\n\n nonce: &Nonce,\n\n plaintext: &[u8],\n\n ad: Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n match plaintext.len().checked_add(POLY1305_OUTSIZE) {\n\n Some(out_min_len) => {\n\n if dst_out.len() < out_min_len {\n\n return Err(UnknownCryptoError);\n\n }\n\n }\n\n None => return Err(UnknownCryptoError),\n\n };\n\n\n\n let mut enc_ctx =\n\n ChaCha20::new(secret_key.unprotected_as_bytes(), nonce.as_ref(), true).unwrap();\n\n let mut tmp = Zeroizing::new([0u8; CHACHA_BLOCKSIZE]);\n\n\n", "file_path": "src/hazardous/aead/chacha20poly1305.rs", "rank": 32, "score": 124043.90969882166 }, { "content": "/// Derive a key using Argon2i.\n\npub fn derive_key(\n\n password: &Password,\n\n salt: 
&Salt,\n\n iterations: u32,\n\n memory: u32,\n\n length: u32,\n\n) -> Result<SecretKey, UnknownCryptoError> {\n\n if iterations < MIN_ITERATIONS {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n let mut dk = SecretKey::from_slice(&vec![0u8; length as usize])?;\n\n\n\n argon2i::derive_key(\n\n password.unprotected_as_bytes(),\n\n salt.as_ref(),\n\n iterations,\n\n memory,\n\n None,\n\n None,\n", "file_path": "src/high_level/kdf.rs", "rank": 33, "score": 121307.7490373952 }, { "content": "/// Hash a password using Argon2i.\n\npub fn hash_password(\n\n password: &Password,\n\n iterations: u32,\n\n memory: u32,\n\n) -> Result<PasswordHash, UnknownCryptoError> {\n\n if iterations < MIN_ITERATIONS {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n // Cannot panic as this is a valid size.\n\n let salt = Salt::generate(SALT_LENGTH).unwrap();\n\n let mut buffer = Zeroizing::new([0u8; PWHASH_LENGTH]);\n\n\n\n argon2i::derive_key(\n\n password.unprotected_as_bytes(),\n\n salt.as_ref(),\n\n iterations,\n\n memory,\n\n None,\n\n None,\n", "file_path": "src/high_level/pwhash.rs", "rank": 34, "score": 121307.7490373952 }, { "content": "/// Authenticate and verify a message using BLAKE2b-256 in keyed mode.\n\npub fn authenticate_verify(\n\n expected: &Tag,\n\n secret_key: &SecretKey,\n\n data: &[u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n if secret_key.len() < BLAKE2B_MIN_KEY_SIZE || expected.len() != BLAKE2B_TAG_SIZE {\n\n return Err(UnknownCryptoError);\n\n }\n\n let key = blake2b::SecretKey::from_slice(secret_key.unprotected_as_bytes())?;\n\n Blake2b::verify(expected, &key, BLAKE2B_TAG_SIZE, data)\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n\nmod public {\n\n use super::*;\n\n\n\n mod test_auth_and_verify {\n\n use super::*;\n\n #[test]\n", "file_path": "src/high_level/auth.rs", "rank": 35, "score": 121307.59437980852 }, { "content": "/// X25519 (Diffie-Hellman with Montgomery form of Curve25519).\n\npub fn key_agreement(\n\n private_key: 
&PrivateKey,\n\n public_key: &PublicKey,\n\n) -> Result<SharedKey, UnknownCryptoError> {\n\n let u_coord = public_key.fe;\n\n let field_element = mont_ladder(&private_key.scalar, u_coord).as_bytes();\n\n // High bit should be zero.\n\n debug_assert_eq!(field_element[31] & 0b1000_0000u8, 0u8);\n\n if secure_cmp(&field_element, &LOW_ORDER_POINT_RESULT).is_ok() {\n\n return Err(UnknownCryptoError);\n\n }\n\n\n\n Ok(SharedKey::from(field_element))\n\n}\n\n\n\n#[cfg(test)]\n\nmod public {\n\n use crate::hazardous::ecc::x25519::{\n\n key_agreement, PrivateKey, PublicKey, SharedKey, BASEPOINT,\n\n };\n", "file_path": "src/hazardous/ecc/x25519.rs", "rank": 36, "score": 121302.87733062846 }, { "content": "pub fn chacha_test_runner(\n\n key: &[u8],\n\n nonce: &[u8],\n\n init_block_count: u32,\n\n input: &[u8],\n\n output: &[u8],\n\n) {\n\n if key.len() != CHACHA_KEYSIZE {\n\n assert!(SecretKey::from_slice(&key).is_err());\n\n return;\n\n }\n\n if input.is_empty() || output.is_empty() {\n\n return;\n\n }\n\n\n\n let sk = SecretKey::from_slice(&key).unwrap();\n\n\n\n // Selecting variant based on nonce size\n\n if nonce.len() == IETF_CHACHA_NONCESIZE {\n\n let n = chacha20::Nonce::from_slice(&nonce).unwrap();\n", "file_path": "tests/stream/mod.rs", "rank": 37, "score": 121302.87733062846 }, { "content": "/// Argon2i password hashing function as described in the [P-H-C specification](https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf).\n\npub fn derive_key(\n\n password: &[u8],\n\n salt: &[u8],\n\n iterations: u32,\n\n memory: u32,\n\n secret: Option<&[u8]>,\n\n ad: Option<&[u8]>,\n\n dst_out: &mut [u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n if password.len() > 0xFFFF_FFFF {\n\n return Err(UnknownCryptoError);\n\n }\n\n if salt.len() > 0xFFFF_FFFF || salt.len() < 8 {\n\n return Err(UnknownCryptoError);\n\n }\n\n if iterations < MIN_ITERATIONS {\n\n return Err(UnknownCryptoError);\n\n }\n\n if memory < MIN_MEMORY {\n\n return 
Err(UnknownCryptoError);\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 38, "score": 121302.87733062846 }, { "content": "/// Represents a Scalar decoded from a byte array.\n\nstruct Scalar([u8; PRIVATE_KEY_SIZE]);\n\n\n\nimpl Drop for Scalar {\n\n fn drop(&mut self) {\n\n use zeroize::Zeroize;\n\n self.0.iter_mut().zeroize();\n\n }\n\n}\n\n\n\nimpl PartialEq for Scalar {\n\n fn eq(&self, other: &Self) -> bool {\n\n use subtle::ConstantTimeEq;\n\n self.0.ct_eq(&other.0).into()\n\n }\n\n}\n\n\n\nimpl Eq for Scalar {}\n\n\n\nimpl Scalar {\n\n /// Create a scalar from some byte-array.\n", "file_path": "src/hazardous/ecc/x25519.rs", "rank": 39, "score": 119421.10473416222 }, { "content": "#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n\npub fn hash_password_verify(\n\n expected: &PasswordHash,\n\n password: &Password,\n\n) -> Result<(), UnknownCryptoError> {\n\n let mut buffer = Zeroizing::new([0u8; PWHASH_LENGTH]);\n\n\n\n argon2i::verify(\n\n expected.unprotected_as_bytes(),\n\n password.unprotected_as_bytes(),\n\n expected.salt.as_ref(),\n\n expected.iterations,\n\n expected.memory,\n\n None,\n\n None,\n\n buffer.as_mut(),\n\n )\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n", "file_path": "src/high_level/pwhash.rs", "rank": 40, "score": 118760.29549904063 }, { "content": "#[inline]\n\npub fn fiat_25519_selectznz(\n\n out1: &mut [u64; 5],\n\n arg1: fiat_25519_u1,\n\n arg2: &[u64; 5],\n\n arg3: &[u64; 5],\n\n) -> () {\n\n let mut x1: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x1, arg1, (arg2[0]), (arg3[0]));\n\n let mut x2: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x2, arg1, (arg2[1]), (arg3[1]));\n\n let mut x3: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x3, arg1, (arg2[2]), (arg3[2]));\n\n let mut x4: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x4, arg1, (arg2[3]), (arg3[3]));\n\n let mut x5: u64 = 0;\n\n fiat_25519_cmovznz_u64(&mut x5, arg1, (arg2[4]), (arg3[4]));\n\n out1[0] = x1;\n\n out1[1] 
= x2;\n\n out1[2] = x3;\n\n out1[3] = x4;\n\n out1[4] = x5;\n\n}\n\n\n\n/// The function fiat_25519_to_bytes serializes a field element to bytes in little-endian order.\n\n///\n\n/// Postconditions:\n\n/// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31]\n\n///\n\n/// Output Bounds:\n\n/// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0x7f]]\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 41, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_add(\n\n out1: &mut fiat_25519_loose_field_element,\n\n arg1: &fiat_25519_tight_field_element,\n\n arg2: &fiat_25519_tight_field_element,\n\n) -> () {\n\n let x1: u64 = ((arg1[0]) + (arg2[0]));\n\n let x2: u64 = ((arg1[1]) + (arg2[1]));\n\n let x3: u64 = ((arg1[2]) + (arg2[2]));\n\n let x4: u64 = ((arg1[3]) + (arg2[3]));\n\n let x5: u64 = ((arg1[4]) + (arg2[4]));\n\n out1[0] = x1;\n\n out1[1] = x2;\n\n out1[2] = x3;\n\n out1[3] = x4;\n\n out1[4] = x5;\n\n}\n\n\n\n/// The function fiat_25519_sub subtracts two field elements.\n\n///\n\n/// Postconditions:\n\n/// eval out1 mod m = (eval arg1 - eval arg2) mod m\n\n///\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 42, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_opp(\n\n out1: &mut fiat_25519_loose_field_element,\n\n arg1: &fiat_25519_tight_field_element,\n\n) -> () {\n\n let x1: u64 = (0xfffffffffffda - (arg1[0]));\n\n let x2: u64 = (0xffffffffffffe - (arg1[1]));\n\n let x3: u64 = (0xffffffffffffe - (arg1[2]));\n\n let x4: u64 
= (0xffffffffffffe - (arg1[3]));\n\n let x5: u64 = (0xffffffffffffe - (arg1[4]));\n\n out1[0] = x1;\n\n out1[1] = x2;\n\n out1[2] = x3;\n\n out1[3] = x4;\n\n out1[4] = x5;\n\n}\n\n\n\n/// The function fiat_25519_selectznz is a multi-limb conditional select.\n\n///\n\n/// Postconditions:\n\n/// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3)\n\n///\n\n/// Input Bounds:\n\n/// arg1: [0x0 ~> 0x1]\n\n/// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]]\n\n/// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]]\n\n/// Output Bounds:\n\n/// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]]\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 43, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_carry(\n\n out1: &mut fiat_25519_tight_field_element,\n\n arg1: &fiat_25519_loose_field_element,\n\n) -> () {\n\n let x1: u64 = (arg1[0]);\n\n let x2: u64 = ((x1 >> 51) + (arg1[1]));\n\n let x3: u64 = ((x2 >> 51) + (arg1[2]));\n\n let x4: u64 = ((x3 >> 51) + (arg1[3]));\n\n let x5: u64 = ((x4 >> 51) + (arg1[4]));\n\n let x6: u64 = ((x1 & 0x7ffffffffffff) + ((x5 >> 51) * 0x13));\n\n let x7: u64 = ((((x6 >> 51) as fiat_25519_u1) as u64) + (x2 & 0x7ffffffffffff));\n\n let x8: u64 = (x6 & 0x7ffffffffffff);\n\n let x9: u64 = (x7 & 0x7ffffffffffff);\n\n let x10: u64 = ((((x7 >> 51) as fiat_25519_u1) as u64) + (x3 & 0x7ffffffffffff));\n\n let x11: u64 = (x4 & 0x7ffffffffffff);\n\n let x12: u64 = (x5 & 0x7ffffffffffff);\n\n out1[0] = x8;\n\n out1[1] = x9;\n\n out1[2] = x10;\n\n out1[3] = x11;\n\n out1[4] = x12;\n\n}\n\n\n\n/// The function fiat_25519_add adds two field elements.\n\n///\n\n/// Postconditions:\n\n/// eval out1 mod m = (eval 
arg1 + eval arg2) mod m\n\n///\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 44, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_sub(\n\n out1: &mut fiat_25519_loose_field_element,\n\n arg1: &fiat_25519_tight_field_element,\n\n arg2: &fiat_25519_tight_field_element,\n\n) -> () {\n\n let x1: u64 = ((0xfffffffffffda + (arg1[0])) - (arg2[0]));\n\n let x2: u64 = ((0xffffffffffffe + (arg1[1])) - (arg2[1]));\n\n let x3: u64 = ((0xffffffffffffe + (arg1[2])) - (arg2[2]));\n\n let x4: u64 = ((0xffffffffffffe + (arg1[3])) - (arg2[3]));\n\n let x5: u64 = ((0xffffffffffffe + (arg1[4])) - (arg2[4]));\n\n out1[0] = x1;\n\n out1[1] = x2;\n\n out1[2] = x3;\n\n out1[3] = x4;\n\n out1[4] = x5;\n\n}\n\n\n\n/// The function fiat_25519_opp negates a field element.\n\n///\n\n/// Postconditions:\n\n/// eval out1 mod m = -eval arg1 mod m\n\n///\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 45, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_relax(\n\n out1: &mut fiat_25519_loose_field_element,\n\n arg1: &fiat_25519_tight_field_element,\n\n) -> () {\n\n let x1: u64 = (arg1[0]);\n\n let x2: u64 = (arg1[1]);\n\n let x3: u64 = (arg1[2]);\n\n let x4: u64 = (arg1[3]);\n\n let x5: u64 = (arg1[4]);\n\n out1[0] = x1;\n\n out1[1] = x2;\n\n out1[2] = x3;\n\n out1[3] = x4;\n\n out1[4] = x5;\n\n}\n\n\n\n/// The function fiat_25519_carry_scmul_121666 multiplies a field element by 121666 and reduces the result.\n\n///\n\n/// Postconditions:\n\n/// eval out1 mod m = (121666 * eval arg1) mod m\n\n///\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 46, "score": 116369.22632930745 }, { "content": "#[inline]\n\npub fn fiat_25519_cmovznz_u64(out1: &mut u64, arg1: fiat_25519_u1, arg2: u64, arg3: u64) -> () {\n\n let x1: fiat_25519_u1 = (!(!arg1));\n\n let x2: u64 = ((((((0x0 as fiat_25519_i2) - (x1 as fiat_25519_i2)) as fiat_25519_i1) as i128)\n\n & (0xffffffffffffffff as i128)) as 
u64);\n\n let x3: u64 = ((x2 & arg3) | ((!x2) & arg2));\n\n *out1 = x3;\n\n}\n\n\n\n/// The function fiat_25519_carry_mul multiplies two field elements and reduces the result.\n\n///\n\n/// Postconditions:\n\n/// eval out1 mod m = (eval arg1 * eval arg2) mod m\n\n///\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 47, "score": 115740.78198578225 }, { "content": "#[inline]\n\npub fn fiat_25519_carry_mul(\n\n out1: &mut fiat_25519_tight_field_element,\n\n arg1: &fiat_25519_loose_field_element,\n\n arg2: &fiat_25519_loose_field_element,\n\n) -> () {\n\n let x1: u128 = (((arg1[4]) as u128) * (((arg2[4]) * 0x13) as u128));\n\n let x2: u128 = (((arg1[4]) as u128) * (((arg2[3]) * 0x13) as u128));\n\n let x3: u128 = (((arg1[4]) as u128) * (((arg2[2]) * 0x13) as u128));\n\n let x4: u128 = (((arg1[4]) as u128) * (((arg2[1]) * 0x13) as u128));\n\n let x5: u128 = (((arg1[3]) as u128) * (((arg2[4]) * 0x13) as u128));\n\n let x6: u128 = (((arg1[3]) as u128) * (((arg2[3]) * 0x13) as u128));\n\n let x7: u128 = (((arg1[3]) as u128) * (((arg2[2]) * 0x13) as u128));\n\n let x8: u128 = (((arg1[2]) as u128) * (((arg2[4]) * 0x13) as u128));\n\n let x9: u128 = (((arg1[2]) as u128) * (((arg2[3]) * 0x13) as u128));\n\n let x10: u128 = (((arg1[1]) as u128) * (((arg2[4]) * 0x13) as u128));\n\n let x11: u128 = (((arg1[4]) as u128) * ((arg2[0]) as u128));\n\n let x12: u128 = (((arg1[3]) as u128) * ((arg2[1]) as u128));\n\n let x13: u128 = (((arg1[3]) as u128) * ((arg2[0]) as u128));\n\n let x14: u128 = (((arg1[2]) as u128) * ((arg2[2]) as u128));\n\n let x15: u128 = (((arg1[2]) as u128) * ((arg2[1]) as u128));\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 48, "score": 114141.21321979095 }, { "content": "#[inline]\n\npub fn fiat_25519_carry_scmul_121666(\n\n out1: &mut fiat_25519_tight_field_element,\n\n arg1: &fiat_25519_loose_field_element,\n\n) -> () {\n\n let x1: u128 = ((0x1db42 as u128) * ((arg1[4]) as u128));\n\n let x2: u128 = ((0x1db42 as 
u128) * ((arg1[3]) as u128));\n\n let x3: u128 = ((0x1db42 as u128) * ((arg1[2]) as u128));\n\n let x4: u128 = ((0x1db42 as u128) * ((arg1[1]) as u128));\n\n let x5: u128 = ((0x1db42 as u128) * ((arg1[0]) as u128));\n\n let x6: u64 = ((x5 >> 51) as u64);\n\n let x7: u64 = ((x5 & (0x7ffffffffffff as u128)) as u64);\n\n let x8: u128 = ((x6 as u128) + x4);\n\n let x9: u64 = ((x8 >> 51) as u64);\n\n let x10: u64 = ((x8 & (0x7ffffffffffff as u128)) as u64);\n\n let x11: u128 = ((x9 as u128) + x3);\n\n let x12: u64 = ((x11 >> 51) as u64);\n\n let x13: u64 = ((x11 & (0x7ffffffffffff as u128)) as u64);\n\n let x14: u128 = ((x12 as u128) + x2);\n\n let x15: u64 = ((x14 >> 51) as u64);\n\n let x16: u64 = ((x14 & (0x7ffffffffffff as u128)) as u64);\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 49, "score": 114141.21321979095 }, { "content": "#[inline]\n\npub fn fiat_25519_addcarryx_u51(\n\n out1: &mut u64,\n\n out2: &mut fiat_25519_u1,\n\n arg1: fiat_25519_u1,\n\n arg2: u64,\n\n arg3: u64,\n\n) -> () {\n\n let x1: u64 = (((arg1 as u64) + arg2) + arg3);\n\n let x2: u64 = (x1 & 0x7ffffffffffff);\n\n let x3: fiat_25519_u1 = ((x1 >> 51) as fiat_25519_u1);\n\n *out1 = x2;\n\n *out2 = x3;\n\n}\n\n\n\n/// The function fiat_25519_subborrowx_u51 is a subtraction with borrow.\n\n///\n\n/// Postconditions:\n\n/// out1 = (-arg1 + arg2 + -arg3) mod 2^51\n\n/// out2 = -⌊(-arg1 + arg2 + -arg3) / 2^51⌋\n\n///\n\n/// Input Bounds:\n\n/// arg1: [0x0 ~> 0x1]\n\n/// arg2: [0x0 ~> 0x7ffffffffffff]\n\n/// arg3: [0x0 ~> 0x7ffffffffffff]\n\n/// Output Bounds:\n\n/// out1: [0x0 ~> 0x7ffffffffffff]\n\n/// out2: [0x0 ~> 0x1]\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 50, "score": 114141.21321979095 }, { "content": "#[inline]\n\npub fn fiat_25519_carry_square(\n\n out1: &mut fiat_25519_tight_field_element,\n\n arg1: &fiat_25519_loose_field_element,\n\n) -> () {\n\n let x1: u64 = ((arg1[4]) * 0x13);\n\n let x2: u64 = (x1 * 0x2);\n\n let x3: u64 = 
((arg1[4]) * 0x2);\n\n let x4: u64 = ((arg1[3]) * 0x13);\n\n let x5: u64 = (x4 * 0x2);\n\n let x6: u64 = ((arg1[3]) * 0x2);\n\n let x7: u64 = ((arg1[2]) * 0x2);\n\n let x8: u64 = ((arg1[1]) * 0x2);\n\n let x9: u128 = (((arg1[4]) as u128) * (x1 as u128));\n\n let x10: u128 = (((arg1[3]) as u128) * (x2 as u128));\n\n let x11: u128 = (((arg1[3]) as u128) * (x4 as u128));\n\n let x12: u128 = (((arg1[2]) as u128) * (x2 as u128));\n\n let x13: u128 = (((arg1[2]) as u128) * (x5 as u128));\n\n let x14: u128 = (((arg1[2]) as u128) * ((arg1[2]) as u128));\n\n let x15: u128 = (((arg1[1]) as u128) * (x2 as u128));\n\n let x16: u128 = (((arg1[1]) as u128) * (x6 as u128));\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 51, "score": 114141.21321979095 }, { "content": "#[inline]\n\npub fn fiat_25519_subborrowx_u51(\n\n out1: &mut u64,\n\n out2: &mut fiat_25519_u1,\n\n arg1: fiat_25519_u1,\n\n arg2: u64,\n\n arg3: u64,\n\n) -> () {\n\n let x1: i64 = ((((((arg2 as i128) - (arg1 as i128)) as i64) as i128) - (arg3 as i128)) as i64);\n\n let x2: fiat_25519_i1 = ((x1 >> 51) as fiat_25519_i1);\n\n let x3: u64 = (((x1 as i128) & (0x7ffffffffffff as i128)) as u64);\n\n *out1 = x3;\n\n *out2 = (((0x0 as fiat_25519_i2) - (x2 as fiat_25519_i2)) as fiat_25519_u1);\n\n}\n\n\n\n/// The function fiat_25519_cmovznz_u64 is a single-word conditional move.\n\n///\n\n/// Postconditions:\n\n/// out1 = (if arg1 = 0 then arg2 else arg3)\n\n///\n\n/// Input Bounds:\n\n/// arg1: [0x0 ~> 0x1]\n\n/// arg2: [0x0 ~> 0xffffffffffffffff]\n\n/// arg3: [0x0 ~> 0xffffffffffffffff]\n\n/// Output Bounds:\n\n/// out1: [0x0 ~> 0xffffffffffffffff]\n", "file_path": "src/hazardous/ecc/fiat_curve25519_u64.rs", "rank": 52, "score": 114141.21321979095 }, { "content": "#[rustfmt::skip]\n\nfn fill_block(w: &mut [u64; 128]) {\n\n\t\n\n\tlet mut v0: u64; let mut v1: u64; let mut v2: u64; let mut v3: u64;\n\n\tlet mut v4: u64; let mut v5: u64; let mut v6: u64; let mut v7: u64; \n\n\tlet mut v8: u64; let mut 
v9: u64; let mut v10: u64; let mut v11: u64;\n\n\tlet mut v12: u64; let mut v13: u64; let mut v14: u64; let mut v15: u64;\n\n\n\n\tlet mut idx = 0;\n\n\t\n\n\t// Operate on columns.\n\n\twhile idx < 128 {\n\n\t\tv0 = w[idx ]; v1 = w[idx + 1]; v2 = w[idx + 2]; v3 = w[idx + 3];\n\n\t\tv4 = w[idx + 4]; v5 = w[idx + 5]; v6 = w[idx + 6]; v7 = w[idx + 7];\n\n\t\tv8 = w[idx + 8]; v9 = w[idx + 9]; v10 = w[idx + 10]; v11 = w[idx + 11];\n\n\t\tv12 = w[idx + 12]; v13 = w[idx + 13]; v14 = w[idx + 14]; v15 = w[idx + 15];\n\n\n\n\t\tpermutation_p(\n\n\t\t\t&mut v0, &mut v1, &mut v2, &mut v3, \n\n\t\t\t&mut v4, &mut v5, &mut v6, &mut v7, \n\n\t\t\t&mut v8, &mut v9, &mut v10, &mut v11, \n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 53, "score": 109223.24861619268 }, { "content": "/// Test runner for AEADs.\n\npub fn AeadTestRunner<Sealer, Opener, Key, Nonce>(\n\n sealer: Sealer,\n\n opener: Opener,\n\n key: Key,\n\n nonce: Nonce,\n\n input: &[u8],\n\n expected_ct_with_tag: Option<&[u8]>,\n\n tag_size: usize,\n\n aad: &[u8],\n\n) where\n\n Sealer: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Opener: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n seal_dst_out_length(&sealer, &key, &nonce, input, tag_size, aad);\n\n open_dst_out_length(&sealer, &opener, &key, &nonce, input, tag_size, aad);\n\n open_modified_tag_err(&sealer, &opener, &key, &nonce, input, tag_size, aad);\n\n open_modified_ciphertext_err(&sealer, &opener, &key, &nonce, input, tag_size, aad);\n\n open_modified_aad_err(&sealer, &opener, &key, &nonce, input, tag_size, aad);\n\n none_or_empty_some_aad_same_result(&sealer, &opener, &key, &nonce, input, tag_size);\n\n seal_open_equals_expected(\n", "file_path": "src/test_framework/aead_interface.rs", "rank": 54, "score": 94933.33353069652 }, { "content": "/// Test runner for stream ciphers.\n\npub fn StreamCipherTestRunner<Encryptor, Decryptor, Key, Nonce>(\n\n encryptor: 
Encryptor,\n\n decryptor: Decryptor,\n\n key: Key,\n\n nonce: Nonce,\n\n counter: u32,\n\n input: &[u8],\n\n expected_ct: Option<&[u8]>,\n\n) where\n\n Encryptor: Fn(&Key, &Nonce, u32, &[u8], &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Decryptor: Fn(&Key, &Nonce, u32, &[u8], &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n if !input.is_empty() {\n\n encrypt_decrypt_out_length(&encryptor, &decryptor, &key, &nonce, input);\n\n encrypt_decrypt_equals_expected(\n\n &encryptor,\n\n &decryptor,\n\n &key,\n\n &nonce,\n\n counter,\n", "file_path": "src/test_framework/streamcipher_interface.rs", "rank": 55, "score": 93192.26344667992 }, { "content": "/// Test that sealing and opening with different secret-key/nonce yields an error.\n\npub fn test_diff_params_err<Sealer, Opener, Key, Nonce>(\n\n sealer: &Sealer,\n\n opener: &Opener,\n\n input: &[u8],\n\n tag_size: usize,\n\n) where\n\n Key: TestingRandom + PartialEq<Key>,\n\n Nonce: TestingRandom + PartialEq<Nonce>,\n\n Sealer: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Opener: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n let sk1 = Key::gen();\n\n let sk2 = Key::gen();\n\n assert!(sk1 != sk2);\n\n\n\n let n1 = Nonce::gen();\n\n let n2 = Nonce::gen();\n\n assert!(n1 != n2);\n\n\n\n let mut dst_out_ct = vec![0u8; input.len() + tag_size];\n", "file_path": "src/test_framework/aead_interface.rs", "rank": 56, "score": 93192.26344667992 }, { "content": "/// Test that encrypting using different secret-key/nonce/initial-counter combinations yields different\n\n/// ciphertexts.\n\npub fn test_diff_params_diff_output<Encryptor, Decryptor, Key, Nonce>(\n\n encryptor: &Encryptor,\n\n decryptor: &Decryptor,\n\n) where\n\n Key: TestingRandom + PartialEq<Key>,\n\n Nonce: TestingRandom + PartialEq<Nonce>,\n\n Encryptor: Fn(&Key, &Nonce, u32, &[u8], &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Decryptor: Fn(&Key, &Nonce, u32, &[u8], 
&mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n let input = &[0u8; 16];\n\n\n\n let sk1 = Key::gen();\n\n let sk2 = Key::gen();\n\n assert!(sk1 != sk2);\n\n\n\n let n1 = Nonce::gen();\n\n let n2 = Nonce::gen();\n\n assert!(n1 != n2);\n\n\n\n let c1 = 0u32;\n", "file_path": "src/test_framework/streamcipher_interface.rs", "rank": 57, "score": 91552.38538495003 }, { "content": "#[cfg(feature = \"safe_api\")]\n\nfn encrypt_decrypt_input_empty<Encryptor, Decryptor, Key, Nonce>(\n\n encryptor: &Encryptor,\n\n decryptor: &Decryptor,\n\n key: &Key,\n\n nonce: &Nonce,\n\n) where\n\n Encryptor: Fn(&Key, &Nonce, u32, &[u8], &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Decryptor: Fn(&Key, &Nonce, u32, &[u8], &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n let mut dst_out = [0u8; 64];\n\n assert!(encryptor(key, nonce, 0, &[0u8; 0], &mut dst_out).is_err());\n\n assert!(decryptor(key, nonce, 0, &[0u8; 0], &mut dst_out).is_err());\n\n}\n\n\n", "file_path": "src/test_framework/streamcipher_interface.rs", "rank": 58, "score": 77482.48425401417 }, { "content": "/// Using None or Some with empty slice should produce the exact same result.\n\nfn none_or_empty_some_aad_same_result<Sealer, Opener, Key, Nonce>(\n\n sealer: &Sealer,\n\n opener: &Opener,\n\n key: &Key,\n\n nonce: &Nonce,\n\n input: &[u8],\n\n tag_size: usize,\n\n) where\n\n Sealer: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n Opener: Fn(&Key, &Nonce, &[u8], Option<&[u8]>, &mut [u8]) -> Result<(), UnknownCryptoError>,\n\n{\n\n let mut dst_out_ct_none = vec![0u8; input.len() + tag_size];\n\n let mut dst_out_ct_some_empty = vec![0u8; input.len() + tag_size];\n\n\n\n sealer(key, nonce, input, None, &mut dst_out_ct_none).unwrap();\n\n sealer(\n\n key,\n\n nonce,\n\n input,\n\n Some(&[0u8; 0]),\n", "file_path": "src/test_framework/aead_interface.rs", "rank": 59, "score": 77481.84894133976 }, { "content": "/// Data-independent indexing.\n\nstruct Gidx {\n\n block: 
[u64; 128],\n\n addresses: [u64; 128],\n\n segment_length: u32,\n\n offset: u32,\n\n}\n\n\n\nimpl Gidx {\n\n fn new(blocks: u32, passes: u32, segment_length: u32) -> Self {\n\n let mut block = [0u64; 128];\n\n block[1] = 0u64; // Lane number, we only support one (0u64).\n\n block[3] = u64::from(blocks);\n\n block[4] = u64::from(passes);\n\n block[5] = u64::from(ARGON2_VARIANT); // The Argon2i variant\n\n\n\n Self {\n\n block,\n\n addresses: [0u64; 128],\n\n segment_length,\n\n offset: 0,\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 60, "score": 70761.0207897996 }, { "content": "#[test]\n\n#[cfg(feature = \"safe_api\")]\n\nfn test_source() {\n\n use std::error::Error;\n\n assert!(UnknownCryptoError.source().is_none());\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 61, "score": 64822.60175147468 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn permutation_p(\n\n v0: &mut u64,\n\n v1: &mut u64,\n\n v2: &mut u64,\n\n v3: &mut u64,\n\n v4: &mut u64,\n\n v5: &mut u64,\n\n v6: &mut u64,\n\n v7: &mut u64,\n\n v8: &mut u64,\n\n v9: &mut u64,\n\n v10: &mut u64,\n\n v11: &mut u64,\n\n v12: &mut u64,\n\n v13: &mut u64,\n\n v14: &mut u64,\n\n v15: &mut u64,\n\n) {\n\n g(v0, v4, v8, v12);\n\n g(v1, v5, v9, v13);\n\n g(v2, v6, v10, v14);\n\n g(v3, v7, v11, v15);\n\n g(v0, v5, v10, v15);\n\n g(v1, v6, v11, v12);\n\n g(v2, v7, v8, v13);\n\n g(v3, v4, v9, v14);\n\n}\n\n\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 62, "score": 63235.15307244971 }, { "content": "/// Represent an element in the curve field.\n\nstruct FieldElement([u64; 5]);\n\n\n\nimpl Eq for FieldElement {}\n\n\n\nimpl PartialEq for FieldElement {\n\n fn eq(&self, other: &Self) -> bool {\n\n use subtle::ConstantTimeEq;\n\n self.as_bytes().ct_eq(&other.as_bytes()).into()\n\n }\n\n}\n\n\n\n/// The function fiat_25519_carry_mul multiplies two field elements and reduces the result.\n\nimpl Mul for FieldElement {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: Self) -> 
Self::Output {\n\n use fiat_curve25519_u64::fiat_25519_carry_mul;\n\n\n\n let mut ret = [0u64; 5];\n\n fiat_25519_carry_mul(&mut ret, &self.0, &rhs.0);\n", "file_path": "src/hazardous/ecc/x25519.rs", "rank": 63, "score": 62435.261043367645 }, { "content": "fn wycheproof_test_runner(\n\n key: &[u8],\n\n nonce: &[u8],\n\n aad: &[u8],\n\n tag: &[u8],\n\n input: &[u8],\n\n output: &[u8],\n\n result: bool,\n\n tcid: u64,\n\n is_ietf: bool,\n\n) -> Result<(), UnknownCryptoError> {\n\n let mut dst_ct_out = vec![0u8; input.len() + 16];\n\n let mut dst_pt_out = vec![0u8; input.len()];\n\n\n\n if result {\n\n let key = SecretKey::from_slice(&key)?;\n\n\n\n if is_ietf {\n\n let nonce = chacha20poly1305::Nonce::from_slice(&nonce)?;\n\n chacha20poly1305::seal(&key, &nonce, input, Some(aad), &mut dst_ct_out)?;\n", "file_path": "tests/aead/mod.rs", "rank": 64, "score": 61765.34258321639 }, { "content": "/// Authenticates the ciphertext, ad and their lengths.\n\nfn process_authentication(\n\n auth_ctx: &mut Poly1305,\n\n ad: &[u8],\n\n ciphertext: &[u8],\n\n) -> Result<(), UnknownCryptoError> {\n\n auth_ctx.process_pad_to_blocksize(ad)?;\n\n auth_ctx.process_pad_to_blocksize(ciphertext)?;\n\n\n\n let (ad_len, ct_len): (u64, u64) = match (ad.len().try_into(), ciphertext.len().try_into()) {\n\n (Ok(alen), Ok(clen)) => (alen, clen),\n\n _ => return Err(UnknownCryptoError),\n\n };\n\n\n\n let mut tmp_pad = [0u8; 16];\n\n tmp_pad[0..8].copy_from_slice(&ad_len.to_le_bytes());\n\n tmp_pad[8..16].copy_from_slice(&ct_len.to_le_bytes());\n\n auth_ctx.update(tmp_pad.as_ref())\n\n}\n\n\n\n#[must_use = \"SECURITY WARNING: Ignoring a Result can have real security implications.\"]\n", "file_path": "src/hazardous/aead/chacha20poly1305.rs", "rank": 65, "score": 61765.34258321639 }, { "content": "// format! 
is only available with std\n\nfn test_unknown_crypto_from_getrandom() {\n\n use core::num::NonZeroU32;\n\n // Choose some random error code.\n\n let err_code = NonZeroU32::new(12).unwrap();\n\n let err_foreign: getrandom::Error = getrandom::Error::from(err_code);\n\n\n\n // Tests Debug impl through \"{:?}\"\n\n let err = format!(\"{:?}\", UnknownCryptoError::from(err_foreign));\n\n assert_eq!(err, \"UnknownCryptoError\");\n\n // Tests Display impl through \"{}\"\n\n let err = format!(\"{}\", UnknownCryptoError::from(err_foreign));\n\n assert_eq!(err, \"UnknownCryptoError\");\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 66, "score": 61765.34258321639 }, { "content": "/// H0 as defined in the specification.\n\nfn initial_hash(\n\n hash_length: u32,\n\n memory_kib: u32,\n\n passes: u32,\n\n p: &[u8],\n\n s: &[u8],\n\n k: &[u8],\n\n x: &[u8],\n\n) -> Result<[u8; 72], UnknownCryptoError> {\n\n // We save additional 8 bytes in H0 for when the first two blocks are processed,\n\n // so that this may contain two little-endian integers.\n\n let mut h0 = [0u8; 72];\n\n let mut hasher = Blake2b::new(BLAKE2B_OUTSIZE)?;\n\n\n\n // Collect the first part to reduce times we update the hasher state.\n\n h0[0..4].copy_from_slice(&LANES.to_le_bytes());\n\n h0[4..8].copy_from_slice(&hash_length.to_le_bytes());\n\n h0[8..12].copy_from_slice(&memory_kib.to_le_bytes());\n\n h0[12..16].copy_from_slice(&passes.to_le_bytes());\n\n h0[16..20].copy_from_slice(&ARGON2_VERSION.to_le_bytes());\n", "file_path": "src/hazardous/kdf/argon2i.rs", "rank": 67, "score": 61765.34258321639 }, { "content": "#[test]\n\nfn test_xchacha20poly1305() {\n\n boringssl_runner(\n\n \"./tests/test_data/third_party/google/boringssl/boringssl_xchacha20_poly1305.txt\",\n\n false,\n\n );\n\n}\n", "file_path": "tests/aead/boringssl_tests.rs", "rank": 68, "score": 61765.34258321639 }, { "content": "#[test]\n\nfn test_pynacl() {\n\n 
run_tests_from_json(\"./tests/test_data/third_party/pynacl/pynacl_raw_argon2i_hashes.json\");\n\n}\n", "file_path": "tests/kdf/pynacl_argon2i.rs", "rank": 69, "score": 61765.34258321639 }, { "content": "#[test]\n\nfn test_chacha20poly1305() {\n\n boringssl_runner(\n\n \"./tests/test_data/third_party/google/boringssl/boringssl_chacha20_poly1305.txt\",\n\n true,\n\n );\n\n}\n\n\n", "file_path": "tests/aead/boringssl_tests.rs", "rank": 70, "score": 61765.34258321639 }, { "content": "/// Using BLAKE2b, derive two shared secret from a scalarmult computation.\n\nfn establish_session_keys(\n\n shared_secret: &x25519::SharedKey,\n\n client_pk: &PublicKey,\n\n server_pk: &PublicKey,\n\n) -> Result<Digest, UnknownCryptoError> {\n\n let mut ctx = Blake2b::new(64)?;\n\n ctx.update(shared_secret.unprotected_as_bytes())?;\n\n ctx.update(&client_pk.to_bytes())?;\n\n ctx.update(&server_pk.to_bytes())?;\n\n ctx.finalize()\n\n}\n\n\n\n// Testing public functions in the module.\n\n#[cfg(test)]\n\nmod public {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_basic_key_exchange() {\n\n let session_server = EphemeralServerSession::new().unwrap();\n", "file_path": "src/high_level/kex.rs", "rank": 71, "score": 60405.22874216872 }, { "content": "#[test]\n\nfn test_wycheproof_hmac_512() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hmac_sha512_test.json\",\n\n );\n\n}\n", "file_path": "tests/mac/wycheproof_hmac.rs", "rank": 72, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_hkdf_512() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hkdf_sha512_test.json\",\n\n );\n\n}\n", "file_path": "tests/kdf/wycheproof_hkdf.rs", "rank": 73, "score": 60400.561086246176 }, { "content": "#[test]\n\n#[cfg(feature = \"safe_api\")]\n\nfn test_unknown_crypto_from_parseint_error() {\n\n let err_foreign = \"j\".parse::<u32>().unwrap_err();\n\n\n\n // Tests Debug impl through \"{:?}\" and Display impl 
though \"{}\"\n\n let err = format!(\n\n \"{:?}:{}\",\n\n UnknownCryptoError::from(err_foreign.clone()),\n\n UnknownCryptoError::from(err_foreign)\n\n );\n\n assert_eq!(err, \"UnknownCryptoError:UnknownCryptoError\");\n\n}\n", "file_path": "src/errors.rs", "rank": 74, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_hkdf_384() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hkdf_sha384_test.json\",\n\n );\n\n}\n", "file_path": "tests/kdf/wycheproof_hkdf.rs", "rank": 75, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_hmac_384() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hmac_sha384_test.json\",\n\n );\n\n}\n\n\n", "file_path": "tests/mac/wycheproof_hmac.rs", "rank": 76, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_hmac_256() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hmac_sha256_test.json\",\n\n );\n\n}\n\n\n", "file_path": "tests/mac/wycheproof_hmac.rs", "rank": 77, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_blake2b_kat() {\n\n let file = File::open(\"./tests/test_data/third_party/blake2-kat.json\").unwrap();\n\n let reader = BufReader::new(file);\n\n let stream = Deserializer::from_reader(reader).into_iter::<Value>();\n\n\n\n for test_collection in stream {\n\n for test_object in test_collection.unwrap().as_array() {\n\n for test_case in test_object {\n\n // Only test BLAKE2b test vectors\n\n if test_case.get(\"hash\").unwrap() == \"blake2b\" {\n\n super::blake2b_test_runner(\n\n &decode(test_case.get(\"in\").unwrap().as_str().unwrap()).unwrap(),\n\n &decode(test_case.get(\"key\").unwrap().as_str().unwrap()).unwrap(),\n\n &decode(test_case.get(\"out\").unwrap().as_str().unwrap()).unwrap(),\n\n )\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tests/hash/blake2b_kat.rs", "rank": 78, "score": 60400.561086246176 }, { "content": 
"#[test]\n\nfn test_boringssl_poly1305() {\n\n let boringssl_poly1305_fields: Vec<String> = vec![\"Key\".into(), \"Input\".into(), \"MAC\".into()];\n\n let mut boringssl_poly1305_reader = TestCaseReader::new(\n\n \"./tests/test_data/third_party/google/boringssl/boringssl_poly1305_tests.txt\",\n\n boringssl_poly1305_fields,\n\n \"=\",\n\n );\n\n\n\n let mut test_case = boringssl_poly1305_reader.next();\n\n while test_case.is_some() {\n\n let mut tc = test_case.unwrap();\n\n\n\n let key: Vec<u8> = TestCaseReader::default_parse(tc.get_data(\"Key\"));\n\n let input: Vec<u8> = TestCaseReader::default_parse(tc.get_data(\"Input\"));\n\n let expected_output: Vec<u8> = TestCaseReader::default_parse(tc.get_data(\"MAC\"));\n\n\n\n if key.is_empty() {\n\n tc.outcome = false;\n\n }\n\n if expected_output.is_empty() {\n\n tc.outcome = false;\n\n }\n\n\n\n poly1305_test_runner(&key[..], &input[..], &expected_output[..]);\n\n\n\n // Read the next one\n\n test_case = boringssl_poly1305_reader.next();\n\n }\n\n}\n", "file_path": "tests/mac/boringssl_poly1305.rs", "rank": 79, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_hkdf_256() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_hkdf_sha256_test.json\",\n\n );\n\n}\n", "file_path": "tests/kdf/wycheproof_hkdf.rs", "rank": 80, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_pynacl() {\n\n run_tests_from_json(\n\n \"./tests/test_data/third_party/pynacl/pynacl_secretstream_test_vectors.json\",\n\n );\n\n run_tests_from_json(\"./tests/test_data/pynacl_generated.json\");\n\n run_tests_from_json(\"./tests/test_data/pynacl_generated_with_rekey.json\");\n\n}\n", "file_path": "tests/aead/pynacl_streaming_aead.rs", "rank": 81, "score": 60400.561086246176 }, { "content": "#[test]\n\n#[cfg(feature = \"safe_api\")]\n\nfn test_unknown_crypto_from_decode_error() {\n\n use ct_codecs::Error;\n\n\n\n let err_one = Error::InvalidInput;\n\n let err_two = 
Error::Overflow;\n\n\n\n // Tests Debug impl through \"{:?}\" and Display impl though \"{}\"\n\n let err = format!(\n\n \"{:?}:{}\",\n\n UnknownCryptoError::from(err_one.clone()),\n\n UnknownCryptoError::from(err_one)\n\n );\n\n assert_eq!(err, \"UnknownCryptoError:UnknownCryptoError\");\n\n let err = format!(\n\n \"{:?}:{}\",\n\n UnknownCryptoError::from(err_two.clone()),\n\n UnknownCryptoError::from(err_two)\n\n );\n\n assert_eq!(err, \"UnknownCryptoError:UnknownCryptoError\");\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 82, "score": 60400.561086246176 }, { "content": "#[test]\n\nfn test_wycheproof_x25519() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_x25519_test.json\",\n\n );\n\n}\n", "file_path": "tests/ecc/wycheproof_x25519.rs", "rank": 83, "score": 60400.561086246176 }, { "content": "/// Type for a Poly1305 tag.\n\ntype Poly1305Tag = [u8; POLY1305_OUTSIZE];\n\n\n\nconstruct_secret_key! {\n\n /// A type to represent the `OneTimeKey` that Poly1305 uses for authentication.\n\n ///\n\n /// # Errors:\n\n /// An error will be returned if:\n\n /// - `slice` is not 32 bytes.\n\n ///\n\n /// # Panics:\n\n /// A panic will occur if:\n\n /// - Failure to generate random bytes securely.\n\n (OneTimeKey, test_one_time_key, POLY1305_KEYSIZE, POLY1305_KEYSIZE, POLY1305_KEYSIZE)\n\n}\n\n\n\nimpl_from_trait!(OneTimeKey, POLY1305_KEYSIZE);\n\n\n\nconstruct_tag! 
{\n\n /// A type to represent the `Tag` that Poly1305 returns.\n\n ///\n", "file_path": "src/hazardous/mac/poly1305.rs", "rank": 84, "score": 59473.865325055995 }, { "content": "/// The F function as described in the RFC.\n\nfn _function_f<Hmac>(\n\n salt: &[u8],\n\n iterations: usize,\n\n index: u32,\n\n dk_block: &mut [u8],\n\n block_len: usize,\n\n u_step: &mut [u8],\n\n hmac: &mut Hmac,\n\n) -> Result<(), UnknownCryptoError>\n\nwhere\n\n Hmac: hmac::HmacFunction,\n\n{\n\n debug_assert_eq!(u_step.len(), Hmac::HASH_FUNC_OUTSIZE);\n\n hmac._update(salt)?;\n\n hmac._update(&index.to_be_bytes())?;\n\n hmac._finalize(u_step)?;\n\n debug_assert!(block_len <= u_step.len());\n\n dk_block.copy_from_slice(&u_step[..block_len]);\n\n\n\n if iterations > 1 {\n", "file_path": "src/hazardous/kdf/pbkdf2.rs", "rank": 85, "score": 59459.88161114558 }, { "content": "#[test]\n\nfn test_nist_cavp() {\n\n let nist_cavp_fields: Vec<String> = vec![\n\n \"Count\".into(),\n\n \"Klen\".into(),\n\n \"Tlen\".into(),\n\n \"Key\".into(),\n\n \"Msg\".into(),\n\n \"Mac\".into(),\n\n ];\n\n\n\n let mut nist_cavp_reader = TestCaseReader::new(\n\n \"./tests/test_data/third_party/nist/HMAC.rsp\",\n\n nist_cavp_fields.clone(),\n\n \"=\",\n\n );\n\n // Set variant separators.\n\n nist_cavp_reader.set_stop_flags(vec![\"[L=32]\".into(), \"[L=48]\".into(), \"[L=64]\".into()]);\n\n // The current HMAC variant being tested.\n\n let mut current_variant = String::new();\n\n let mut test_case: Option<TestCase>;\n", "file_path": "tests/mac/nist_cavp_hmac.rs", "rank": 86, "score": 59129.93926389486 }, { "content": "#[test]\n\nfn test_wycheproof_xchacha20_poly1305() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_xchacha20_poly1305_test.json\",\n\n );\n\n}\n", "file_path": "tests/aead/wycheproof_aead.rs", "rank": 87, "score": 59129.93926389486 }, { "content": "// format! 
is only available with std\n\nfn test_unknown_crypto_error_debug_display() {\n\n // Tests Debug impl through \"{:?}\"\n\n let err = format!(\"{:?}\", UnknownCryptoError);\n\n assert_eq!(err, \"UnknownCryptoError\");\n\n // Tests Display impl through \"{}\"\n\n let err = format!(\"{}\", UnknownCryptoError);\n\n assert_eq!(err, \"UnknownCryptoError\");\n\n}\n\n\n\n#[test]\n\n#[cfg(feature = \"safe_api\")]\n", "file_path": "src/errors.rs", "rank": 88, "score": 59129.93926389486 }, { "content": "#[test]\n\nfn test_wycheproof_chacha20_poly1305() {\n\n wycheproof_runner(\n\n \"./tests/test_data/third_party/google/wycheproof/wycheproof_chacha20_poly1305_test.json\",\n\n );\n\n}\n\n\n", "file_path": "tests/aead/wycheproof_aead.rs", "rank": 89, "score": 59129.93926389486 }, { "content": "#[test]\n\nfn test_nist_cavp_short_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA384ShortMsg.rsp\");\n\n}\n", "file_path": "tests/hash/sha384_nist_cavp.rs", "rank": 90, "score": 56834.71240112852 }, { "content": "#[test]\n\nfn test_nist_cavp_long_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA512LongMsg.rsp\");\n\n}\n\n\n", "file_path": "tests/hash/sha512_nist_cavp.rs", "rank": 91, "score": 56834.71240112852 }, { "content": "#[test]\n\nfn test_nist_cavp_short_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA512ShortMsg.rsp\");\n\n}\n", "file_path": "tests/hash/sha512_nist_cavp.rs", "rank": 92, "score": 56834.71240112852 }, { "content": "#[test]\n\nfn test_nist_cavp_long_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA256LongMsg.rsp\");\n\n}\n\n\n", "file_path": "tests/hash/sha256_nist_cavp.rs", "rank": 93, "score": 56834.71240112852 }, { "content": "#[test]\n\nfn test_nist_cavp_short_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA256ShortMsg.rsp\");\n\n}\n", "file_path": "tests/hash/sha256_nist_cavp.rs", "rank": 94, "score": 56834.71240112852 }, { "content": "#[test]\n\nfn 
test_nist_cavp_long_msg() {\n\n nist_cavp_runner(\"./tests/test_data/third_party/nist/SHA384LongMsg.rsp\");\n\n}\n\n\n", "file_path": "tests/hash/sha384_nist_cavp.rs", "rank": 95, "score": 56834.71240112852 }, { "content": "/// NISTs SHA256/384/512 Long/Short share the same format,\n\n/// so fields and separator remain the same.\n\nfn nist_cavp_runner(path: &str) {\n\n let nist_cavp_fields: Vec<String> = vec![\"Len\".into(), \"Msg\".into(), \"MD\".into()];\n\n let mut nist_cavp_reader = TestCaseReader::new(path, nist_cavp_fields, \"=\");\n\n\n\n let mut test_case = nist_cavp_reader.next();\n\n // Check that we actually ran any of the SHA2 test runners.\n\n let mut ran_any_runner = false;\n\n while test_case.is_some() {\n\n let tc = test_case.unwrap();\n\n\n\n let input: Vec<u8> = TestCaseReader::default_parse(tc.get_data(\"Msg\"));\n\n let expected_output: Vec<u8> = TestCaseReader::default_parse(tc.get_data(\"MD\"));\n\n\n\n if path.contains(\"SHA256\") {\n\n sha256_test_runner(&input[..], &expected_output[..]);\n\n ran_any_runner = true;\n\n }\n\n if path.contains(\"SHA384\") {\n\n sha384_test_runner(&input[..], &expected_output[..]);\n\n ran_any_runner = true;\n", "file_path": "tests/hash/mod.rs", "rank": 96, "score": 54929.891552399524 }, { "content": "fn wycheproof_runner(path: &str) {\n\n let file = File::open(path).unwrap();\n\n let reader = BufReader::new(file);\n\n let tests: WycheproofHmacTests = serde_json::from_reader(reader).unwrap();\n\n\n\n let mut tests_run = 0;\n\n for test_group in tests.testGroups.iter() {\n\n for test in test_group.tests.iter() {\n\n let should_test_pass: bool = match test.result.as_str() {\n\n \"valid\" => true,\n\n \"invalid\" => false,\n\n _ => panic!(\"Unexpected test outcome for Wycheproof test\"),\n\n };\n\n\n\n if path.contains(\"sha256\") {\n\n super::hmac256_test_runner(\n\n &decode(&test.tag).unwrap(),\n\n &decode(&test.key).unwrap(),\n\n &decode(&test.msg).unwrap(),\n\n Some((test_group.tagSize / 8) as usize),\n", 
"file_path": "tests/mac/wycheproof_hmac.rs", "rank": 97, "score": 54929.891552399524 }, { "content": "fn wycheproof_runner(path: &str) {\n\n let file = File::open(path).unwrap();\n\n let reader = BufReader::new(file);\n\n let tests: WycheproofAeadTests = serde_json::from_reader(reader).unwrap();\n\n\n\n let is_ietf = match tests.algorithm.as_str() {\n\n \"CHACHA20-POLY1305\" => true,\n\n \"XCHACHA20-POLY1305\" => false,\n\n _ => panic!(\"Unexpected name for Wycheproof algorithm\"),\n\n };\n\n\n\n let mut tests_run = 0;\n\n for test_group in tests.testGroups.iter() {\n\n for test in test_group.tests.iter() {\n\n let should_test_pass: bool = match test.result.as_str() {\n\n \"valid\" => true,\n\n \"invalid\" => false,\n\n _ => panic!(\"Unexpected test outcome for Wycheproof test\"),\n\n };\n\n\n", "file_path": "tests/aead/wycheproof_aead.rs", "rank": 98, "score": 54929.891552399524 }, { "content": "fn wycheproof_runner(path: &str) {\n\n let file = File::open(path).unwrap();\n\n let reader = BufReader::new(file);\n\n let tests: WycheproofX25519Tests = serde_json::from_reader(reader).unwrap();\n\n\n\n let mut tests_run = 0;\n\n for test_group in tests.testGroups.iter() {\n\n for test in test_group.tests.iter() {\n\n let mut should_test_pass: bool = match test.result.as_str() {\n\n \"valid\" => true,\n\n \"acceptable\" => true, // NOTE: We handle special cases after this\n\n _ => panic!(\"Unexpected test outcome for Wycheproof test\"),\n\n };\n\n\n\n // Only tests that are only `Twist`/`NonCanonical` are accepted.\n\n\n\n if test.flags.contains(&\"ZeroSharedSecret\".to_string()) {\n\n should_test_pass = false;\n\n }\n\n\n", "file_path": "tests/ecc/wycheproof_x25519.rs", "rank": 99, "score": 54929.891552399524 } ]
Rust
examples/example1.rs
evetion/startin
88ad5557cbd954ec8996f99d9afb74fd1ec174ec
#![allow(dead_code)] extern crate csv; extern crate serde; extern crate startin; #[macro_use] extern crate serde_derive; use std::error::Error; use std::io; #[derive(Debug, Deserialize)] pub struct CSVPoint { pub x: f64, pub y: f64, pub z: f64, } fn main() { let re = read_xyz_file(); let vec = match re { Ok(vec) => vec, Err(error) => panic!("Problem with the file {:?}", error), }; let mut dt = startin::Triangulation::new(); dt.set_jump_and_walk(false); dt.use_robust_predicates(true); let mut duplicates = 0; for p in vec.into_iter() { let re = dt.insert_one_pt(p.x, p.y, p.z); match re { Ok(_x) => continue, Err(_e) => duplicates = duplicates + 1, }; } if duplicates > 0 { println!("Duplicates? {} of them.\n", duplicates); } else { println!("Duplicates? none.\n"); } println!("{}", dt); let pts = dt.all_vertices(); println!("Size pts: {}", pts.len()); println!("Vertex CH: {}", dt.is_vertex_convex_hull(0)); let re = dt.adjacent_vertices_to_vertex(66); if re.is_some() == true { for each in re.unwrap() { println!("Adjacent vertex {}", each); } } else { println!("Vertex does not exists."); } let trs = dt.incident_triangles_to_vertex(6).unwrap(); let re = dt.adjacent_triangles_to_triangle(&trs[0]); if re.is_some() == true { println!("Adjacent to: {}", &trs[0]); for tr in re.unwrap().iter() { println!("adj: {}", tr); } } let pathout = "/Users/hugo/temp/out.obj"; println!("Writing OBJ file..."); let re = dt.write_obj(pathout.to_string(), false); match re { Ok(_x) => println!("--> OBJ output saved to: {}", pathout), Err(_x) => println!("ERROR: path {} doesn't exist, abort.", pathout), } } fn read_xyz_file() -> Result<Vec<CSVPoint>, Box<dyn Error>> { let mut rdr = csv::ReaderBuilder::new() .delimiter(b' ') .from_reader(io::stdin()); let mut vpts: Vec<CSVPoint> = Vec::new(); for result in rdr.deserialize() { let record: CSVPoint = result?; vpts.push(record); } Ok(vpts) }
#![allow(dead_code)] extern crate csv; extern crate serde; extern crate startin; #[macro_use] extern crate serde_derive; use std::error::Error; use std::io; #[derive(Debug, Deserialize)] pub struct CSVPoint { pub x: f64, pub y: f64, pub z: f64, } fn main() { let re = read_xyz_file(); let vec = match re { Ok(vec) => vec, Err(error) => panic!("Problem with the file {:?}", error), }; let mut dt = startin::Triangulation::new(); dt.set_jump_and_walk(false); dt.use_robust_predicates(true); let mut duplicates = 0; for p in vec.into_iter() { let re = dt.insert_one_pt(p.x, p.y, p.z); match re { Ok(_x) => continue, Err(_e) => duplicates = duplicates + 1, }; } if duplicates > 0 { println!("Duplicates? {} of them.\n", duplicates); } else { println!("Duplicates? none.\n"); } println!("{}", dt); let pts = dt.all_vertices(); println!("Size pts: {}", pts.len()); println!("Vertex CH: {}", dt.is_vertex_convex_hull(0)); let re = dt.adja
fn read_xyz_file() -> Result<Vec<CSVPoint>, Box<dyn Error>> { let mut rdr = csv::ReaderBuilder::new() .delimiter(b' ') .from_reader(io::stdin()); let mut vpts: Vec<CSVPoint> = Vec::new(); for result in rdr.deserialize() { let record: CSVPoint = result?; vpts.push(record); } Ok(vpts) }
cent_vertices_to_vertex(66); if re.is_some() == true { for each in re.unwrap() { println!("Adjacent vertex {}", each); } } else { println!("Vertex does not exists."); } let trs = dt.incident_triangles_to_vertex(6).unwrap(); let re = dt.adjacent_triangles_to_triangle(&trs[0]); if re.is_some() == true { println!("Adjacent to: {}", &trs[0]); for tr in re.unwrap().iter() { println!("adj: {}", tr); } } let pathout = "/Users/hugo/temp/out.obj"; println!("Writing OBJ file..."); let re = dt.write_obj(pathout.to_string(), false); match re { Ok(_x) => println!("--> OBJ output saved to: {}", pathout), Err(_x) => println!("ERROR: path {} doesn't exist, abort.", pathout), } }
function_block-function_prefixed
[ { "content": "fn read_xyz_file() -> Vec<Vec<f64>> {\n\n let tmpf = File::open(\"/Users/hugo/Dropbox/data/ahn3/o3.txt\").unwrap();\n\n // let tmpf = File::open(\"/Users/hugo/Dropbox/data/ahn3/test.txt\").unwrap();\n\n let file = BufReader::new(&tmpf);\n\n\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n\n\n for (num, line) in file.lines().enumerate() {\n\n if num != 0 {\n\n let l = line.unwrap();\n\n let v: Vec<f64> = l.split(' ').map(|s| s.parse().unwrap()).collect();\n\n // println!(\"{:?}\", v);\n\n pts.push(v);\n\n }\n\n }\n\n pts\n\n}\n", "file_path": "examples/example8.rs", "rank": 0, "score": 120308.1302379382 }, { "content": "pub fn circle_centre(a: &[f64], b: &[f64], c: &[f64]) -> Vec<f64> {\n\n //-- nicked from http://www.ambrsoft.com/trigocalc/circle3d.htm\n\n let val_a = det3x3t(&[a[0], a[1], 1.0], &[b[0], b[1], 1.0], &[c[0], c[1], 1.0]);\n\n let val_b = det3x3t(\n\n &[a[0] * a[0] + a[1] * a[1], a[1], 1.0],\n\n &[b[0] * b[0] + b[1] * b[1], b[1], 1.0],\n\n &[c[0] * c[0] + c[1] * c[1], c[1], 1.0],\n\n );\n\n let val_c = det3x3t(\n\n &[a[0] * a[0] + a[1] * a[1], a[0], 1.0],\n\n &[b[0] * b[0] + b[1] * b[1], b[0], 1.0],\n\n &[c[0] * c[0] + c[1] * c[1], c[0], 1.0],\n\n );\n\n let x = val_b / (2.0 * val_a);\n\n let y = -val_c / (2.0 * val_a);\n\n vec![x, y, 0.0]\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 1, "score": 114061.91750689801 }, { "content": "pub fn distance2d(a: &[f64], b: &[f64]) -> f64 {\n\n let d2 = (b[0] - a[0]) * (b[0] - a[0]) + (b[1] - a[1]) * (b[1] - a[1]);\n\n d2.sqrt()\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 3, "score": 100116.69584612502 }, { "content": "pub fn det3x3t(a: &[f64], b: &[f64], c: &[f64]) -> f64 {\n\n ((a[0] - c[0]) * (b[1] - c[1])) - ((a[1] - c[1]) * (b[0] - c[0]))\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 4, "score": 99021.28421504388 }, { "content": "pub fn distance2d_squared(a: &[f64], b: &[f64]) -> f64 {\n\n (b[0] - a[0]) * (b[0] - a[0]) + (b[1] - a[1]) * (b[1] - a[1])\n\n}\n\n\n", 
"file_path": "src/geom/mod.rs", "rank": 5, "score": 97355.36513279172 }, { "content": "pub fn area_triangle(a: &[f64], b: &[f64], c: &[f64]) -> f64 {\n\n det3x3t(a, b, c) / 2.0\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 6, "score": 96557.08632994693 }, { "content": "pub fn incircle_robust(a: &[f64], b: &[f64], c: &[f64], p: &[f64]) -> i8 {\n\n //-- p is INSIDE == +1\n\n //-- p is OUTSIDE == -1\n\n //-- p is ONCIRCLE == 0\n\n let re = robust::incircle(\n\n robust::Coord { x: a[0], y: a[1] },\n\n robust::Coord { x: b[0], y: b[1] },\n\n robust::Coord { x: c[0], y: c[1] },\n\n robust::Coord { x: p[0], y: p[1] },\n\n );\n\n if re == 0.0_f64 {\n\n return 0;\n\n } else if re.is_sign_positive() {\n\n return 1;\n\n } else {\n\n return -1;\n\n }\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 7, "score": 96508.10930911795 }, { "content": "pub fn incircle_fast(a: &[f64], b: &[f64], c: &[f64], p: &[f64]) -> i8 {\n\n //-- p is INSIDE == +1\n\n //-- p is OUTSIDE == -1\n\n //-- p is ONCIRCLE == 0\n\n let at = (\n\n a[0] - p[0],\n\n a[1] - p[1],\n\n (a[0] * a[0] + a[1] * a[1]) - (p[0] * p[0] + p[1] * p[1]),\n\n );\n\n let bt = (\n\n b[0] - p[0],\n\n b[1] - p[1],\n\n (b[0] * b[0] + b[1] * b[1]) - (p[0] * p[0] + p[1] * p[1]),\n\n );\n\n let ct = (\n\n c[0] - p[0],\n\n c[1] - p[1],\n\n (c[0] * c[0] + c[1] * c[1]) - (p[0] * p[0] + p[1] * p[1]),\n\n );\n\n let i = at.0 * (bt.1 * ct.2 - bt.2 * ct.1);\n", "file_path": "src/geom/mod.rs", "rank": 8, "score": 96508.10930911795 }, { "content": "pub fn orient2d_fast(a: &[f64], b: &[f64], c: &[f64]) -> i8 {\n\n //-- CCW = +1\n\n //-- CW = -1\n\n //-- colinear = 0\n\n let re: f64 = ((a[0] - c[0]) * (b[1] - c[1])) - ((a[1] - c[1]) * (b[0] - c[0]));\n\n if re.abs() < 1e-12 {\n\n return 0;\n\n } else if re > 0.0 {\n\n return 1;\n\n } else {\n\n return -1;\n\n }\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 9, "score": 89928.27823354783 }, { "content": "pub fn orient2d_robust(a: &[f64], b: &[f64], c: &[f64]) -> i8 {\n\n //-- 
CCW = +1\n\n //-- CW = -1\n\n //-- colinear = 0\n\n let re = robust::orient2d(\n\n robust::Coord { x: a[0], y: a[1] },\n\n robust::Coord { x: b[0], y: b[1] },\n\n robust::Coord { x: c[0], y: c[1] },\n\n );\n\n if re == 0.0_f64 {\n\n return 0;\n\n } else if re.is_sign_positive() {\n\n return 1;\n\n } else {\n\n return -1;\n\n }\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 10, "score": 89928.27823354783 }, { "content": "pub fn incircle(a: &[f64], b: &[f64], c: &[f64], p: &[f64], robust_predicates: bool) -> i8 {\n\n //-- p is INSIDE == +1\n\n //-- p is OUTSIDE == -1\n\n //-- p is ONCIRCLE == 0\n\n if robust_predicates == true {\n\n return incircle_robust(&a, &b, &c, &p);\n\n } else {\n\n return incircle_fast(&a, &b, &c, &p);\n\n }\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 11, "score": 88712.37238463396 }, { "content": "fn main() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\npts.push(vec![0.0, 0.0, 11.11]);\n\npts.push(vec![1.0, 0.0, 22.22]);\n\npts.push(vec![1.0, 1.0, 33.33]);\n\npts.push(vec![0.0, 1.0, 44.0]);\n\npts.push(vec![0.5, 0.49, 44.0]);\n\npts.push(vec![0.45, 0.69, 44.0]);\n\npts.push(vec![0.65, 0.49, 44.0]);\n\npts.push(vec![0.75, 0.29, 44.0]);\n\npts.push(vec![1.5, 1.49, 44.0]);\n\npts.push(vec![0.6, 0.2, 44.0]);\n\npts.push(vec![0.45, 0.4, 44.0]);\n\npts.push(vec![0.1, 0.8, 44.0]);\n\n\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n", "file_path": "examples/example9.rs", "rank": 12, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let path = std::env::args()\n\n .skip(1)\n\n .next()\n\n .expect(\"Must provide a path to a LAS/LAZ file\");\n\n let mut reader = Reader::from_path(path).expect(\"Wrong file name\");\n\n\n\n let header = reader.header();\n\n println!(\"Reading LAS file version: {}\", header.version());\n\n println!(\"{} points.\", header.number_of_points());\n\n\n\n // let b = header.bounds();\n\n // println!(\n\n // \"({}, {}, {}) --> ({}, {}, 
{})\",\n\n // b.min.x, b.min.y, b.min.z, b.max.x, b.max.y, b.max.z\n\n // );\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(true);\n\n\n", "file_path": "examples/laz2tin.rs", "rank": 13, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let re = read_xyz_file();\n\n // let vec = match re {\n\n // Ok(vec) => vec,\n\n // Err(error) => panic!(\"Problem with the file {:?}\", error),\n\n // };\n\n\n\n let mut dt = startin::Triangulation::new();\n\n // dt.set_snap_tolerance(0.1);\n\n // dt.set_jump_and_walk(true);\n\n // dt.use_robust_predicates(true);\n\n\n\n let mut duplicates = 0;\n\n for (i, p) in re.into_iter().enumerate() {\n\n // if i == 1149569 {\n\n // println!(\"checking validity\");\n\n // println!(\"is valid: {}\", dt.is_valid());\n\n // }\n\n if i == 1149570 {\n\n println!(\"Oh!\");\n", "file_path": "examples/example8.rs", "rank": 14, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\n pts.push(vec![1.0, 0.0, 7.65]);\n\n pts.push(vec![1.1, 1.1, 33.0]);\n\n pts.push(vec![0.0, 1.0, 33.0]);\n\n pts.push(vec![0.5, 0.9, 33.0]);\n\n pts.push(vec![0.9, 0.5, 33.0]);\n\n pts.push(vec![0.67, 0.66, 33.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n println!(\"{}\", dt.printme(true));\n\n\n\n // let _re = dt.remove(3);\n\n // println!(\"{}\", dt.printme(true));\n\n\n\n println!(\"is 3 removed {}\", dt.is_vertex_removed(3));\n\n // dt.insert_one_pt(1.1, 2.2, 3.3);\n\n // println!(\"is 3 removed {}\", dt.is_vertex_removed(3));\n", "file_path": "examples/example5.rs", "rank": 15, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![20 as f64, 30.0]);\n\n pts.push(vec![20 as f64, 30.0, 1.1]);\n\n pts.push(vec![120.0, 33.0, 12.5]);\n\n pts.push(vec![124.0, 222.0, 7.65]);\n\n pts.push(vec![20.0, 133.0, 21.0]);\n\n 
pts.push(vec![60.0, 60.0, 33.0]);\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n\n\n println!(\"*****\");\n\n println!(\"Number of points in DT: {}\", dt.number_of_vertices());\n\n println!(\"Number of triangles in DT: {}\", dt.number_of_triangles());\n\n\n\n //-- print all the vertices\n\n for (i, each) in dt.all_vertices().iter().enumerate() {\n\n // skip the first one, the infinite vertex\n\n if i > 0 {\n", "file_path": "examples/example2.rs", "rank": 16, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let num_pts = 1000;\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n\n\n let mut rng = rand::thread_rng();\n\n for _i in 0..num_pts {\n\n let x: f64 = rng.gen();\n\n let y: f64 = rng.gen();\n\n pts.push(vec![x * 100.0, y * 100.0, 2.0]);\n\n }\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n println!(\"{}\", dt.printme(false));\n\n\n\n //-- delete 5 vertices on convex hull\n\n let mut total: usize = 0;\n\n loop {\n\n let j: usize = rng.gen_range(1, num_pts);\n", "file_path": "examples/example4.rs", "rank": 17, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let mut found = false;\n\n loop {\n\n if found {\n\n break;\n\n }\n\n let num_pts = 20;\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n\n\n let mut rng = rand::thread_rng();\n\n for _i in 0..num_pts {\n\n let x: f64 = rng.gen();\n\n let y: f64 = rng.gen();\n\n pts.push(vec![x * 100.0, y * 100.0, 2.0]);\n\n }\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n // println!(\"{}\", dt.printme(false));\n", "file_path": "examples/example7.rs", "rank": 19, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n\n\n pts.push(vec![57.42265069953759, 11.302054173605036, 2.0]);\n\n pts.push(vec![92.84366030693992, 43.6916136057666, 2.0]);\n\n pts.push(vec![1.4236330964329302, 64.90700146602387, 2.0]);\n\n 
pts.push(vec![13.285710267579498, 50.189815660581175, 2.0]);\n\n pts.push(vec![15.111123388743696, 78.78533829573448, 2.0]);\n\n pts.push(vec![36.63646690982746, 6.91987548989339, 2.0]);\n\n pts.push(vec![29.637182771593974, 68.65833069674996, 2.0]);\n\n pts.push(vec![11.833562381185224, 17.179817935796258, 2.0]);\n\n pts.push(vec![76.1403374462718, 85.06661188634638, 2.0]);\n\n pts.push(vec![97.68197803600579, 86.72138786481214, 2.0]);\n\n pts.push(vec![57.14429336094553, 85.64704227817887, 2.0]);\n\n pts.push(vec![39.00833936370296, 38.120561196977896, 2.0]);\n\n pts.push(vec![72.92102098683782, 29.345682339583913, 2.0]);\n\n pts.push(vec![11.294274255141302, 3.9130782784767892, 2.0]);\n\n pts.push(vec![0.22779172434063133, 28.09424506294478, 2.0]);\n\n pts.push(vec![90.87939595311902, 69.60923683981981, 2.0]);\n\n pts.push(vec![13.196743373706909, 37.43824255825546, 2.0]);\n", "file_path": "examples/example6.rs", "rank": 20, "score": 85732.5661083467 }, { "content": "fn main() {\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n\n\n let _re = dt.insert_one_pt(1.1, 1.07, 12.5);\n\n let _re = dt.insert_one_pt(11.0, 1.02, 7.65);\n\n let _re = dt.insert_one_pt(11.05, 11.1, 33.0);\n\n let _re = dt.insert_one_pt(1.0, 11.0, 21.0);\n\n\n\n println!(\"{}\", dt.printme(true));\n\n println!(\"Is Delaunay?: {}\", dt.is_valid());\n\n println!(\"# vertices {}\", dt.number_of_vertices());\n\n println!(\"# triangles {}\", dt.number_of_triangles());\n\n\n\n // let c = dt.closest_point(11.0, 11.0);\n\n // let re = dt.remove(c.unwrap());\n\n let re = dt.remove(3);\n\n if re.is_err() == true {\n\n println!(\"!!!Error: {:?}\", re.unwrap_err());\n\n }\n", "file_path": "examples/example3.rs", "rank": 21, "score": 85732.5661083467 }, { "content": "pub fn orient2d(a: &[f64], b: &[f64], c: &[f64], robust_predicates: bool) -> i8 {\n\n //-- CCW = +1\n\n //-- CW = -1\n\n //-- colinear = 0\n\n if robust_predicates == true {\n\n return orient2d_robust(&a, &b, 
&c);\n\n } else {\n\n return orient2d_fast(&a, &b, &c);\n\n }\n\n}\n\n\n", "file_path": "src/geom/mod.rs", "rank": 22, "score": 81803.45105929647 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Clone)]\n\nstruct Link(Vec<usize>);\n\n\n\nimpl Link {\n\n fn new() -> Link {\n\n // Link(Vec::new())\n\n Link(Vec::with_capacity(8))\n\n }\n\n fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n fn is_empty(&self) -> bool {\n\n if self.0.len() == 0 {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n fn add(&mut self, v: usize) {\n\n self.0.push(v);\n\n }\n", "file_path": "src/lib.rs", "rank": 23, "score": 70015.59122746058 }, { "content": "#[test]\n\nfn duplicates() {\n\n let mut dt = startin::Triangulation::new();\n\n let _re = dt.insert_one_pt(2.5, 6.3, 4.5);\n\n let _re = dt.insert_one_pt(2.2, 2.3, 4.5);\n\n let re2 = dt.insert_one_pt(2.2, 2.3, 4.5);\n\n assert!(re2.is_err());\n\n assert_eq!(2, re2.unwrap_err());\n\n assert_eq!(2, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/init_construction.rs", "rank": 24, "score": 62906.919595154206 }, { "content": "#[test]\n\nfn outside_ch() {\n\n let mut dt = four_points();\n\n assert_eq!(None, dt.interpolate_nn(5.0, -0.1));\n\n assert_eq!(None, dt.interpolate_nni(5.0, -0.1));\n\n assert_eq!(None, dt.interpolate_laplace(5.0, -0.1));\n\n assert_eq!(None, dt.interpolate_tin_linear(5.0, -0.1));\n\n assert_eq!(None, dt.interpolate_nni(5.0, 0.0));\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 25, "score": 62906.919595154206 }, { "content": "#[test]\n\nfn one_pt() {\n\n let mut dt = startin::Triangulation::new();\n\n let re = dt.insert_one_pt(2.2, 2.3, 4.5);\n\n assert!(re.is_ok());\n\n assert_eq!(1, re.unwrap());\n\n assert_eq!(1, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/init_construction.rs", "rank": 26, "score": 60000.37844383091 }, { "content": "#[repr(C)]\n\nstruct Star {\n\n pub pt: [f64; 3],\n\n pub 
link: Link,\n\n}\n\n\n\nimpl Star {\n\n pub fn new(x: f64, y: f64, z: f64) -> Star {\n\n let l = Link::new();\n\n Star {\n\n pt: [x, y, z],\n\n link: l,\n\n }\n\n }\n\n pub fn is_deleted(&self) -> bool {\n\n self.link.is_empty()\n\n }\n\n}\n\n\n\n//----------------------\n\n#[repr(C)]\n", "file_path": "src/lib.rs", "rank": 27, "score": 44556.49109806112 }, { "content": "#[test]\n\nfn empty() {\n\n let mut dt = startin::Triangulation::new();\n\n assert_eq!(None, dt.interpolate_nn(1.1, 1.1));\n\n assert_eq!(None, dt.interpolate_nni(1.1, 1.1));\n\n assert_eq!(None, dt.interpolate_laplace(1.1, 1.1));\n\n assert_eq!(None, dt.interpolate_tin_linear(1.1, 1.1));\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 28, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn grid() {\n\n let mut dt = startin::Triangulation::new();\n\n for i in 0..10 {\n\n for j in 0..10 {\n\n let _re = dt.insert_one_pt(i as f64, j as f64, 1.0);\n\n }\n\n }\n\n let _re = dt.remove(1);\n\n let _re = dt.remove(10);\n\n let _re = dt.remove(77);\n\n assert!(dt.is_valid());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 29, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn convexhull() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\n pts.push(vec![1.0, 0.0, 7.65]);\n\n pts.push(vec![2.0, 2.0, 33.0]);\n\n pts.push(vec![0.0, 2.0, 33.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n let _re = dt.remove(3);\n\n assert_eq!(3, dt.number_of_vertices());\n\n assert_eq!(1, dt.number_of_triangles());\n\n let _re = dt.remove(2);\n\n assert_eq!(2, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 30, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn collinear() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\n pts.push(vec![1.0, 0.0, 7.65]);\n\n pts.push(vec![2.0, 0.0, 33.0]);\n\n pts.push(vec![3.0, 
0.0, 33.0]);\n\n pts.push(vec![4.0, 0.0, 33.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n assert_eq!(5, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n let _re = dt.insert_one_pt(3.0, 3.0, 33.0);\n\n assert_eq!(6, dt.number_of_vertices());\n\n assert_eq!(4, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 31, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn simple() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![1.1, 1.07, 12.5]);\n\n pts.push(vec![11.0, 1.02, 7.65]);\n\n pts.push(vec![11.05, 11.1, 33.0]);\n\n pts.push(vec![1.0, 11.0, 21.0]);\n\n pts.push(vec![9.0, 5.0, 21.0]);\n\n pts.push(vec![12.0, 5.1, 21.0]);\n\n pts.push(vec![8.0, 8.0, 21.0]);\n\n pts.push(vec![12.0, 8.1, 21.0]);\n\n pts.push(vec![4.0, 5.15, 33.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n let _re = dt.remove(7);\n\n assert_eq!(8, dt.number_of_vertices());\n\n assert_eq!(8, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 32, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn cocircular() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\n pts.push(vec![1.0, 0.0, 7.65]);\n\n pts.push(vec![1.0, 1.0, 33.0]);\n\n pts.push(vec![0.0, 1.0, 21.0]);\n\n let y: f64 = 0.5 + ((0.5 * 0.5 + 0.5 * 0.5) as f64).sqrt();\n\n pts.push(vec![0.5, y, 21.0]);\n\n pts.push(vec![0.5, 0.5, 33.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n let _re = dt.remove(6);\n\n assert_eq!(5, dt.number_of_vertices());\n\n assert_eq!(3, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 33, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn middle() {\n\n let mut dt = four_points();\n\n assert_eq!(Some(2.5), dt.interpolate_nni(5.0, 5.0));\n\n assert_eq!(Some(2.5), 
dt.interpolate_laplace(5.0, 5.0));\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 34, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn nn() {\n\n let mut dt = four_points();\n\n let _re = dt.insert_one_pt(5.0, 5.0, 11.1);\n\n assert_eq!(Some(11.1), dt.interpolate_nn(5.1, 5.1));\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 35, "score": 41627.39888392128 }, { "content": "#[test]\n\nfn existing_point() {\n\n let mut dt = four_points();\n\n let _re = dt.insert_one_pt(5.0, 5.0, 11.1);\n\n assert_eq!(Some(11.1), dt.interpolate_nn(5.0, 5.0));\n\n assert_eq!(Some(11.1), dt.interpolate_nni(5.0, 5.0));\n\n assert_eq!(Some(11.1), dt.interpolate_laplace(5.0, 5.0));\n\n assert_eq!(Some(11.1), dt.interpolate_tin_linear(5.0, 5.0));\n\n assert_eq!(Some(1.0), dt.interpolate_nn(0.0, 0.0));\n\n assert_eq!(Some(1.0), dt.interpolate_nni(0.0, 0.0));\n\n assert_eq!(Some(1.0), dt.interpolate_laplace(0.0, 0.0));\n\n assert_eq!(Some(1.0), dt.interpolate_tin_linear(0.0, 0.0));\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 36, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn grid() {\n\n let mut dt = startin::Triangulation::new();\n\n for i in 0..10 {\n\n for j in 0..10 {\n\n let _re = dt.insert_one_pt(i as f64, j as f64, 1.0);\n\n }\n\n }\n\n assert!(dt.is_valid());\n\n}\n", "file_path": "tests/init_construction.rs", "rank": 37, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn empty() {\n\n let dt = startin::Triangulation::new();\n\n assert_eq!(0, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n}\n\n\n", "file_path": "tests/init_construction.rs", "rank": 38, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn insert_delete() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n let mut rng = rand::thread_rng();\n\n let size = 10.0_f64;\n\n for _i in 0..100 {\n\n let x: f64 = rng.gen();\n\n let y: f64 = rng.gen();\n\n pts.push(vec![x * size, y * size, 2.0]);\n\n }\n\n let mut dt = 
startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n let _re = dt.insert_one_pt(3.05, 3.1, 33.0);\n\n let re = dt.remove(dt.number_of_vertices() - 1);\n\n assert_eq!(true, re.is_ok());\n\n assert_eq!(100, dt.number_of_vertices());\n\n assert!(dt.is_valid());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 39, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn deletion_impossible() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![1.1, 1.07, 12.5]);\n\n pts.push(vec![11.0, 1.02, 7.65]);\n\n pts.push(vec![11.05, 11.1, 33.0]);\n\n pts.push(vec![1.0, 11.0, 21.0]);\n\n pts.push(vec![9.0, 5.0, 21.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n assert_eq!(Err(\"Cannot remove the infinite vertex\"), dt.remove(0));\n\n assert_eq!(Err(\"Vertex does not exist\"), dt.remove(7));\n\n let _re = dt.remove(5);\n\n assert_eq!(Err(\"Vertex does not exist\"), dt.remove(5));\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 40, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn simple_grid() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 12.5]);\n\n pts.push(vec![1.0, 0.0, 7.65]);\n\n pts.push(vec![1.0, 1.0, 33.0]);\n\n pts.push(vec![0.0, 1.0, 21.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n let _re = dt.remove(1);\n\n let _re = dt.remove(3);\n\n assert_eq!(2, dt.number_of_vertices());\n\n assert_eq!(0, dt.number_of_triangles());\n\n}\n", "file_path": "tests/deletion.rs", "rank": 41, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn tin_linear() {\n\n let dt = four_points();\n\n assert_eq!(Some(1.5), dt.interpolate_tin_linear(5.0, 0.0));\n\n}\n", "file_path": "tests/interpolate.rs", "rank": 42, "score": 40004.41107985037 }, { "content": "#[test]\n\nfn insert_delete_them_many() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n let mut rng = rand::thread_rng();\n\n let size = 
10.0_f64;\n\n for _i in 0..10 {\n\n let x: f64 = rng.gen();\n\n let y: f64 = rng.gen();\n\n pts.push(vec![x * size, y * size, 2.0]);\n\n }\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n assert_eq!(10, dt.number_of_vertices());\n\n for i in 5..10 {\n\n let _re = dt.remove(i);\n\n }\n\n assert_eq!(5, dt.number_of_vertices());\n\n // assert_eq!(0, dt.number_of_triangles());\n\n\n\n assert!(dt.is_valid());\n\n}\n\n\n", "file_path": "tests/deletion.rs", "rank": 43, "score": 38593.38246902109 }, { "content": "fn four_points() -> Triangulation {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![0.0, 0.0, 1.0]);\n\n pts.push(vec![10.0, 0.0, 2.0]);\n\n pts.push(vec![10.0, 10.0, 3.0]);\n\n pts.push(vec![0.0, 10.0, 4.0]);\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n dt\n\n}\n\n\n", "file_path": "tests/interpolate.rs", "rank": 44, "score": 35143.05626725285 }, { "content": "//-- taken from https://stackoverflow.com/questions/40668074/am-i-incorrectly-implementing-intoiterator-for-a-reference-or-is-this-a-rust-bug\n\nstruct Iter<'a>(Box<dyn Iterator<Item = &'a usize> + 'a>);\n\n\n\nimpl<'a> Iterator for Iter<'a> {\n\n type Item = &'a usize;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.next()\n\n }\n\n}\n\n\n\nimpl std::ops::Index<usize> for Link {\n\n type Output = usize;\n\n fn index(&self, idx: usize) -> &usize {\n\n &self.0[idx as usize]\n\n }\n\n}\n\n\n\nimpl fmt::Display for Link {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n // fmt.write_str(\"pt: {}\\n\", self.pt)?;\n\n fmt.write_str(&format!(\"link: {:?}\\n\", self.0))?;\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A triangulation is a collection of Stars, each Star has its (x,y,z)\n\n/// and a Link (an array of adjacent vertices, ordered CCW)\n", "file_path": "src/lib.rs", "rank": 45, "score": 24878.66452946875 }, { "content": "# Web-demo with WebAssembly\n\n\n\nRust can be compiled to 
[WebAssembly](https://www.rust-lang.org/what/wasm), and you can see a demo of some of the possibilities of startin (all computations are done locally and it's fast!).\n\n\n\n[--> web-demo](https://hugoledoux.github.io/startin_wasm/)\n\n\n\n\n\n# Documentation\n\n\n\nYou can read the complete documentation [here](https://docs.rs/startin)\n\n\n\n# Usage\n\n\n\n```rust\n\nextern crate startin;\n\n\n\nfn main() {\n\n let mut pts: Vec<Vec<f64>> = Vec::new();\n\n pts.push(vec![20.0, 30.0, 2.0]);\n\n pts.push(vec![120.0, 33.0, 12.5]);\n\n pts.push(vec![124.0, 222.0, 7.65]);\n\n pts.push(vec![20.0, 133.0, 21.0]);\n\n pts.push(vec![60.0, 60.0, 33.0]);\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.insert(&pts);\n\n\n\n println!(\"*****\");\n\n println!(\"Number of points in DT: {}\", dt.number_of_vertices());\n\n println!(\"Number of triangles in DT: {}\", dt.number_of_triangles());\n\n\n\n //-- print all the vertices\n\n for (i, each) in dt.all_vertices().iter().enumerate() {\n\n // skip the first one, the infinite vertex\n\n if i > 0 {\n\n println!(\"#{}: ({:.3}, {:.3}, {:.3})\", i, each[0], each[1], each[2]);\n\n }\n\n }\n\n\n\n //-- insert a new vertex\n\n let re = dt.insert_one_pt(22.2, 33.3, 4.4);\n\n match re {\n\n Ok(_v) => println!(\"Inserted new point\"),\n\n Err(v) => println!(\"Duplicate of vertex #{}, not inserted\", v),\n\n }\n\n //-- remove it\n\n let re = dt.remove(6);\n\n if re.is_err() == true {\n\n println!(\"!!! 
Deletion error: {:?}\", re.unwrap_err());\n\n } else {\n\n println!(\"Deleted vertex\");\n\n }\n\n\n\n //-- get the convex hull\n\n let ch = dt.convex_hull();\n\n println!(\"Convex hull: {:?}\", ch);\n\n\n\n //-- fetch triangle containing (x, y)\n\n let re = dt.locate(50.0, 50.0);\n\n if re.is_some() {\n\n let t = re.unwrap();\n\n println!(\"The triangle is {}\", t);\n\n assert!(dt.is_triangle(&t));\n\n } else {\n\n println!(\"Outside convex hull\");\n", "file_path": "README.md", "rank": 46, "score": 14643.881679368687 }, { "content": "# startin\n\n\n\n[![crates.io](https://img.shields.io/crates/v/startin.svg)](https://crates.io/crates/startin)\n\n[![PyPI version](https://badge.fury.io/py/startinpy.svg)](https://badge.fury.io/py/startinpy)\n\n\n\nA Delaunay triangulator where the input are 2.5D points, the DT is computed in 2D but the elevation of the vertices are kept.\n\nThis is used mostly for the modelling of terrains.\n\n\n\nThe construction algorithm used is an incremental insertion based on flips, and the data structure is a cheap implementation of the star-based structure defined in [Blandford et al. (2003)](https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.9.6823), cheap because the link of each vertex is stored a simple array (`Vec`) and not in an optimised blob like they did.\n\nIt results in a pretty fast library (comparison will come at some point), but it uses more space than the optimised one.\n\n\n\nThe deletion of a vertex is also possible. The algorithm implemented is a modification of the one of [Mostafavi, Gold, and Dakowicz (2003)](https://doi.org/10.1016/S0098-3004(03)00017-7). The ears are filled by flipping, so it's in theory more robust. \n\nI have also extended the algorithm to allow the deletion of vertices on the boundary of the convex hull. 
\n\nThe algorithm is sub-optimal, but in practice the number of neighbours of a given vertex in a DT is only 6, so it doesn't really matter.\n\n\n\nRobust arithmetic for the geometric predicates are used ([Shewchuk's predicates](https://www.cs.cmu.edu/~quake/robust.html), well the [Rust port of the code (robust crate)](https://crates.io/crates/robust)), so startin is robust and shouldn't crash (touch wood). \n\n\n\nThere are a few interpolation functions implemented (based on the DT): (1) nearest-neighbour, (2) linear in TIN, (3) Laplace.\n\n\n\n\n\n# Python bindings\n\n\n\nIf you prefer Python, I made bindings: [https://github.com/hugoledoux/startinpy/](https://github.com/hugoledoux/startinpy/)\n\n\n\n\n", "file_path": "README.md", "rank": 47, "score": 14629.892280847884 }, { "content": " }\n\n\n\n //-- some stats\n\n println!(\"Number of points in DT: {}\", dt.number_of_vertices());\n\n println!(\"Number of triangles in DT: {}\", dt.number_of_triangles());\n\n\n\n //-- save the triangulation in geojson for debug purposes\n\n //-- do not attempt on large DT\n\n let _re = dt.write_geojson(\"/home/elvis/tr.geojson\".to_string());\n\n}\n\n```\n", "file_path": "README.md", "rank": 48, "score": 14627.69654489641 }, { "content": "\n\n\n\n# Changelog\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [0.5.2] - 2021-12-16\n\n### Changed\n\n- fixed a bug that returned the wrong nearest neighbour (and thus wrong interpolate_nn() results)\n\n- improve the geojson output, now the id+z of the vertices are saved\n\n\n\n## [0.5.1] - 2021-06-10\n\n### Changed\n\n- fixed a small bug that arised sometimes when deleting a vertex on the convex hull\n\n\n\n## [0.5.0] - 2021-04-15\n\n### Added\n\n- interplation with natural neighbour (nni, or Sibson's method) is added. 
\n\n- saving of the triangulation to GeoJSON is added\n\n### Changed\n\n- Delete the robust arithmetic code copied from spades, and use Rust crate \"robust\"\n\n- interpolation functions are more robust (if no DT exists, if estimation at known vertex)\n\n\n\n## [0.4.9] - 2021-03-07\n\n### Added\n\n- Added basic C interface, so startin can be called from other languages (such as C or Julia). Build with `cargo build --features c_api`.\n\n\n\n## [0.4.8] - 2021-02-05 \n\n### Changed\n\n- Fix a small bug in walk, that seemed to have no real effect (except slowly down a bit)\n\n\n\n## [0.4.7] - 2019-11-20\n\n### Changed\n\n- Fix the bug about predicates.rs raised by Martijn Meijers (https://github.com/Stoeoef/spade/issues/48)\n\n\n\n## [0.4.6] - 2019-08-22\n\n### Added\n\n- 3 interpolation functions, based on the DT, added: nearest-neighbour, linear in TIN, Laplace.\n\n### Changed\n\n- fixed a bug with walking that sometimes crashed when point outside convex hull were inserted\n\n- the OBJ writer is now about 1283X faster\n\n\n\n\n\n## [0.4.5] - 2019-07-30\n\n### Changed\n\n- closest_vertex() is now returning the real natural neighbour, and not an approximation\n\n\n\n\n\n## [0.4.4] - 2019-07-29\n\n### Changed\n\n- fixed a few bugs, most important is when walking when starting vertex was infinity vertex, now no crash\n\n- `all_edges()` function to draw faster, used by startin_wasm project\n\n\n\n\n\n## [0.4.3] - 2019-07-26\n", "file_path": "changelog.md", "rank": 49, "score": 14625.240727255872 }, { "content": "### Changed\n\n- minor improvements to the API, alignment with CGAL SurfaceMesh functions (more or less)\n\n- better doc\n\n\n\n\n\n## [0.4.2] - 2019-06-12\n\n### Changed\n\n- predicates.c is not used anymore. 
The Rust port of it (https://github.com/Stoeoef/spade/blob/master/src/exactpred.rs) is used.\n\n- dependencies for the examples are not used/listed for the library anymore.\n\n\n\n\n\n## [0.4.1] - 2019-06-11\n\n### Changed\n\n- predicates.c has sys removed from include for time.h\n\n- jump-and-walk is not the default anymore, walk starts from last one (no randomness by default thus)\n\n\n\n\n\n## [0.4.0] - 2019-06-06\n\n### Added\n\n- Deletion of vertices now possible, even those on the boundary of the convex hull\n\n- Integration tests available in the /tests/ folder\n\n\n\n## [0.3.1] - 2019-05-06\n\n### Changed\n\n- more examples\n\n- fix of readme.md and a few things for crates.io\n\n\n\n## [0.3.0] - 2019-05-02\n\n### Added\n\n- first release and upload to crates.io\n\n\n", "file_path": "changelog.md", "rank": 50, "score": 14623.433769599182 }, { "content": " match re {\n\n Ok(_x) => continue,\n\n Err(_e) => duplicates = duplicates + 1,\n\n }\n\n }\n\n }\n\n }\n\n\n\n //-- insert_one_pt\n\n pub fn insert_one_pt(&mut self, px: f64, py: f64, pz: f64) -> Result<usize, usize> {\n\n if self.is_init == false {\n\n return self.insert_one_pt_init_phase(px, py, pz);\n\n }\n\n //-- walk\n\n let p: [f64; 3] = [px, py, pz];\n\n let tr = self.walk(&p);\n\n // println!(\"STARTING TR: {}\", tr);\n\n if geom::distance2d_squared(&self.stars[tr.v[0]].pt, &p) <= (self.snaptol * self.snaptol) {\n\n return Err(tr.v[0]);\n\n }\n", "file_path": "src/lib.rs", "rank": 52, "score": 24.40801429276687 }, { "content": " let re = Triangulation::insert_one_pt(t, pts[i], pts[i + 1], pts[i + 2]);\n\n match re {\n\n Ok(_) => continue,\n\n Err(_) => duplicates = duplicates + 1,\n\n }\n\n }\n\n return duplicates;\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn interpolate_nn(ptr: *mut Triangulation, px: c_double, py: c_double) -> c_double {\n\n let t = unsafe { ptr.as_mut().unwrap() };\n\n let re = Triangulation::interpolate_nn(t, px, py);\n\n return re.unwrap_or_else(|| 
std::f64::NAN);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn interpolate_linear(\n\n ptr: *mut Triangulation,\n\n px: c_double,\n", "file_path": "src/c_interface.rs", "rank": 53, "score": 20.808459691998685 }, { "content": " // dt.is_valid_flat();\n\n // dt.is_valid_double_zero();\n\n // let ch = dt.convex_hull();\n\n // println!(\"{:?}\", ch);\n\n }\n\n let re = dt.insert_one_pt(p[0], p[1], p[2]);\n\n match re {\n\n // Ok(_x) => println!(\"{:?}\", dt),\n\n Ok(_x) => continue,\n\n Err(_e) => duplicates = duplicates + 1,\n\n // Err(e) => println!(\"Duplicate point! Not inserted {}\", dt.get_point(e)),\n\n };\n\n }\n\n\n\n if duplicates > 0 {\n\n println!(\"Duplicates? {} of them.\\n\", duplicates);\n\n } else {\n\n println!(\"Duplicates? none.\\n\");\n\n }\n\n\n", "file_path": "examples/example8.rs", "rank": 54, "score": 20.52987216544182 }, { "content": " centres.push(c2);\n\n // println!(\"centres: {:?}\", centres);\n\n //-- copy first to make circular\n\n centres.push(vec![centres[0][0], centres[0][1]]);\n\n // println!(\"{:?}\", centres);\n\n let mut totalarea = 0.0_f64;\n\n for c in centres.windows(2) {\n\n totalarea += geom::area_triangle(&self.stars[v].pt, &c[0], &c[1]);\n\n }\n\n Some(totalarea)\n\n }\n\n\n\n pub fn voronoi_cell_area_ch_use_bound(\n\n &self,\n\n v: usize,\n\n newc1: &[f64],\n\n newc2: &[f64],\n\n ) -> Option<f64> {\n\n let mut centres: Vec<Vec<f64>> = Vec::new();\n\n let mut tmp = self.stars[v].link.clone();\n", "file_path": "src/lib.rs", "rank": 55, "score": 19.46055379051458 }, { "content": "pub extern \"C\" fn insert_one_pt(\n\n ptr: *mut Triangulation,\n\n px: c_double,\n\n py: c_double,\n\n pz: c_double,\n\n) -> c_int {\n\n let t = unsafe { ptr.as_mut().unwrap() };\n\n let re = Triangulation::insert_one_pt(t, px, py, pz);\n\n match re {\n\n Ok(_) => return 0,\n\n Err(_) => return 1,\n\n };\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn insert(ptr: *mut Triangulation, length: c_int, arr: *mut c_double) -> c_int {\n\n let mut 
duplicates = 0;\n\n let t = unsafe { ptr.as_mut().unwrap() };\n\n let pts = unsafe { std::slice::from_raw_parts_mut(arr, length as usize) };\n\n for i in (0..length as usize).step_by(3) {\n", "file_path": "src/c_interface.rs", "rank": 56, "score": 19.132045262291733 }, { "content": " self.robust_predicates = b;\n\n }\n\n\n\n // why not use ndarray or similar here?\n\n pub fn insert(&mut self, pts: &Vec<Vec<f64>>) {\n\n let mut duplicates = 0;\n\n for each in pts {\n\n if (each.len() < 2) || (each.len() > 3) {\n\n panic!(\n\n \"Point {:?} should be 2D or 3D (and is now {}D).\",\n\n each,\n\n each.len()\n\n );\n\n } else {\n\n let re;\n\n if each.len() == 2 {\n\n re = self.insert_one_pt(each[0], each[1], 0.0);\n\n } else {\n\n re = self.insert_one_pt(each[0], each[1], each[2]);\n\n }\n", "file_path": "src/lib.rs", "rank": 57, "score": 17.68421672436112 }, { "content": "//! # Web-demo with WebAssembly\n\n//!\n\n//! Rust can be compiled easily to [WebAssembly](https://www.rust-lang.org/what/wasm), and you see a demo of the possibilities of startin (all computations are done locally and it's fast!).\n\n//!\n\n//! [web-demo](https://hugoledoux.github.io/startin_wasm/www/dist/)\n\n//!\n\n//! # Usage\n\n//!\n\n//! ```rust\n\n//! extern crate startin;\n\n//!\n\n//! fn main() {\n\n//! let mut pts: Vec<Vec<f64>> = Vec::new();\n\n//! pts.push(vec![20.0, 30.0, 2.0]);\n\n//! pts.push(vec![120.0, 33.0, 12.5]);\n\n//! pts.push(vec![124.0, 222.0, 7.65]);\n\n//! pts.push(vec![20.0, 133.0, 21.0]);\n\n//! pts.push(vec![60.0, 60.0, 33.0]);\n\n//!\n\n//! 
let mut dt = startin::Triangulation::new();\n", "file_path": "src/lib.rs", "rank": 58, "score": 17.490836379645124 }, { "content": " // println!(\"centres: {:?}\", centres);\n\n //-- copy first to make circular\n\n centres.push(vec![centres[0][0], centres[0][1]]);\n\n // println!(\"{:?}\", centres);\n\n let mut totalarea = 0.0_f64;\n\n for c in centres.windows(2) {\n\n totalarea += geom::area_triangle(&self.stars[v].pt, &c[0], &c[1]);\n\n }\n\n Some(totalarea)\n\n }\n\n\n\n pub fn voronoi_cell_area(&self, v: usize) -> Option<f64> {\n\n if self.vertex_exists(v) == false {\n\n return None;\n\n }\n\n if self.is_vertex_convex_hull(v) == true {\n\n return Some(f64::INFINITY);\n\n }\n\n //-- process non-CH points that exists\n\n let mut centres: Vec<Vec<f64>> = Vec::new();\n", "file_path": "src/lib.rs", "rank": 59, "score": 16.987476162951538 }, { "content": " println!(\"#{}: ({:.3}, {:.3}, {:.3})\", i, each[0], each[1], each[2]);\n\n }\n\n }\n\n\n\n //-- insert a new vertex\n\n let re = dt.insert_one_pt(22.2, 33.3, 4.4);\n\n match re {\n\n Ok(_v) => println!(\"Inserted new point\"),\n\n Err(v) => println!(\"Duplicate of vertex #{}, not inserted\", v),\n\n }\n\n //-- remove it\n\n let re = dt.remove(6);\n\n if re.is_err() == true {\n\n println!(\"!!! 
Deletion error: {:?}\", re.unwrap_err());\n\n } else {\n\n println!(\"Deleted vertex\");\n\n }\n\n\n\n //-- get the convex hull\n\n let ch = dt.convex_hull();\n", "file_path": "examples/example2.rs", "rank": 60, "score": 16.92673036977668 }, { "content": " s.push_str(&format!(\"]\\n\"));\n\n if withxyz == true {\n\n s.push_str(&format!(\"\\t{:?}\\n\", self.stars[i].pt));\n\n }\n\n }\n\n s.push_str(\"**********\\n\");\n\n s\n\n }\n\n\n\n pub fn voronoi_cell_area_ch_add_bound(\n\n &self,\n\n v: usize,\n\n newc1: &mut [f64],\n\n newc2: &mut [f64],\n\n ) -> Option<f64> {\n\n let mut centres: Vec<Vec<f64>> = Vec::new();\n\n let mut tmp = self.stars[v].link.clone();\n\n tmp.infinite_first();\n\n tmp.delete(0);\n\n let mut l: Vec<usize> = Vec::new();\n", "file_path": "src/lib.rs", "rank": 61, "score": 15.971900492539994 }, { "content": "use std::fs::File;\n\nuse std::io::Write;\n\n\n\nuse serde_json::{to_value, Map};\n\nuse std::collections::HashMap;\n\n\n\nuse geojson::{Feature, FeatureCollection, Geometry, Value};\n\n\n\nextern crate rand;\n\n\n\n/// A Triangle is a triplet of indices\n\npub struct Triangle {\n\n pub v: [usize; 3],\n\n}\n\n\n\nimpl Triangle {\n\n /// Checks whether a Triangle is \"infinite\",\n\n /// ie if one its vertices is the infinite vertex\n\n fn is_infinite(&self) -> bool {\n\n if self.v[0] == 0 || self.v[1] == 0 || self.v[2] == 0 {\n", "file_path": "src/lib.rs", "rank": 63, "score": 14.334278020576814 }, { "content": " pts.push(vec![31.851227544325145, 51.80273049754918, 2.0]);\n\n pts.push(vec![0.6726961194112979, 43.93675937945696, 2.0]);\n\n pts.push(vec![30.932824361776735, 66.6221847617953, 2.0]);\n\n\n\n let mut dt = startin::Triangulation::new();\n\n dt.set_jump_and_walk(false);\n\n dt.insert(&pts);\n\n println!(\"{}\", dt.printme(true));\n\n // let _re = dt.write_obj(\"/Users/hugo/temp/0.obj\".to_string(), true);\n\n\n\n let _re = dt.remove(11);\n\n // println!(\"{}\", dt.printme(true));\n\n\n\n assert!(dt.is_valid());\n\n // let _re = 
dt.write_obj(\"/Users/hugo/temp/1.obj\".to_string(), true);\n\n\n\n // let _re = dt.insert_one_pt(1.5, 1.5, 33.0);\n\n // println!(\"{}\", dt.printme(true));\n\n // assert!(dt.is_valid());\n\n}\n", "file_path": "examples/example6.rs", "rank": 64, "score": 14.31796749142022 }, { "content": " // println!(\"is 4 removed {}\", dt.is_vertex_removed(4));\n\n\n\n assert!(dt.is_valid());\n\n\n\n let a = dt.get_point(3);\n\n if a.is_some() == true {\n\n println!(\"point {:?}\", a.unwrap());\n\n }\n\n\n\n let re = dt.locate(50.0, 50.0);\n\n match re {\n\n Some(x) => println!(\"Triangle: {}\", x),\n\n None => println!(\"No triangle found, outside of the CH.\"),\n\n }\n\n\n\n let re = dt.closest_point(1.1, 1.11);\n\n match re {\n\n Some(x) => println!(\"Point: {}\", x),\n\n None => println!(\"Outside of the CH.\"),\n\n }\n\n\n\n // let _re = dt.insert_one_pt(1.5, 1.5, 33.0);\n\n // println!(\"{}\", dt.printme(true));\n\n // assert!(dt.is_valid());\n\n // dt.write_obj(\"/Users/hugo/temp/0.obj\".to_string(), true);\n\n}\n", "file_path": "examples/example5.rs", "rank": 65, "score": 14.307505599762472 }, { "content": " py: c_double,\n\n) -> c_double {\n\n let t = unsafe { ptr.as_mut().unwrap() };\n\n let re = Triangulation::interpolate_tin_linear(t, px, py);\n\n return re.unwrap_or_else(|| std::f64::NAN);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn interpolate_laplace(\n\n ptr: *mut Triangulation,\n\n px: c_double,\n\n py: c_double,\n\n) -> c_double {\n\n let t = unsafe { ptr.as_mut().unwrap() };\n\n let re = Triangulation::interpolate_laplace(t, px, py);\n\n return re.unwrap_or_else(|| std::f64::NAN);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn write_obj(ptr: *mut Triangulation, s: *const c_char) -> c_int {\n", "file_path": "src/c_interface.rs", "rank": 66, "score": 13.918411846422728 }, { "content": "// To run:\n\n// $ ./example1 < ../../data/samples2.xyz\n\n\n\n#![allow(dead_code)]\n\n\n\nextern crate startin;\n\n\n\nuse std::fs::File;\n\nuse std::io::BufRead;\n\nuse 
std::io::BufReader;\n\n\n", "file_path": "examples/example8.rs", "rank": 67, "score": 13.862850638958456 }, { "content": " }\n\n let re = self.insert_one_pt(px, py, 0.);\n\n let pi: usize;\n\n if re.is_ok() {\n\n pi = re.unwrap();\n\n } else {\n\n //-- return the value of the vertex if closer than self.snaptol\n\n return Some(self.stars[re.unwrap_err()].pt[2]);\n\n }\n\n let l = &self.stars[pi].link;\n\n let mut centres: Vec<Vec<f64>> = Vec::new();\n\n for (i, v) in l.iter().enumerate() {\n\n let j = l.next_index(i);\n\n centres.push(geom::circle_centre(\n\n &self.stars[pi].pt,\n\n &self.stars[*v].pt,\n\n &self.stars[l[j]].pt,\n\n ));\n\n }\n\n let mut weights: Vec<f64> = Vec::new();\n", "file_path": "src/lib.rs", "rank": 68, "score": 13.861502263492719 }, { "content": "extern crate las;\n\nextern crate startin;\n\n\n\nuse las::{Read, Reader};\n\n\n", "file_path": "examples/laz2tin.rs", "rank": 69, "score": 13.844866448169732 }, { "content": " if dt.is_vertex_convex_hull(j) == true {\n\n let re = dt.remove(j);\n\n if re.is_err() == true {\n\n println!(\"!!!Error: {:?}\", re.unwrap_err());\n\n }\n\n total = total + 1;\n\n if total == 10 {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n // //-- insert 50 vertices randomly\n\n // for _i in 0..50 {\n\n // let x: f64 = rng.gen();\n\n // let y: f64 = rng.gen();\n\n // let _re = dt.insert_one_pt(x, y, 1.1);\n\n // }\n\n // println!(\"{}\", dt.printme(false));\n\n\n\n println!(\"Is Delaunay?: {}\", dt.is_valid());\n\n println!(\"# vertices {}\", dt.number_of_vertices());\n\n println!(\"# triangles {}\", dt.number_of_triangles());\n\n}\n", "file_path": "examples/example4.rs", "rank": 70, "score": 13.611794720153473 }, { "content": "extern crate startin;\n\nuse rand::prelude::*;\n\n\n", "file_path": "examples/example4.rs", "rank": 71, "score": 13.352435326557636 }, { "content": "extern crate startin;\n\nuse rand::prelude::*;\n\n\n", "file_path": "examples/example7.rs", "rank": 72, "score": 13.352435326557636 }, { "content": " }\n\n 
}\n\n // println!(\"weights {:?}\", weights);\n\n let mut z: f64 = 0.0;\n\n for (i, nn) in nns.iter().enumerate() {\n\n z += weights[i] * self.stars[*nn].pt[2];\n\n }\n\n Some(z / newarea)\n\n }\n\n\n\n /// Interpolation with Laplace (http://dilbert.engr.ucdavis.edu/~suku/nem/index.html)\n\n /// (variation of nni with distances instead of stolen areas; faster in practice)\n\n /// None if outside the convex hull, other the value\n\n pub fn interpolate_laplace(&mut self, px: f64, py: f64) -> Option<f64> {\n\n //-- cannot interpolation if no TIN\n\n if self.is_init == false {\n\n return None;\n\n }\n\n if self.locate(px, py).is_none() {\n\n return None;\n", "file_path": "src/lib.rs", "rank": 73, "score": 13.316219507191837 }, { "content": "use crate::Triangulation;\n\nuse std::ffi::CStr;\n\nuse std::os::raw::c_char;\n\nuse std::os::raw::c_double;\n\nuse std::os::raw::c_int;\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn new() -> *mut Triangulation {\n\n let x = Box::new(Triangulation::new());\n\n let ptr = Box::into_raw(x);\n\n return ptr;\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn destroy(ptr: *mut Triangulation) -> c_int {\n\n unsafe { drop(Box::from_raw(ptr)) };\n\n return 0;\n\n}\n\n\n\n#[no_mangle]\n", "file_path": "src/c_interface.rs", "rank": 74, "score": 13.271214436664245 }, { "content": "//! dt.insert(&pts);\n\n//!\n\n//! println!(\"*****\");\n\n//! println!(\"Number of points in DT: {}\", dt.number_of_vertices());\n\n//! println!(\"Number of triangles in DT: {}\", dt.number_of_triangles());\n\n//!\n\n//! //-- print all the vertices\n\n//! for (i, each) in dt.all_vertices().iter().enumerate() {\n\n//! // skip the first one, the infinite vertex\n\n//! if i > 0 {\n\n//! println!(\"#{}: ({:.3}, {:.3}, {:.3})\", i, each[0], each[1], each[2]);\n\n//! }\n\n//! }\n\n//!\n\n//! //-- insert a new vertex\n\n//! let re = dt.insert_one_pt(22.2, 33.3, 4.4);\n\n//! match re {\n\n//! Ok(_v) => println!(\"Inserted new point\"),\n\n//! 
Err(v) => println!(\"Duplicate of vertex #{}, not inserted\", v),\n\n//! }\n", "file_path": "src/lib.rs", "rank": 75, "score": 13.1769155705375 }, { "content": " return Some(self.stars[re.unwrap_err()].pt[2]);\n\n }\n\n let mut addedcentres: HashMap<usize, Vec<Vec<f64>>> = HashMap::new();\n\n let nns = self.adjacent_vertices_to_vertex(pi).unwrap();\n\n let mut weights: Vec<f64> = Vec::new();\n\n for nn in &nns {\n\n let mut a = self.voronoi_cell_area(*nn).unwrap();\n\n if a != f64::INFINITY {\n\n weights.push(a);\n\n } else {\n\n let mut c1 = vec![0.0, 1.0];\n\n let mut c2 = vec![0.0, 1.0];\n\n a = self\n\n .voronoi_cell_area_ch_add_bound(*nn, &mut c1, &mut c2)\n\n .unwrap();\n\n addedcentres.insert(*nn, vec![c1, c2]);\n\n weights.push(a);\n\n }\n\n }\n\n let newarea = self.voronoi_cell_area(pi).unwrap();\n", "file_path": "src/lib.rs", "rank": 76, "score": 12.897340809053093 }, { "content": " for laspt in reader.points() {\n\n let p = laspt.unwrap();\n\n let _re = dt.insert_one_pt(p.x, p.y, p.z);\n\n }\n\n\n\n println!(\"Number of points in DT: {}\", dt.number_of_vertices());\n\n println!(\"Number of triangles in DT: {}\", dt.number_of_triangles());\n\n\n\n // let pathout = \"/Users/hugo/temp/out.obj\";\n\n // println!(\"Writing OBJ file...\");\n\n // let re = dt.write_obj(pathout.to_string(), false);\n\n // match re {\n\n // Ok(_x) => println!(\"--> OBJ output saved to: {}\", pathout),\n\n // Err(_x) => println!(\"ERROR: path {} doesn't exist, abort.\", pathout),\n\n // }\n\n}\n", "file_path": "examples/laz2tin.rs", "rank": 77, "score": 12.468088845530506 }, { "content": " // let y: f64 = rng.gen();\n\n // let _re = dt.insert_one_pt(x, y, 1.1);\n\n // }\n\n // println!(\"{}\", dt.printme(false));\n\n\n\n // println!(\"Is Delaunay?: {}\", dt.is_valid());\n\n // println!(\"# vertices {}\", dt.number_of_vertices());\n\n // println!(\"# triangles {}\", dt.number_of_triangles());\n\n }\n\n}\n", "file_path": "examples/example7.rs", "rank": 78, "score": 12.41656762100407 
}, { "content": " pub fn write_obj(&self, path: String, twod: bool) -> std::io::Result<()> {\n\n let trs = self.all_triangles();\n\n let mut f = File::create(path)?;\n\n let mut s = String::new();\n\n for i in 1..self.stars.len() {\n\n if self.stars[i].is_deleted() == true {\n\n continue;\n\n }\n\n if twod == true {\n\n s.push_str(&format!(\n\n \"v {} {} {}\\n\",\n\n self.stars[i].pt[0], self.stars[i].pt[1], 0\n\n ));\n\n } else {\n\n s.push_str(&format!(\n\n \"v {} {} {}\\n\",\n\n self.stars[i].pt[0], self.stars[i].pt[1], self.stars[i].pt[2]\n\n ));\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 79, "score": 12.365538799880623 }, { "content": " }\n\n\n\n /// Interpolation: linear in TIN\n\n /// None if outside the convex hull, other the value\n\n pub fn interpolate_tin_linear(&self, px: f64, py: f64) -> Option<f64> {\n\n //-- cannot interpolation if no TIN\n\n if self.is_init == false {\n\n return None;\n\n }\n\n let p: [f64; 3] = [px, py, 0.0];\n\n let tr = self.walk(&p);\n\n if tr.is_infinite() {\n\n return None;\n\n }\n\n let a0: f64 = geom::area_triangle(&p, &self.stars[tr.v[1]].pt, &self.stars[tr.v[2]].pt);\n\n let a1: f64 = geom::area_triangle(&p, &self.stars[tr.v[2]].pt, &self.stars[tr.v[0]].pt);\n\n let a2: f64 = geom::area_triangle(&p, &self.stars[tr.v[0]].pt, &self.stars[tr.v[1]].pt);\n\n let mut total = 0.;\n\n total += self.stars[tr.v[0]].pt[2] * a0;\n\n total += self.stars[tr.v[1]].pt[2] * a1;\n", "file_path": "src/lib.rs", "rank": 80, "score": 12.355376340844732 }, { "content": " total += self.stars[tr.v[2]].pt[2] * a2;\n\n Some(total / (a0 + a1 + a2))\n\n }\n\n\n\n /// Interpolation with natural neighbour interpolation (nni)\n\n pub fn interpolate_nni(&mut self, px: f64, py: f64) -> Option<f64> {\n\n //-- cannot interpolation if no TIN\n\n if self.is_init == false {\n\n return None;\n\n }\n\n //-- no extrapolation\n\n if self.locate(px, py).is_none() {\n\n return None;\n\n }\n\n let re = self.insert_one_pt(px, py, 0.);\n\n let pi: usize;\n\n if 
re.is_ok() {\n\n pi = re.unwrap();\n\n } else {\n\n //-- return the value of the vertex if closer than self.snaptol\n", "file_path": "src/lib.rs", "rank": 81, "score": 12.338526807507918 }, { "content": " let c: Vec<f64> = vec![centres[0][0] + a[1], centres[0][1] - a[0], 0.0];\n\n newc1[0] = c[0];\n\n newc1[1] = c[1];\n\n // println!(\"c1: {:?}\", c);\n\n centres.insert(0, c);\n\n //-- replace 0 by bisector, step2\n\n let last = tmp.last().unwrap();\n\n a = [\n\n self.stars[*last].pt[0] - self.stars[v].pt[0],\n\n self.stars[*last].pt[1] - self.stars[v].pt[1],\n\n ];\n\n // mid = [\n\n // self.stars[v].pt[0] + (a[0] / 2.0),\n\n // self.stars[v].pt[1] + (a[1] / 2.0),\n\n // ];\n\n let lastc = centres.last().unwrap();\n\n let c2: Vec<f64> = vec![lastc[0] - a[1], lastc[1] + a[0], 0.0];\n\n newc2[0] = c2[0];\n\n newc2[1] = c2[1];\n\n // println!(\"c2: {:?}\", c2);\n", "file_path": "src/lib.rs", "rank": 82, "score": 12.236559507980346 }, { "content": " {\n\n println!(\"NOT DELAUNAY FFS!\");\n\n println!(\"{} with {}\", tr, i);\n\n re = false\n\n }\n\n }\n\n }\n\n re\n\n }\n\n\n\n fn is_valid_ch_convex(&self) -> bool {\n\n let mut re = true;\n\n let ch = self.convex_hull();\n\n for i in 0..ch.len() {\n\n if geom::orient2d(\n\n &self.stars[ch[i % ch.len()]].pt,\n\n &self.stars[ch[(i + 1) % ch.len()]].pt,\n\n &self.stars[ch[(i + 2) % ch.len()]].pt,\n\n self.robust_predicates,\n\n ) == -1\n", "file_path": "src/lib.rs", "rank": 83, "score": 12.152653375694129 }, { "content": " }\n\n self.cur = l - 1;\n\n if self.is_init == true {\n\n //-- insert the previous vertices in the dt\n\n for j in 1..(l - 3) {\n\n let tr = self.walk(&self.stars[j].pt);\n\n // println!(\"found tr: {}\", tr);\n\n self.flip13(j, &tr);\n\n self.update_dt(j);\n\n }\n\n }\n\n Ok(self.cur)\n\n }\n\n\n\n /// Set a snap tolerance when inserting new points: if the newly inserted\n\n /// one is closer than snap_tolerance to another one, then it is not inserted.\n\n /// Avoids having very close vertices (like at 
0.00007mm)\n\n /// Default is 0.001unit (thus 1mm for most datasets).\n\n pub fn set_snap_tolerance(&mut self, snaptol: f64) -> f64 {\n\n if snaptol > 0.0 {\n", "file_path": "src/lib.rs", "rank": 84, "score": 12.033652856779034 }, { "content": "//! //-- remove it\n\n//! let re = dt.remove(6);\n\n//! if re.is_err() == true {\n\n//! println!(\"!!! Deletion error: {:?}\", re.unwrap_err());\n\n//! } else {\n\n//! println!(\"Deleted vertex\");\n\n//! }\n\n//!\n\n//! //-- get the convex hull\n\n//! let ch = dt.convex_hull();\n\n//! println!(\"Convex hull: {:?}\", ch);\n\n//!\n\n//! //-- fetch triangle containing (x, y)\n\n//! let re = dt.locate(50.0, 50.0);\n\n//! if re.is_some() {\n\n//! let t = re.unwrap();\n\n//! println!(\"The triangle is {}\", t);\n\n//! assert!(dt.is_triangle(&t));\n\n//! } else {\n\n//! println!(\"Outside convex hull\");\n", "file_path": "src/lib.rs", "rank": 85, "score": 11.952083737388765 }, { "content": " let mut re = true;\n\n if v >= self.stars.len() || self.is_vertex_removed(v) == true {\n\n re = false;\n\n }\n\n re\n\n }\n\n\n\n /// Interpolation: nearest/closest neighbour\n\n /// None if outside the convex hull, other the value\n\n pub fn interpolate_nn(&self, px: f64, py: f64) -> Option<f64> {\n\n //-- cannot interpolation if no TIN\n\n if self.is_init == false {\n\n return None;\n\n }\n\n let re = self.closest_point(px, py);\n\n if re.is_some() {\n\n Some(self.stars[re.unwrap()].pt[2])\n\n } else {\n\n None\n\n }\n", "file_path": "src/lib.rs", "rank": 86, "score": 11.733783940996911 }, { "content": " self.stars[tr.v[2]].link.delete(tr.v[1]);\n\n //-- make 2 triangles to return (to stack)\n\n let ret0 = Triangle {\n\n v: [tr.v[0], tr.v[1], opposite],\n\n };\n\n let ret1 = Triangle {\n\n v: [tr.v[0], opposite, tr.v[2]],\n\n };\n\n (ret0, ret1)\n\n }\n\n\n\n fn get_opposite_vertex(&self, tr: &Triangle) -> usize {\n\n self.stars[tr.v[2]].link.get_next_vertex(tr.v[1]).unwrap()\n\n }\n\n\n\n /// Returns a Vec<Vec<f64>> of all the vertices 
(including the infinite one)\n\n pub fn all_vertices(&self) -> Vec<Vec<f64>> {\n\n let mut pts: Vec<Vec<f64>> = Vec::with_capacity(self.stars.len() - 1);\n\n for i in 0..self.stars.len() {\n\n pts.push(self.stars[i].pt.to_vec());\n", "file_path": "src/lib.rs", "rank": 87, "score": 11.608655252610859 }, { "content": " tmp.infinite_first();\n\n tmp.delete(0);\n\n let mut l: Vec<usize> = Vec::new();\n\n for each in tmp.iter() {\n\n // println!(\"{:?}\", each);\n\n l.push(*each)\n\n }\n\n // println!(\"l: {:?}\", l);\n\n for c in l.windows(2) {\n\n centres.push(geom::circle_centre(\n\n &self.stars[v].pt,\n\n &self.stars[c[0]].pt,\n\n &self.stars[c[1]].pt,\n\n ));\n\n }\n\n // println!(\"centres: {:?}\", centres);\n\n //-- replace 0 by bisector, step1\n\n centres.insert(0, newc1.to_vec());\n\n //-- replace 0 by bisector, step2\n\n centres.push(newc2.to_vec());\n", "file_path": "src/lib.rs", "rank": 88, "score": 11.554033623776748 }, { "content": "use crate::startin::Triangulation;\n\nuse startin;\n\n\n", "file_path": "tests/interpolate.rs", "rank": 90, "score": 11.229424616919584 }, { "content": " let l = &self.stars[v].link;\n\n for (i, n) in l.iter().enumerate() {\n\n let j = l.next_index(i);\n\n centres.push(geom::circle_centre(\n\n &self.stars[v].pt,\n\n &self.stars[*n].pt,\n\n &self.stars[l[j]].pt,\n\n ));\n\n }\n\n //-- copy first to make circular\n\n centres.push(vec![centres[0][0], centres[0][1]]);\n\n // println!(\"{:?}\", centres);\n\n let mut totalarea = 0.0_f64;\n\n for c in centres.windows(2) {\n\n totalarea += geom::area_triangle(&self.stars[v].pt, &c[0], &c[1]);\n\n }\n\n Some(totalarea)\n\n }\n\n\n\n fn vertex_exists(&self, v: usize) -> bool {\n", "file_path": "src/lib.rs", "rank": 91, "score": 11.160196637497478 }, { "content": " }\n\n self.stars[v].link.contains_infinite_vertex()\n\n }\n\n\n\n /// Returns, if it exists, the Triangle containing (px,py).\n\n /// If it is direction on a vertex/edge, then one is randomly chosen.\n\n pub fn locate(&self, 
px: f64, py: f64) -> Option<Triangle> {\n\n let p: [f64; 3] = [px, py, 0.0];\n\n let re = self.walk(&p);\n\n match re.is_infinite() {\n\n true => None,\n\n false => Some(re),\n\n }\n\n }\n\n\n\n // Returns closest point (in 2D) to a query point (x,y).\n\n // if (x,y) is outside the convex hull [`None`]\n\n pub fn closest_point(&self, px: f64, py: f64) -> Option<usize> {\n\n let re = self.locate(px, py);\n\n if re.is_none() == true {\n", "file_path": "src/lib.rs", "rank": 92, "score": 11.159462032861303 }, { "content": " self.snaptol = snaptol;\n\n }\n\n self.snaptol\n\n }\n\n\n\n pub fn get_snap_tolerance(&self) -> f64 {\n\n self.snaptol\n\n }\n\n\n\n /// Activate/deactive the jump-and-walk strategy for locate().\n\n /// If deactivated, then the walk starts from the last inserted triangle.\n\n pub fn set_jump_and_walk(&mut self, b: bool) {\n\n self.jump_and_walk = b;\n\n }\n\n\n\n pub fn is_using_robust_predicates(&self) -> bool {\n\n self.robust_predicates\n\n }\n\n\n\n pub fn use_robust_predicates(&mut self, b: bool) {\n", "file_path": "src/lib.rs", "rank": 93, "score": 11.096646661742001 }, { "content": " }\n\n let mut adjs: Vec<usize> = Vec::new();\n\n for each in self.stars[v].link.iter() {\n\n adjs.push(*each);\n\n }\n\n // println!(\"adjs: {:?}\", adjs);\n\n let mut cur: usize = 0;\n\n while adjs.len() > 3 {\n\n let a = cur % adjs.len();\n\n let b = (cur + 1) % adjs.len();\n\n let c = (cur + 2) % adjs.len();\n\n // println!(\"cur ear--> {:?} {}/{}/{}\", adjs, a, b, c);\n\n if (geom::orient2d(\n\n &self.stars[adjs[a]].pt,\n\n &self.stars[adjs[b]].pt,\n\n &self.stars[adjs[c]].pt,\n\n self.robust_predicates,\n\n ) == 1)\n\n && (geom::orient2d(\n\n &self.stars[adjs[a]].pt,\n", "file_path": "src/lib.rs", "rank": 94, "score": 11.009938740096835 }, { "content": "pub struct Triangulation {\n\n stars: Vec<Star>,\n\n snaptol: f64,\n\n cur: usize,\n\n is_init: bool,\n\n jump_and_walk: bool,\n\n robust_predicates: bool,\n\n removed_indices: Vec<usize>,\n\n}\n\n\n\nimpl 
Triangulation {\n\n pub fn new() -> Triangulation {\n\n // TODO: allocate a certain number?\n\n // let mut l: Vec<Star> = Vec::with_capacity(100000);\n\n let mut l: Vec<Star> = Vec::new();\n\n l.push(Star::new(-99999.99999, -99999.99999, -99999.99999));\n\n let es: Vec<usize> = Vec::new();\n\n Triangulation {\n\n stars: l,\n\n snaptol: 0.001,\n", "file_path": "src/lib.rs", "rank": 95, "score": 10.944762862789437 }, { "content": " let trs = self.all_triangles();\n\n for tr in trs.iter() {\n\n // s.push_str(&format!(\"f {} {} {}\\n\", tr.v[0], tr.v[1], tr.v[2]));\n\n let mut l: Vec<Vec<Vec<f64>>> = vec![vec![Vec::with_capacity(1); 4]];\n\n l[0][0].push(self.stars[tr.v[0]].pt[0]);\n\n l[0][0].push(self.stars[tr.v[0]].pt[1]);\n\n l[0][1].push(self.stars[tr.v[1]].pt[0]);\n\n l[0][1].push(self.stars[tr.v[1]].pt[1]);\n\n l[0][2].push(self.stars[tr.v[2]].pt[0]);\n\n l[0][2].push(self.stars[tr.v[2]].pt[1]);\n\n l[0][3].push(self.stars[tr.v[0]].pt[0]);\n\n l[0][3].push(self.stars[tr.v[0]].pt[1]);\n\n let gtr = Geometry::new(Value::Polygon(l));\n\n // let mut attributes = Map::new();\n\n // if self.stars[]\n\n // attributes.insert(String::from(\"active\"), to_value();\n\n let f = Feature {\n\n bbox: None,\n\n geometry: Some(gtr),\n\n id: None,\n", "file_path": "src/lib.rs", "rank": 96, "score": 10.716588072785857 }, { "content": "extern crate startin;\n\n\n", "file_path": "examples/example3.rs", "rank": 97, "score": 10.551942427553797 }, { "content": "extern crate startin;\n\n\n", "file_path": "examples/example9.rs", "rank": 98, "score": 10.551942427553797 }, { "content": "extern crate startin;\n\n\n", "file_path": "examples/example2.rs", "rank": 99, "score": 10.551942427553797 } ]
Rust
src/input/json.rs
PassFort/rouille
c3f06096afab039b88076d3d613f9a29e7a5f1dd
use serde; use serde_json; use serde_json::Value; use std::error; use std::fmt; use std::io::Error as IoError; use Request; #[derive(Debug)] pub enum JsonError { BodyAlreadyExtracted, WrongContentType, NullPresent, IoError(IoError), ParseError(serde_json::Error), } impl From<IoError> for JsonError { fn from(err: IoError) -> JsonError { JsonError::IoError(err) } } impl From<serde_json::Error> for JsonError { fn from(err: serde_json::Error) -> JsonError { JsonError::ParseError(err) } } impl error::Error for JsonError { #[inline] fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { JsonError::IoError(ref e) => Some(e), JsonError::ParseError(ref e) => Some(e), _ => None, } } } impl fmt::Display for JsonError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { let description = match *self { JsonError::BodyAlreadyExtracted => "the body of the request was already extracted", JsonError::WrongContentType => "the request didn't have a JSON content type", JsonError::NullPresent => "the JSON body contained an escaped null byte", JsonError::IoError(_) => { "could not read the body from the request, or could not execute the CGI program" } JsonError::ParseError(_) => "error while parsing the JSON body", }; write!(fmt, "{}", description) } } fn check_null(value: &Value) -> Result<&Value, JsonError> { match &value { Value::String(s) => { if s.find("\0").is_some() { return Err(JsonError::NullPresent); } } Value::Array(a) => { for element in a { check_null(element)?; } } Value::Object(o) => { for (k, v) in o { if k.find("\0").is_some() { return Err(JsonError::NullPresent); } check_null(v)?; } } _ => (), }; Ok(value) } pub fn json_input<O>(request: &Request) -> Result<O, JsonError> where O: serde::de::DeserializeOwned, { if let Some(header) = request.header("Content-Type") { if !header.starts_with("application/json") { return Err(JsonError::WrongContentType); } } else { return Err(JsonError::WrongContentType); } if let Some(b) = 
request.data() { let v: Value = serde_json::from_reader(b)?; check_null(&v)?; serde_json::from_value::<O>(v).map_err(From::from) } else { Err(JsonError::BodyAlreadyExtracted) } } #[cfg(test)] mod test { use super::*; #[test] fn test_check_nulls() { let data = r#" { "name": "John Doe", "age": 43, "phones": [ "+44 1234567", "+44 2345678" ], "children": [ { "name": "Sarah\u0000" }, { "name": "Bill\u0000" } ] }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_err()); } #[test] fn test_key_nulls() { let data = r#" { "name\u0000": "John Doe", "age": 43 }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_err()); } #[test] fn test_check_not_null() { let data = r#" { "name": "John Doe", "age": 43, "phones": [ "+44 1234567", "+44 2345678" ], "children": [ { "name": "Sarah" }, { "name": "Bill" } ] }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_ok()); } }
use serde; use serde_json; use serde_json::Value; use std::error; use std::fmt; use std::io::Error as IoError; use Request; #[derive(Debug)] pub enum JsonError { BodyAlreadyExtracted, WrongContentType, NullPresent, IoError(IoError), ParseError(serde_json::Error), } impl From<IoError> for JsonError { fn from(err: IoError) -> JsonError { JsonError::IoError(err) } } impl From<serde_json::Error> for JsonError { fn from(err: serde_json::Error) -> JsonError { JsonError::ParseError(err) } } impl error::Error for JsonError { #[inline] fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { JsonError::IoError(ref e) => Some(e), JsonError::ParseError(ref e) => Some(e), _ => None, } } } impl fmt::Display for JsonError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { let description = match *self { JsonError::BodyAlreadyExtracted => "the body of the request was already extracted", JsonError::WrongContentType => "the request didn't have a JSON content type", JsonError::NullPresent => "the JSON body contained an escaped null byte", JsonError::IoError(_) => { "could not read the body from the request, or could not execute the CGI program" } JsonError::ParseError(_) => "error while parsing the JSON body", }; write!(fmt, "{}", description) } } fn check_null(value: &Value) -> Result<&Value, JsonError> { match &value { Value::String(
pub fn json_input<O>(request: &Request) -> Result<O, JsonError> where O: serde::de::DeserializeOwned, { if let Some(header) = request.header("Content-Type") { if !header.starts_with("application/json") { return Err(JsonError::WrongContentType); } } else { return Err(JsonError::WrongContentType); } if let Some(b) = request.data() { let v: Value = serde_json::from_reader(b)?; check_null(&v)?; serde_json::from_value::<O>(v).map_err(From::from) } else { Err(JsonError::BodyAlreadyExtracted) } } #[cfg(test)] mod test { use super::*; #[test] fn test_check_nulls() { let data = r#" { "name": "John Doe", "age": 43, "phones": [ "+44 1234567", "+44 2345678" ], "children": [ { "name": "Sarah\u0000" }, { "name": "Bill\u0000" } ] }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_err()); } #[test] fn test_key_nulls() { let data = r#" { "name\u0000": "John Doe", "age": 43 }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_err()); } #[test] fn test_check_not_null() { let data = r#" { "name": "John Doe", "age": 43, "phones": [ "+44 1234567", "+44 2345678" ], "children": [ { "name": "Sarah" }, { "name": "Bill" } ] }"#; let v: Value = serde_json::from_str(data).unwrap(); assert!(check_null(&v).is_ok()); } }
s) => { if s.find("\0").is_some() { return Err(JsonError::NullPresent); } } Value::Array(a) => { for element in a { check_null(element)?; } } Value::Object(o) => { for (k, v) in o { if k.find("\0").is_some() { return Err(JsonError::NullPresent); } check_null(v)?; } } _ => (), }; Ok(value) }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn plain_text_body(request: &Request) -> Result<String, PlainTextError> {\n\n plain_text_body_with_limit(request, 1024 * 1024)\n\n}\n\n\n", "file_path": "src/input/plain.rs", "rank": 2, "score": 213355.85341939444 }, { "content": "/// Applies content encoding to the response.\n\n///\n\n/// Analyzes the `Accept-Encoding` header of the request. If one of the encodings is recognized and\n\n/// supported by rouille, it adds a `Content-Encoding` header to the `Response` and encodes its\n\n/// body.\n\n///\n\n/// If the response already has a `Content-Encoding` header, this function is a no-op.\n\n/// If the response has a `Content-Type` header that isn't textual content, this function is a\n\n/// no-op.\n\n///\n\n/// The gzip encoding is supported only if you enable the `gzip` feature of rouille (which is\n\n/// enabled by default).\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rouille::content_encoding;\n\n/// use rouille::Request;\n\n/// use rouille::Response;\n\n///\n\n/// fn handle(request: &Request) -> Response {\n\n/// content_encoding::apply(request, Response::text(\"hello world\"))\n\n/// }\n\n/// ```\n\npub fn apply(request: &Request, mut response: Response) -> Response {\n\n // Only text should be encoded. 
Otherwise just return.\n\n if !response_is_text(&response) {\n\n return response;\n\n }\n\n\n\n // If any of the response's headers is equal to `Content-Encoding`, ignore the function\n\n // call and return immediately.\n\n if response\n\n .headers\n\n .iter()\n\n .any(|&(ref key, _)| key.eq_ignore_ascii_case(\"Content-Encoding\"))\n\n {\n\n return response;\n\n }\n\n\n\n // Now let's get the list of content encodings accepted by the request.\n\n // The list should be ordered from the most desired to the least desired.\n\n let encoding_preference = [\"br\", \"gzip\", \"x-gzip\", \"identity\"];\n\n let accept_encoding_header = request.header(\"Accept-Encoding\").unwrap_or(\"\");\n", "file_path": "src/content_encoding.rs", "rank": 3, "score": 200161.6680943327 }, { "content": "/// Attempts to decode the content of the request as `multipart/form-data` data.\n\npub fn get_multipart_input(request: &Request) -> Result<Multipart, MultipartError> {\n\n let boundary = match multipart_boundary(request) {\n\n Some(b) => b,\n\n None => return Err(MultipartError::WrongContentType),\n\n };\n\n\n\n let request_body = if let Some(body) = request.data() {\n\n body\n\n } else {\n\n return Err(MultipartError::BodyAlreadyExtracted);\n\n };\n\n\n\n Ok(Multipart {\n\n inner: InnerMultipart::with_body(request_body, boundary),\n\n })\n\n}\n\n\n\n/// Allows you to inspect the content of the multipart input of a request.\n\npub struct Multipart<'a> {\n\n inner: InnerMultipart<RequestBody<'a>>,\n\n}\n\n\n\nimpl<'a> Multipart<'a> {\n\n #[allow(clippy::should_implement_trait)]\n\n pub fn next(&mut self) -> Option<MultipartField<&mut InnerMultipart<RequestBody<'a>>>> {\n\n self.inner.read_entry().unwrap_or(None)\n\n }\n\n}\n\n\n", "file_path": "src/input/multipart.rs", "rank": 4, "score": 195131.8618952768 }, { "content": "/// Sends the request to another HTTP server using the configuration.\n\n///\n\n/// If the function fails to get a response from the target, an error is returned. 
If you want\n\n/// to instead return a response with a status code such as 502 (`Bad Gateway`) or 504\n\n/// (`Gateway Time-out`), see `full_proxy`.\n\n///\n\n/// > **Note**: Implementation is very hacky for the moment.\n\n///\n\n/// > **Note**: SSL is not supported.\n\n// TODO: ^\n\npub fn proxy<A>(request: &Request, config: ProxyConfig<A>) -> Result<Response, ProxyError>\n\nwhere\n\n A: ToSocketAddrs,\n\n{\n\n let mut socket = TcpStream::connect(config.addr)?;\n\n socket.set_read_timeout(Some(Duration::from_secs(60)))?;\n\n socket.set_write_timeout(Some(Duration::from_secs(60)))?;\n\n\n\n let mut data = match request.data() {\n\n Some(d) => d,\n\n None => return Err(ProxyError::BodyAlreadyExtracted),\n\n };\n\n\n\n socket\n\n .write_all(format!(\"{} {} HTTP/1.1\\r\\n\", request.method(), request.raw_url()).as_bytes())?;\n\n for (header, value) in request.headers() {\n\n let value = if header == \"Host\" {\n\n if let Some(ref replace) = config.replace_host {\n\n &**replace\n\n } else {\n", "file_path": "src/proxy.rs", "rank": 5, "score": 184949.6645127691 }, { "content": "/// Attempts to decode the `POST` data received by the request.\n\n///\n\n/// If successful, returns a list of fields and values.\n\n///\n\n/// Returns an error if the request's content-type is not related to POST data.\n\n// TODO: what to do with this function?\n\npub fn raw_urlencoded_post_input(request: &Request) -> Result<Vec<(String, String)>, PostError> {\n\n if request\n\n .header(\"Content-Type\")\n\n .map(|ct| !ct.starts_with(\"application/x-www-form-urlencoded\"))\n\n .unwrap_or(true)\n\n {\n\n return Err(PostError::WrongContentType);\n\n }\n\n\n\n let body = {\n\n // TODO: DDoSable server if body is too large?\n\n let mut out = Vec::new(); // TODO: with_capacity()?\n\n if let Some(mut b) = request.data() {\n\n b.read_to_end(&mut out)?;\n\n } else {\n\n return Err(PostError::BodyAlreadyExtracted);\n\n }\n\n out\n\n };\n\n\n", "file_path": "src/input/post.rs", "rank": 6, "score": 
178816.55682065926 }, { "content": "/// Sends the request to another HTTP server using the configuration.\n\n///\n\n/// Contrary to `proxy`, if the server fails to return a proper response then a response is\n\n/// generated with the status code 502 or 504.\n\n///\n\n/// The only possible remaining error is if the body of the request was already extracted. Since\n\n/// this would be a logic error, it is acceptable to unwrap it.\n\npub fn full_proxy<A>(request: &Request, config: ProxyConfig<A>) -> Result<Response, FullProxyError>\n\nwhere\n\n A: ToSocketAddrs,\n\n{\n\n match proxy(request, config) {\n\n Ok(r) => Ok(r),\n\n Err(ProxyError::IoError(_)) => Ok(Response::text(\"Gateway Time-out\").with_status_code(504)),\n\n Err(ProxyError::HttpParseError) => Ok(Response::text(\"Bad Gateway\").with_status_code(502)),\n\n Err(ProxyError::BodyAlreadyExtracted) => Err(FullProxyError::BodyAlreadyExtracted),\n\n }\n\n}\n", "file_path": "src/proxy.rs", "rank": 7, "score": 177519.61932795617 }, { "content": "/// Searches inside `path` for a file that matches the given request. If a file is found,\n\n/// returns a `Response` that would serve this file if returned. If no file is found, a 404\n\n/// response is returned instead.\n\n///\n\n/// The value of the `Content-Type` header of the response is guessed based on the file's\n\n/// extension. 
If you wish so, you can modify that `Content-Type` by modifying the `Response`\n\n/// object returned by this function.\n\n///\n\n/// # Example\n\n///\n\n/// In this example, a request made for example to `/test.txt` will return the file\n\n/// `public/test.txt` (relative to the current working directory, which is usually the location\n\n/// of the `Cargo.toml`) if it exists.\n\n///\n\n/// ```no_run\n\n/// rouille::start_server(\"localhost:8000\", move |request| {\n\n/// let response = rouille::match_assets(&request, \"public\");\n\n/// if response.is_success() {\n\n/// return response;\n\n/// }\n\n///\n\n/// // ...\n\n/// # panic!()\n\n/// });\n\n/// ```\n\n///\n\n/// # Security\n\n///\n\n/// Everything inside the directory that you pass as `path` is potentially accessible by any\n\n/// client. **Do not use assume that client won't be able to guess the URL of a sensitive file**.\n\n/// All sensitive files should require a login/password to be accessed.\n\n///\n\n/// If you want to serve sensitive files, you are encouraged to put them in a different directory\n\n/// than public files, and call `match_assets` once for public files and once for private files\n\n/// after you checked the user's credentials.\n\n/// Only call `match_assets` **after** you know that the user can have access to all the files\n\n/// that can be served.\n\n///\n\n/// If you manage the user's accesses per-file, use a white list of authorized files instead of a\n\n/// black list of forbidden files. Files can potentially be accessed from multiple different URLs\n\n/// and a black list may not cover everything.\n\n///\n\n/// # Example with prefix\n\n///\n\n/// Sometimes you want to add a prefix to the URL of your static files. 
To do that, you can use\n\n/// the `remove_prefix` method on `Request`.\n\n///\n\n/// ```no_run\n\n/// rouille::start_server(\"localhost:8000\", move |request| {\n\n/// if let Some(request) = request.remove_prefix(\"/static\") {\n\n/// return rouille::match_assets(&request, \"public\");\n\n/// }\n\n///\n\n/// // ...\n\n/// # panic!()\n\n/// });\n\n/// ```\n\n///\n\n/// In this example, a request made to `/static/test.txt` will return the file\n\n/// `public/test.txt` if it exists.\n\n///\n\npub fn match_assets<P: ?Sized>(request: &Request, path: &P) -> Response\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let path = path.as_ref();\n\n let path = match path.canonicalize() {\n\n Ok(p) => p,\n\n Err(_) => return Response::empty_404(),\n\n };\n\n\n\n // The potential location of the file on the disk.\n\n let potential_file = {\n\n // Clippy erroneously identifies this transform as a redundant clone\n\n #[allow(clippy::redundant_clone)]\n\n let mut path = path.to_path_buf();\n\n for component in request.url().split('/') {\n\n path.push(component);\n\n }\n\n path\n\n };\n", "file_path": "src/assets.rs", "rank": 8, "score": 152854.1723310967 }, { "content": "/// Returns a list of the websocket protocols requested by the client.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use rouille::websocket;\n\n///\n\n/// # let request: rouille::Request = return;\n\n/// for protocol in websocket::requested_protocols(&request) {\n\n/// // ...\n\n/// }\n\n/// ```\n\n// TODO: return references to the request\n\npub fn requested_protocols(request: &Request) -> RequestedProtocolsIter {\n\n match request.header(\"Sec-WebSocket-Protocol\") {\n\n None => RequestedProtocolsIter {\n\n iter: Vec::new().into_iter(),\n\n },\n\n Some(h) => {\n\n let iter = h\n\n .split(',')\n\n .map(|s| s.trim())\n\n .filter(|s| !s.is_empty())\n\n .map(|s| s.to_owned())\n\n .collect::<Vec<_>>()\n\n .into_iter();\n\n RequestedProtocolsIter { iter }\n\n }\n\n }\n\n}\n\n\n\n/// Iterator to the list of protocols requested 
by the user.\n\npub struct RequestedProtocolsIter {\n", "file_path": "src/websocket/mod.rs", "rank": 9, "score": 149806.788151087 }, { "content": "/// Attempts to parse the list of cookies from the request.\n\n///\n\n/// Returns an iterator that produces a pair of `(key, value)`. If the header is missing or\n\n/// malformed, an empty iterator is returned.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use rouille::Request;\n\n/// use rouille::input;\n\n///\n\n/// # let request: Request = return;\n\n/// if let Some((_, val)) = input::cookies(&request).find(|&(n, _)| n == \"cookie-name\") {\n\n/// println!(\"Value of cookie = {:?}\", val);\n\n/// }\n\n/// ```\n\n// TODO: should an error be returned if the header is malformed?\n\n// TODO: be less tolerant to what is accepted?\n\npub fn cookies(request: &Request) -> CookiesIter {\n\n let header = request.header(\"Cookie\").unwrap_or(\"\");\n\n\n\n CookiesIter {\n\n iter: header.split(';'),\n\n }\n\n}\n\n\n\n/// Iterator that returns the list of cookies of a request.\n\n///\n\n/// See [the `cookies` functions](fn.cookies.html).\n\npub struct CookiesIter<'a> {\n\n iter: Split<'a, char>,\n\n}\n\n\n\nimpl<'a> Iterator for CookiesIter<'a> {\n\n type Item = (&'a str, &'a str);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n", "file_path": "src/input/cookies.rs", "rank": 10, "score": 146087.58287941015 }, { "content": "/// Adds a log entry to the given writer for each request.\n\n///\n\n/// Writes a line to the given \"writer\" after processing each request.\n\n/// Log line info has the format:\n\n/// `\"{%Y-%m-%d %H:%M%S%.6f} UTC - {METHOD} {URL} - {ELAPSED_TIME} - {RESP_SATUS}\"`\n\n///\n\n/// If you would like to customize the log output or functionality (such as integrating\n\n/// with the [`log`](https://docs.rs/log) crate, see [`rouille::log_custom`](fn.log_custom.html))\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use std::io;\n\n/// use rouille::{Request, Response};\n\n///\n\n/// fn 
handle(request: &Request) -> Response {\n\n/// rouille::log(request, io::stdout(), || {\n\n/// Response::text(\"hello world\")\n\n/// })\n\n/// }\n\n/// ```\n\npub fn log<W, F>(rq: &Request, mut output: W, f: F) -> Response\n\nwhere\n\n W: Write,\n\n F: FnOnce() -> Response,\n\n{\n\n let start_instant = Instant::now();\n\n let rq_line = format!(\n\n \"{} UTC - {} {}\",\n\n chrono::Utc::now().format(\"%Y-%m-%d %H:%M:%S%.6f\"),\n\n rq.method(),\n\n rq.raw_url()\n\n );\n\n\n\n // Calling the handler and catching potential panics.\n\n // Note that this we always resume unwinding afterwards, we can ignore the small panic-safety\n\n // mechanism of `catch_unwind`.\n\n let response = panic::catch_unwind(panic::AssertUnwindSafe(f));\n\n\n\n let elapsed_time = format_time(start_instant.elapsed());\n\n\n", "file_path": "src/log.rs", "rank": 11, "score": 142818.6378261873 }, { "content": "/// Calls custom logging functions after processing a request.\n\n///\n\n/// This is nearly identical to the [`rouille::log`](fn.log.html) function except it\n\n/// takes two logging functions that will be called with access to the request/response\n\n/// structs and the total execution duration of the handler.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// #[macro_use] extern crate log;\n\n/// extern crate chrono;\n\n/// # extern crate rouille;\n\n/// use rouille::{Request, Response};\n\n///\n\n///\n\n/// fn handle(request: &Request) -> Response {\n\n/// let now = chrono::Utc::now().format(\"%Y-%m-%d %H:%M:%S%.6f\");\n\n/// let log_ok = |req: &Request, resp: &Response, _elap: std::time::Duration| {\n\n/// info!(\"{} {} {}\", now, req.method(), req.raw_url());\n\n/// };\n\n/// let log_err = |req: &Request, _elap: std::time::Duration| {\n\n/// error!(\"{} Handler panicked: {} {}\", now, req.method(), req.raw_url());\n\n/// };\n\n/// rouille::log_custom(request, log_ok, log_err, || {\n\n/// Response::text(\"hello world\")\n\n/// })\n\n/// }\n\n/// #\n\n/// # fn main() { }\n\n/// ```\n\npub 
fn log_custom<L, E, F>(req: &Request, log_ok_f: L, log_err_f: E, handler: F) -> Response\n\nwhere\n\n L: Fn(&Request, &Response, Duration),\n\n E: Fn(&Request, Duration),\n\n F: FnOnce() -> Response,\n\n{\n\n let start_instant = Instant::now();\n\n\n\n // Call the handler and catch panics.\n\n // Note that we always resume unwinding afterwards.\n\n // We can ignore the small panic-safety mechanism of `catch_unwind`.\n\n let response = panic::catch_unwind(panic::AssertUnwindSafe(handler));\n\n let elapsed = start_instant.elapsed();\n\n\n\n match response {\n\n Ok(response) => {\n\n log_ok_f(req, &response, elapsed);\n\n response\n\n }\n\n Err(payload) => {\n\n log_err_f(req, elapsed);\n\n // The panic handler will print the payload contents\n\n panic::resume_unwind(payload);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 12, "score": 141465.1775456363 }, { "content": "// This function actually handles the request.\n\nfn note_routes(request: &Request, db: &mut Transaction) -> Response {\n\n router!(request,\n\n (GET) (/) => {\n\n // For the sake of the example we just put a dummy route for `/` so that you see\n\n // something if you connect to the server with a browser.\n\n Response::text(\"Hello! Unfortunately there is nothing to see here.\")\n\n },\n\n\n\n (GET) (/notes) => {\n\n // This route returns the list of notes. 
We perform the query and output it as JSON.\n\n\n\n #[derive(Serialize)]\n\n struct Elem { id: String }\n\n\n\n let mut out = Vec::new();\n\n // We perform the query and iterate over the rows, writing each row to `out`.\n\n for row in &db.query(\"SELECT id FROM notes\", &[]).unwrap() {\n\n let id: i32 = row.get(0);\n\n out.push(Elem { id: format!(\"/note/{}\", id) });\n\n }\n", "file_path": "examples/database.rs", "rank": 13, "score": 140022.05976760574 }, { "content": "#[inline]\n\npub fn extension_to_mime(extension: &str) -> &'static str {\n\n extension_to_mime_impl(Some(extension))\n\n}\n\n\n", "file_path": "src/assets.rs", "rank": 14, "score": 128753.37482197805 }, { "content": "/// Attempts to parse a `Authorization` header with basic HTTP auth.\n\n///\n\n/// If such a header is present and valid, a `HttpAuthCredentials` is returned.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use rouille::input;\n\n/// use rouille::Request;\n\n/// use rouille::Response;\n\n///\n\n/// fn handle(request: &Request) -> Response {\n\n/// let auth = match input::basic_http_auth(request) {\n\n/// Some(a) => a,\n\n/// None => return Response::basic_http_auth_login_required(\"realm\")\n\n/// };\n\n///\n\n/// if auth.login == \"admin\" && auth.password == \"GT5GeKyLvKLxuc7mjF5h\" {\n\n/// handle_after_login(request)\n\n/// } else {\n\n/// Response::text(\"Bad login/password\").with_status_code(403)\n\n/// }\n\n/// }\n\n///\n\n/// fn handle_after_login(request: &Request) -> Response {\n\n/// Response::text(\"You are in a secret area\")\n\n/// }\n\n/// ```\n\npub fn basic_http_auth(request: &Request) -> Option<HttpAuthCredentials> {\n\n let header = match request.header(\"Authorization\") {\n\n None => return None,\n\n Some(h) => h,\n\n };\n\n\n\n let mut split = header.splitn(2, |c| c == ' ');\n\n let authtype = match split.next() {\n\n None => return None,\n\n Some(t) => t,\n\n };\n\n\n\n if authtype != \"Basic\" {\n\n return None;\n\n }\n\n\n\n let authvalue = match 
split.next().and_then(|val| base64::decode(val).ok()) {\n\n Some(v) => v,\n\n None => return None,\n\n };\n", "file_path": "src/input/basic_http_auth.rs", "rank": 15, "score": 128278.3091242388 }, { "content": "// This is the function that truly handles the routes.\n\n//\n\n// The `session_data` parameter holds what we know about the client. It can be modified by the\n\n// body of this function. Keep in my mind that the way we designed `session_data` is appropriate\n\n// for most situations but not all. If for example you want to keep track of the pages that the\n\n// user visited, you should design it in another way, otherwise the data of some requests will\n\n// overwrite the data of other requests.\n\nfn handle_route(request: &Request, session_data: &mut Option<SessionData>) -> Response {\n\n // First we handle the routes that are always accessible and always the same, no matter whether\n\n // the user is logged in or not.\n\n router!(request,\n\n (POST) (/login) => {\n\n // This is the route that is called when the user wants to log in.\n\n\n\n // In order to retrieve what the user sent us through the <form>, we use the\n\n // `post_input!` macro. This macro returns an error (if a field is missing for example),\n\n // so we use the `try_or_400!` macro to handle any possible error.\n\n //\n\n // If the macro is successful, `data` is an instance of a struct that has one member\n\n // for each field that we indicated in the macro.\n\n let data = try_or_400!(post_input!(request, {\n\n login: String,\n\n password: String,\n\n }));\n\n\n\n // Just a small debug message for this example. 
You could also output something in the\n\n // logs in a real application.\n", "file_path": "examples/login-session.rs", "rank": 16, "score": 127707.03142172052 }, { "content": "#[cfg(not(feature = \"brotli\"))]\n\n#[inline]\n\nfn brotli(response: &mut Response) {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use content_encoding;\n\n use Request;\n\n use Response;\n\n\n\n // TODO: more tests for encoding stuff\n\n #[test]\n\n fn text_response() {\n\n assert!(content_encoding::response_is_text(&Response::text(\"\")));\n\n }\n\n\n\n #[test]\n\n fn non_text_response() {\n\n assert!(!content_encoding::response_is_text(&Response::from_data(\n\n \"image/jpeg\",\n\n \"\"\n\n )));\n", "file_path": "src/content_encoding.rs", "rank": 17, "score": 120002.07332622807 }, { "content": "#[cfg(not(feature = \"gzip\"))]\n\n#[inline]\n\nfn gzip(response: &mut Response) {}\n\n\n", "file_path": "src/content_encoding.rs", "rank": 18, "score": 120002.07332622807 }, { "content": "#[cfg(feature = \"brotli\")]\n\nfn brotli(response: &mut Response) {\n\n use brotli::enc::reader::CompressorReader;\n\n use std::mem;\n\n use ResponseBody;\n\n\n\n response\n\n .headers\n\n .push((\"Content-Encoding\".into(), \"br\".into()));\n\n let previous_body = mem::replace(&mut response.data, ResponseBody::empty());\n\n let (raw_data, _) = previous_body.into_reader_and_size();\n\n // Using default Brotli parameters: 0 buffer_size == 4096, compression level 6, lgwin == 22\n\n response.data = ResponseBody::from_reader(CompressorReader::new(raw_data, 0, 6, 22));\n\n}\n\n\n", "file_path": "src/content_encoding.rs", "rank": 19, "score": 119997.73802130236 }, { "content": "#[cfg(feature = \"gzip\")]\n\nfn gzip(response: &mut Response) {\n\n use deflate::deflate_bytes_gzip;\n\n use std::io;\n\n use std::mem;\n\n use ResponseBody;\n\n\n\n response\n\n .headers\n\n .push((\"Content-Encoding\".into(), \"gzip\".into()));\n\n let previous_body = mem::replace(&mut response.data, ResponseBody::empty());\n\n let (mut raw_data, 
size) = previous_body.into_reader_and_size();\n\n let mut src = match size {\n\n Some(size) => Vec::with_capacity(size),\n\n None => Vec::new(),\n\n };\n\n io::copy(&mut raw_data, &mut src).expect(\"Failed reading response body while gzipping\");\n\n let zipped = deflate_bytes_gzip(&src);\n\n response.data = ResponseBody::from_data(zipped);\n\n}\n\n\n", "file_path": "src/content_encoding.rs", "rank": 20, "score": 119997.73802130236 }, { "content": "/// Reads plain text data from the body of a request.\n\n///\n\n/// This does the same as `plain_text_body`, but with a customizable limit in bytes to how much\n\n/// data will be read from the request. If the limit is exceeded, a `LimitExceeded` error is\n\n/// returned.\n\npub fn plain_text_body_with_limit(\n\n request: &Request,\n\n limit: usize,\n\n) -> Result<String, PlainTextError> {\n\n // TODO: handle encoding ; return NotUtf8 if a non-utf8 charset is sent\n\n // if no encoding is specified by the client, the default is `US-ASCII` which is compatible with UTF8\n\n\n\n if let Some(header) = request.header(\"Content-Type\") {\n\n if !header.starts_with(\"text/plain\") {\n\n return Err(PlainTextError::WrongContentType);\n\n }\n\n } else {\n\n return Err(PlainTextError::WrongContentType);\n\n }\n\n\n\n let body = match request.data() {\n\n Some(b) => b,\n\n None => return Err(PlainTextError::BodyAlreadyExtracted),\n\n };\n\n\n", "file_path": "src/input/plain.rs", "rank": 21, "score": 118854.76329050705 }, { "content": "pub fn session<'r, F>(request: &'r Request, cookie_name: &str, timeout_s: u64, inner: F) -> Response\n\nwhere\n\n F: FnOnce(&Session<'r>) -> Response,\n\n{\n\n let mut cookie = input::cookies(request);\n\n let cookie = cookie.find(|&(ref k, _)| k == &cookie_name);\n\n let cookie = cookie.map(|(_, v)| v);\n\n\n\n let session = if let Some(cookie) = cookie {\n\n Session {\n\n key_was_retrieved: AtomicBool::new(false),\n\n key_was_given: true,\n\n key: cookie.into(),\n\n }\n\n } else {\n\n Session {\n\n 
key_was_retrieved: AtomicBool::new(false),\n\n key_was_given: false,\n\n key: generate_session_id().into(),\n\n }\n", "file_path": "src/session.rs", "rank": 22, "score": 114709.24267334345 }, { "content": "/// Returns the mime type of a file based on its extension.\n\nfn extension_to_mime_impl(extension: Option<&str>) -> &'static str {\n\n // List taken from https://github.com/cybergeek94/mime_guess/blob/master/src/mime_types.rs,\n\n // itself taken from a dead link.\n\n match extension {\n\n Some(\"323\") => \"text/h323; charset=utf8\",\n\n Some(\"3g2\") => \"video/3gpp2\",\n\n Some(\"3gp\") => \"video/3gpp\",\n\n Some(\"3gp2\") => \"video/3gpp2\",\n\n Some(\"3gpp\") => \"video/3gpp\",\n\n Some(\"7z\") => \"application/x-7z-compressed\",\n\n Some(\"aa\") => \"audio/audible\",\n\n Some(\"aac\") => \"audio/aac\",\n\n Some(\"aaf\") => \"application/octet-stream\",\n\n Some(\"aax\") => \"audio/vnd.audible.aax\",\n\n Some(\"ac3\") => \"audio/ac3\",\n\n Some(\"aca\") => \"application/octet-stream\",\n\n Some(\"accda\") => \"application/msaccess.addin\",\n\n Some(\"accdb\") => \"application/msaccess\",\n\n Some(\"accdc\") => \"application/msaccess.cab\",\n\n Some(\"accde\") => \"application/msaccess\",\n", "file_path": "src/assets.rs", "rank": 23, "score": 108020.72942421952 }, { "content": "// Sends a message to a websocket.\n\n// TODO: message fragmentation?\n\nfn send<W: Write>(data: &[u8], mut dest: W, opcode: u8) -> io::Result<()> {\n\n // Write the opcode\n\n assert!(opcode <= 0xf);\n\n let first_byte = 0x80 | opcode;\n\n dest.write_all(&[first_byte])?;\n\n\n\n // Write the length\n\n if data.len() >= 65536 {\n\n dest.write_all(&[127u8])?;\n\n let len = data.len() as u64;\n\n assert!(len < 0x8000_0000_0000_0000);\n\n dest.write_all(&len.to_be_bytes())?;\n\n } else if data.len() >= 126 {\n\n dest.write_all(&[126u8])?;\n\n let len = data.len() as u16;\n\n dest.write_all(&len.to_be_bytes())?;\n\n } else {\n\n dest.write_all(&[data.len() as u8])?;\n\n }\n\n\n", 
"file_path": "src/websocket/websocket.rs", "rank": 24, "score": 102862.75717765393 }, { "content": "fn multipart_boundary(request: &Request) -> Option<String> {\n\n const BOUNDARY: &str = \"boundary=\";\n\n\n\n let content_type = match request.header(\"Content-Type\") {\n\n None => return None,\n\n Some(c) => c,\n\n };\n\n\n\n let start = match content_type.find(BOUNDARY) {\n\n Some(pos) => pos + BOUNDARY.len(),\n\n None => return None,\n\n };\n\n\n\n let end = content_type[start..]\n\n .find(';')\n\n .map_or(content_type.len(), |end| start + end);\n\n Some(content_type[start..end].to_owned())\n\n}\n", "file_path": "src/input/multipart.rs", "rank": 25, "score": 100954.51493743359 }, { "content": "pub trait CgiRun {\n\n /// Dispatches a request to the process.\n\n ///\n\n /// This function modifies the `Command` to add all the required environment variables\n\n /// and the request's body, then executes the command and waits until the child process has\n\n /// returned all the headers of the response. Once the headers have been sent back, this\n\n /// function returns.\n\n ///\n\n /// The body of the returned `Response` will hold a handle to the child's stdout output. 
This\n\n /// means that the child can continue running in the background and send data to the client,\n\n /// even after you have finished handling the request.\n\n fn start_cgi(self, request: &Request) -> Result<Response, CgiError>;\n\n}\n\n\n\nimpl CgiRun for Command {\n\n fn start_cgi(mut self, request: &Request) -> Result<Response, CgiError> {\n\n self.env(\"SERVER_SOFTWARE\", \"rouille\")\n\n .env(\"SERVER_NAME\", \"localhost\") // FIXME:\n\n .env(\"GATEWAY_INTERFACE\", \"CGI/1.1\")\n\n .env(\"SERVER_PROTOCOL\", \"HTTP/1.1\") // FIXME:\n", "file_path": "src/cgi.rs", "rank": 26, "score": 95200.32340146977 }, { "content": "/// Builds a `Response` that initiates the websocket protocol.\n\npub fn start<S>(\n\n request: &Request,\n\n subprotocol: Option<S>,\n\n) -> Result<(Response, mpsc::Receiver<Websocket>), WebsocketError>\n\nwhere\n\n S: Into<Cow<'static, str>>,\n\n{\n\n let subprotocol = subprotocol.map(|s| s.into());\n\n\n\n if request.method() != \"GET\" {\n\n return Err(WebsocketError::InvalidWebsocketRequest);\n\n }\n\n\n\n // TODO:\n\n /*if request.http_version() < &HTTPVersion(1, 1) {\n\n return Err(WebsocketError::InvalidWebsocketRequest);\n\n }*/\n\n\n\n match request.header(\"Connection\") {\n\n Some(h) if h.to_ascii_lowercase().contains(\"upgrade\") => (),\n", "file_path": "src/websocket/mod.rs", "rank": 27, "score": 94078.42171976907 }, { "content": "// This function handles the routes that are accessible only if the user is logged in.\n\nfn handle_route_logged_in(request: &Request, _session_data: &SessionData) -> Response {\n\n router!(request,\n\n (GET) (/) => {\n\n // Show some greetings with a dummy response.\n\n Response::html(r#\"You are now logged in. 
If you close your tab and open it again,\n\n you will still be logged in.<br />\n\n <a href=\"/private\">Click here for the private area</a>\n\n <form action=\"/logout\" method=\"POST\">\n\n <button>Logout</button></form>\"#)\n\n },\n\n\n\n (GET) (/private) => {\n\n // This route is here to demonstrate that the client can go to `/private` only if\n\n // they are successfully logged in.\n\n Response::html(r#\"You are in the private area! <a href=\"/\">Go back</a>.\"#)\n\n },\n\n\n\n _ => Response::empty_404()\n\n )\n\n}\n", "file_path": "examples/login-session.rs", "rank": 28, "score": 92200.9566887326 }, { "content": "/// Generates a string suitable for a session ID.\n\n///\n\n/// The output string doesn't contain any punctuation or character such as quotes or brackets\n\n/// that could need to be escaped.\n\npub fn generate_session_id() -> String {\n\n // 5e+114 possibilities is reasonable.\n\n rand::thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .map(char::from)\n\n .filter(|&c| {\n\n ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || ('0'..='9').contains(&c)\n\n })\n\n .take(64)\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 29, "score": 91498.21626705953 }, { "content": "/// Must be implemented on types used with the `post_input!` macro.\n\n///\n\n/// The template parameter represents the type of a configuration object that can be passed by\n\n/// the user when the macro is called. If the user doesn't pass any configuration, the expected\n\n/// type is `()`.\n\npub trait DecodePostField<Config>: fmt::Debug {\n\n /// Called when a field with the given name is found in the POST input.\n\n ///\n\n /// The value of `content` is what the client sent. This function should attempt to parse it\n\n /// into `Self` or return an error if it couldn't. 
If `Self` can't handle a field, then a\n\n /// `PostFieldError::WrongFieldType` error should be returned.\n\n fn from_field(config: Config, content: &str) -> Result<Self, PostFieldError>\n\n where\n\n Self: Sized;\n\n\n\n /// Called when a file with the given name is found in the POST input.\n\n ///\n\n /// The `file` is an object from which the body of the file can be read. The `filename` and\n\n /// `mime` are also arbitrary values sent directly by the client, so you shouldn't trust them\n\n /// blindly.\n\n ///\n\n /// > **Note**: The `file` object can typically read directly from the socket. But don't worry\n\n /// > about doing something wrong, as there are protection mechanisms that will prevent you\n\n /// > from reading too far.\n\n ///\n", "file_path": "src/input/post.rs", "rank": 30, "score": 91227.44677076847 }, { "content": "// Helpers for decoding masked big-endian byte sequences\n\n// These could probably be replaced with something more robust like `nom` if we want to\n\n// take the hit of adding another dependency.\n\nfn read_u16_be<'a, T: Iterator<Item = &'a u8>>(input: &mut T) -> u16 {\n\n let buf: [u8; 2] = [*input.next().unwrap(), *input.next().unwrap()];\n\n u16::from_be_bytes(buf)\n\n}\n\n\n", "file_path": "src/websocket/low_level.rs", "rank": 31, "score": 90601.53262501839 }, { "content": "fn read_u64_be<'a, T: Iterator<Item = &'a u8>>(input: &mut T) -> u64 {\n\n let buf: [u8; 8] = [\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n ];\n\n u64::from_be_bytes(buf)\n\n}\n\n\n\n/// Iterator to the list of elements that were received.\n\npub struct ElementsIter<'a> {\n\n state: &'a mut StateMachine,\n\n data: &'a [u8],\n\n}\n\n\n", "file_path": "src/websocket/low_level.rs", "rank": 32, "score": 90597.8251760034 }, { "content": "fn read_u32_be<'a, T: Iterator<Item = &'a 
u8>>(input: &mut T) -> u32 {\n\n let buf: [u8; 4] = [\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n *input.next().unwrap(),\n\n ];\n\n u32::from_be_bytes(buf)\n\n}\n\n\n", "file_path": "src/websocket/low_level.rs", "rank": 33, "score": 90597.8251760034 }, { "content": "// Function run in a separate thread.\n\nfn websocket_handling_thread(mut websocket: websocket::Websocket) {\n\n // We wait for a new message to come from the websocket.\n\n while let Some(message) = websocket.next() {\n\n match message {\n\n websocket::Message::Text(txt) => {\n\n // If the message is text, send it back with `send_text`.\n\n println!(\"received {:?} from a websocket\", txt);\n\n websocket.send_text(&txt).unwrap();\n\n }\n\n websocket::Message::Binary(_) => {\n\n println!(\"received binary from a websocket\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/websocket.rs", "rank": 34, "score": 85022.43688681436 }, { "content": "fn main() {\n\n // This example shows how to serve static files with rouille.\n\n\n\n // Note that like all examples we only listen on `localhost`, so you can't access this server\n\n // from another machine than your own.\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n {\n\n // The `match_assets` function tries to find a file whose name corresponds to the URL\n\n // of the request. The second parameter (`\".\"`) tells where the files to look for are\n\n // located.\n\n // In order to avoid potential security threats, `match_assets` will never return any\n\n // file outside of this directory even if the URL is for example `/../../foo.txt`.\n\n let response = rouille::match_assets(&request, \".\");\n\n\n\n // If a file is found, the `match_assets` function will return a response with a 200\n\n // status code and the content of the file. 
If no file is found, it will instead return\n\n // an empty 404 response.\n\n // Here we check whether if a file is found, and if so we return the response.\n", "file_path": "examples/static-files.rs", "rank": 35, "score": 81022.85603117669 }, { "content": "/// Starts a server and uses the given requests handler.\n\n///\n\n/// The request handler takes a `&Request` and must return a `Response` to send to the user.\n\n///\n\n/// > **Note**: `start_server` is meant to be an easy-to-use function. If you want more control,\n\n/// > see [the `Server` struct](struct.Server.html).\n\n///\n\n/// # Common mistakes\n\n///\n\n/// The handler must capture its environment by value and not by reference (`'static`). If you\n\n/// use closure, don't forget to put `move` in front of the closure.\n\n///\n\n/// The handler must also be thread-safe (`Send` and `Sync`).\n\n/// For example this handler isn't thread-safe:\n\n///\n\n/// ```should_fail\n\n/// let mut requests_counter = 0;\n\n///\n\n/// rouille::start_server(\"localhost:80\", move |request| {\n\n/// requests_counter += 1;\n\n///\n\n/// // ... 
rest of the handler ...\n\n/// # panic!()\n\n/// })\n\n/// ```\n\n///\n\n/// Multiple requests can be processed simultaneously, therefore you can't mutably access\n\n/// variables from the outside.\n\n///\n\n/// Instead you must use a `Mutex`:\n\n///\n\n/// ```no_run\n\n/// use std::sync::Mutex;\n\n/// let requests_counter = Mutex::new(0);\n\n///\n\n/// rouille::start_server(\"localhost:80\", move |request| {\n\n/// *requests_counter.lock().unwrap() += 1;\n\n///\n\n/// // rest of the handler\n\n/// # panic!()\n\n/// })\n\n/// ```\n\n///\n\n/// # Panic handling in the handler\n\n///\n\n/// If your request handler panics, a 500 error will automatically be sent to the client.\n\n///\n\n/// # Panic\n\n///\n\n/// This function will panic if the server starts to fail (for example if you use a port that is\n\n/// already occupied) or if the socket is force-closed by the operating system.\n\n///\n\n/// If you need to handle these situations, please see `Server`.\n\npub fn start_server<A, F>(addr: A, handler: F) -> !\n\nwhere\n\n A: ToSocketAddrs,\n\n F: Send + Sync + 'static + Fn(&Request) -> Response,\n\n{\n\n Server::new(addr, handler)\n\n .expect(\"Failed to start server\")\n\n .run();\n\n panic!(\"The server socket closed unexpectedly\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 36, "score": 79184.03591482257 }, { "content": "#[inline]\n\npub fn parse_priority_header(input: &str) -> PriorityHeaderIter {\n\n PriorityHeaderIter {\n\n iter: input.split(','),\n\n }\n\n}\n\n\n\n/// Iterator to the elements of a priority header.\n\n///\n\n/// Created with [`parse_priority_header`](fn.parse_priority_header.html).\n\npub struct PriorityHeaderIter<'a> {\n\n iter: Split<'a, char>,\n\n}\n\n\n\nimpl<'a> Iterator for PriorityHeaderIter<'a> {\n\n type Item = (&'a str, f32);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n let elem = match self.iter.next() {\n\n Some(n) => n,\n", "file_path": "src/input/priority_header.rs", "rank": 37, "score": 
76128.15317166693 }, { "content": "/// Returns the preferred value amongst a priority header.\n\n///\n\n/// This function takes the value of a priority header and a list of elements that can be handled\n\n/// by the server, and returns the index within that list of the element with the highest priority.\n\n///\n\n/// If multiple elements in `handled` match and have the same priority, the first one is returned.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use rouille::input::priority_header_preferred;\n\n///\n\n/// let header = \"text/plain; q=1.2, image/png; q=2.0\";\n\n/// let handled = [\"image/gif\", \"image/png\", \"text/plain\"];\n\n/// assert_eq!(priority_header_preferred(header, handled.iter().cloned()), Some(1));\n\n/// ```\n\npub fn priority_header_preferred<'a, I>(input: &'a str, elements: I) -> Option<usize>\n\nwhere\n\n I: Iterator<Item = &'a str>,\n\n{\n\n let mut result = (None, f32::NEG_INFINITY);\n\n\n\n for (index, req_elem) in elements.enumerate() {\n\n for (header_elem, prio) in parse_priority_header(input) {\n\n if prio <= result.1 {\n\n continue;\n\n }\n\n\n\n if req_elem == header_elem {\n\n result = (Some(index), prio);\n\n continue;\n\n }\n\n\n\n let (req_elem_left, req_elem_right) = {\n\n let mut parts = req_elem.split('/');\n\n let left = parts.next();\n", "file_path": "src/input/priority_header.rs", "rank": 38, "score": 68768.80417860753 }, { "content": "// Returns true if the Content-Type of the response is a type that should be encoded.\n\n// Since encoding is purely an optimization, it's not a problem if the function sometimes has\n\n// false positives or false negatives.\n\nfn response_is_text(response: &Response) -> bool {\n\n response.headers.iter().any(|&(ref key, ref value)| {\n\n if !key.eq_ignore_ascii_case(\"Content-Type\") {\n\n return false;\n\n }\n\n\n\n let content_type = value.to_lowercase();\n\n content_type.starts_with(\"text/\")\n\n || content_type.contains(\"javascript\")\n\n || content_type.contains(\"json\")\n\n 
|| content_type.contains(\"xml\")\n\n || content_type.contains(\"font\")\n\n })\n\n}\n\n\n", "file_path": "src/content_encoding.rs", "rank": 39, "score": 67425.14601965413 }, { "content": "/// Identical to `start_server` but uses a `ThreadPool` of the given size.\n\n///\n\n/// When `pool_size` is `None`, the thread pool size will default to `8 * num-cpus`.\n\n/// `pool_size` must be greater than zero or this function will panic.\n\npub fn start_server_with_pool<A, F>(addr: A, pool_size: Option<usize>, handler: F) -> !\n\nwhere\n\n A: ToSocketAddrs,\n\n F: Send + Sync + 'static + Fn(&Request) -> Response,\n\n{\n\n Server::new(addr, handler)\n\n .expect(\"Failed to start server\")\n\n .pool_size(pool_size.unwrap_or_else(|| 8 * num_cpus::get()))\n\n .run();\n\n panic!(\"The server socket closed unexpectedly\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 67259.28128878221 }, { "content": "/// Executes a function in either a thread of a thread pool\n\nenum Executor {\n\n Threaded { count: Arc<AtomicUsize> },\n\n Pooled { pool: threadpool::ThreadPool },\n\n}\n\nimpl Executor {\n\n /// `size` must be greater than zero or the call to `ThreadPool::new` will panic.\n\n fn with_size(size: usize) -> Self {\n\n let pool = threadpool::ThreadPool::new(size);\n\n Executor::Pooled { pool }\n\n }\n\n\n\n #[inline]\n\n fn execute<F: FnOnce() + Send + 'static>(&self, f: F) {\n\n match *self {\n\n Executor::Threaded { ref count } => {\n\n let counter = AtomicCounter::new(count);\n\n thread::spawn(move || {\n\n let _counter = counter;\n\n f()\n\n });\n", "file_path": "src/lib.rs", "rank": 41, "score": 62041.543774591395 }, { "content": "enum StateMachineInner {\n\n // If `StateMachine::inner` is `InHeader`, then `buffer` contains the start of the header.\n\n InHeader,\n\n // If `StateMachine::inner` is `InData`, then `buffer` must be empty.\n\n InData {\n\n // Mask to decode the message.\n\n mask: u32,\n\n // Value between 0 and 3 that indicates the number of bytes 
between the start of the data\n\n // and the next expected byte.\n\n offset: u8,\n\n // Number of bytes remaining in the frame.\n\n remaining_len: u64,\n\n },\n\n}\n\n\n\nimpl StateMachine {\n\n /// Initializes a new state machine for a new stream. Expects to see a new frame as the first\n\n /// packet.\n\n pub fn new() -> StateMachine {\n\n StateMachine {\n", "file_path": "src/websocket/low_level.rs", "rank": 42, "score": 55964.298194992225 }, { "content": "/// Trait for objects that can take ownership of a raw connection to the client data.\n\n///\n\n/// The purpose of this trait is to be used with the `Connection: Upgrade` header, hence its name.\n\npub trait Upgrade {\n\n /// Initializes the object with the given socket.\n\n fn build(&mut self, socket: Box<dyn ReadWrite + Send>);\n\n}\n\n\n\n/// Represents a request that your handler must answer to.\n\n///\n\n/// This can be either a real request (received by the HTTP server) or a mock object created with\n\n/// one of the `fake_*` constructors.\n\npub struct Request {\n\n method: String,\n\n url: String,\n\n headers: Vec<(String, String)>,\n\n https: bool,\n\n data: Arc<Mutex<Option<Box<dyn Read + Send>>>>,\n\n remote_addr: SocketAddr,\n\n}\n\n\n\nimpl fmt::Debug for Request {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/lib.rs", "rank": 43, "score": 55167.23528881988 }, { "content": "fn main() {\n\n // This example demonstrates how to connect to a database and perform queries when the client\n\n // performs a request.\n\n // The server created in this example uses a REST API.\n\n\n\n // The first thing we do is try to connect to the database.\n\n //\n\n // One important thing to note here is that we wrap a `Mutex` around the connection. Since the\n\n // request handler can be called multiple times in parallel, everything that we use in it must\n\n // be thread-safe. 
By default the PostgresSQL connection isn't thread-safe, so we need a mutex\n\n // to make it thread-safe.\n\n //\n\n // Not wrapping a mutex around the database would lead to a compilation error when we attempt\n\n // to use the variable `db` from within the closure passed to `start_server`.\n\n let db = {\n\n let db = Client::connect(\"postgres://test:test@localhost/test\", NoTls);\n\n Mutex::new(db.expect(\"Failed to connect to database\"))\n\n };\n\n\n\n // We perform some initialization for the sake of the example.\n", "file_path": "examples/database.rs", "rank": 44, "score": 49656.03606071294 }, { "content": "fn main() {\n\n // This example demonstrates how to use websockets with rouille.\n\n\n\n // Small message so that people don't need to read the source code.\n\n // Note that like all examples we only listen on `localhost`, so you can't access this server\n\n // from another machine than your own.\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n router!(request,\n\n (GET) (/) => {\n\n // The / route outputs an HTML client so that the user can try the websockets.\n\n // Note that in a real website you should probably use some templating system, or\n\n // at least load the HTML from a file.\n\n Response::html(\"<script type=\\\"text/javascript\\\">\n\n var socket = new WebSocket(\\\"ws://localhost:8000/ws\\\", \\\"echo\\\");\n", "file_path": "examples/websocket.rs", "rank": 45, "score": 49656.03606071294 }, { "content": "fn main() {\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n // TODO: add logging\n\n let mut cmd = Command::new(\"php-cgi\");\n\n cmd.arg(\"-n\"); // Don't use a php.ini.\n\n cmd.env(\"SCRIPT_FILENAME\", \"examples/php-test.php\"); // The PHP script to use.\n\n cmd.env(\"REDIRECT_STATUS\", \"1\"); // Necessary for security.\n\n cmd.start_cgi(&request).unwrap()\n\n });\n\n}\n", "file_path": "examples/php.rs", "rank": 46, "score": 49656.03606071294 }, 
{ "content": "fn main() {\n\n // This example demonstrates how to handle HTML forms.\n\n\n\n // Note that like all examples we only listen on `localhost`, so you can't access this server\n\n // from another machine than your own.\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n rouille::log(&request, io::stdout(), || {\n\n router!(request,\n\n (GET) (/) => {\n\n // When viewing the home page, we return an HTML document described below.\n\n rouille::Response::html(FORM)\n\n },\n\n\n\n (POST) (/submit) => {\n\n // This is the route that is called when the user submits the form of the\n\n // home page.\n\n\n\n // We query the data with the `post_input!` macro. Each field of the macro\n", "file_path": "examples/simple-form.rs", "rank": 47, "score": 47940.27191449815 }, { "content": "fn main() {\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n // The `start_server` starts listening forever on the given address.\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n // The closure passed to `start_server` will be called once for each client request. It\n\n // will be called multiple times concurrently when there are multiple clients.\n\n\n\n // Here starts the real handler for the request.\n\n //\n\n // The `router!` macro is very similar to a `match` expression in core Rust. The macro\n\n // takes the request as parameter and will jump to the first block that matches the\n\n // request.\n\n //\n\n // Each of the possible blocks builds a `Response` object. Just like most things in Rust,\n\n // the `router!` macro is an expression whose value is the `Response` built by the block\n\n // that was called. 
Since `router!` is the last piece of code of this closure, the\n\n // `Response` is then passed back to the `start_server` function and sent to the client.\n\n router!(request,\n\n (GET) (/) => {\n", "file_path": "examples/hello-world.rs", "rank": 48, "score": 47940.27191449815 }, { "content": "fn main() {\n\n // This example demonstrates how to create a website with a simple login form.\n\n\n\n // Small message so that people don't need to read the source code.\n\n // Note that like all examples we only listen on `localhost`, so you can't access this server\n\n // from another machine than your own.\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n // For the sake of the example, we are going to store the sessions data in a hashmap in memory.\n\n // This has the disadvantage that all the sessions are erased if the program reboots (for\n\n // example because of an update), and that if you start multiple processes of the same\n\n // application (for example for load balancing) then they won't share sessions.\n\n // Therefore in a real project you should store probably the sessions in a database of some\n\n // sort instead.\n\n //\n\n // We created a struct that contains the data that we store on the server for each session,\n\n // and a hashmap that associates each session ID with the data.\n\n let sessions_storage: Mutex<HashMap<String, SessionData>> = Mutex::new(HashMap::new());\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n", "file_path": "examples/login-session.rs", "rank": 49, "score": 47940.27191449815 }, { "content": "fn main() {\n\n // This example shows how to create a reverse proxy with rouille.\n\n\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n rouille::proxy::full_proxy(\n\n &request,\n\n rouille::proxy::ProxyConfig {\n\n addr: \"example.com:80\",\n\n replace_host: Some(\"example.com\".into()),\n\n },\n\n )\n\n .unwrap()\n\n });\n\n}\n", "file_path": 
"examples/reverse-proxy.rs", "rank": 50, "score": 47940.27191449815 }, { "content": "fn main() {\n\n // This example demonstrates how to serve a git repository with rouille.\n\n // After starting this example, you should be able to run `git clone http://localhost:8000/`\n\n // in order to clone the repository of the current working directory.\n\n\n\n println!(\"Now listening on localhost:8000\");\n\n\n\n rouille::start_server(\"localhost:8000\", move |request| {\n\n rouille::log(&request, io::stdout(), || {\n\n // When a request is received, we invoke the `git http-backend` command through CGI.\n\n let mut cmd = Command::new(\"git\");\n\n cmd.arg(\"http-backend\");\n\n\n\n // We need to set some git-specific environment variables.\n\n cmd.env(\n\n \"GIT_PROJECT_ROOT\",\n\n env::current_dir().unwrap().to_str().unwrap(),\n\n );\n\n cmd.env(\"GIT_HTTP_EXPORT_ALL\", \"\"); // This one is required to avoid security errors.\n\n\n", "file_path": "examples/git-http-backend.rs", "rank": 51, "score": 46388.19461558269 }, { "content": "#[test]\n\nfn test_generate_session_id() {\n\n assert!(generate_session_id().len() >= 32);\n\n}\n", "file_path": "src/session.rs", "rank": 52, "score": 44977.44662796038 }, { "content": "fn format_time(duration: Duration) -> String {\n\n let secs_part = match duration.as_secs().checked_mul(1_000_000_000) {\n\n Some(v) => v,\n\n None => return format!(\"{}s\", duration.as_secs() as f64),\n\n };\n\n\n\n let duration_in_ns = secs_part + u64::from(duration.subsec_nanos());\n\n\n\n if duration_in_ns < 1_000 {\n\n format!(\"{}ns\", duration_in_ns)\n\n } else if duration_in_ns < 1_000_000 {\n\n format!(\"{:.1}us\", duration_in_ns as f64 / 1_000.0)\n\n } else if duration_in_ns < 1_000_000_000 {\n\n format!(\"{:.1}ms\", duration_in_ns as f64 / 1_000_000.0)\n\n } else {\n\n format!(\"{:.1}s\", duration_in_ns as f64 / 1_000_000_000.0)\n\n }\n\n}\n", "file_path": "src/log.rs", "rank": 53, "score": 41163.69884393028 }, { "content": "/// Turns a 
`Sec-WebSocket-Key` into a `Sec-WebSocket-Accept`.\n\nfn convert_key(input: &str) -> String {\n\n let mut sha1 = Sha1::new();\n\n sha1.update(input.as_bytes());\n\n sha1.update(b\"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\");\n\n\n\n base64::encode_config(&sha1.digest().bytes(), base64::STANDARD)\n\n}\n", "file_path": "src/websocket/mod.rs", "rank": 54, "score": 39983.301787451564 }, { "content": " fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n CgiError::BodyAlreadyExtracted => \"the body of the request was already extracted\",\n\n CgiError::IoError(_) => {\n\n \"could not read the body from the request, or could not execute the CGI program\"\n\n }\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n\n}\n\n\n", "file_path": "src/cgi.rs", "rank": 55, "score": 36769.060521117106 }, { "content": "use std::fmt;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::io::Error as IoError;\n\nuse std::io::Read;\n\nuse std::process::Command;\n\nuse std::process::Stdio;\n\n\n\nuse Request;\n\nuse Response;\n\nuse ResponseBody;\n\n\n\n/// Error that can happen when parsing the JSON input.\n\n#[derive(Debug)]\n\npub enum CgiError {\n\n /// Can't pass through the body of the request because it was already extracted.\n\n BodyAlreadyExtracted,\n\n\n\n /// Could not read the body from the request, or could not execute the CGI program.\n\n IoError(IoError),\n", "file_path": "src/cgi.rs", "rank": 56, "score": 36758.45191665271 }, { "content": "}\n\n\n\nimpl From<IoError> for CgiError {\n\n fn from(err: IoError) -> CgiError {\n\n CgiError::IoError(err)\n\n }\n\n}\n\n\n\nimpl error::Error for CgiError {\n\n #[inline]\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n\n CgiError::IoError(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for CgiError {\n\n #[inline]\n", "file_path": "src/cgi.rs", "rank": 57, "score": 36745.04600657089 }, { "content": "//!\n\n//! 
rouille::start_server(\"localhost:8080\", move |request| {\n\n//! Command::new(\"php-cgi\").start_cgi(request).unwrap()\n\n//! });\n\n//! ```\n\n//!\n\n//! # About the Result returned by start_cgi\n\n//!\n\n//! The `start_cgi` method returns a `Result<Response, std::io::Error>`. This object will contain\n\n//! an error if and only if there was a problem executing the command (for example if it fails to\n\n//! start, or starts then crashes, ...).\n\n//!\n\n//! If the process returns an error 400 or an error 404 for example, then the result will contain\n\n//! `Ok`.\n\n//!\n\n//! It is therefore appropriate to simply call `.unwrap()` on that result. Any panic will be turned\n\n//! into an error 500 and add an entry to the logs, which is probably what you want when your\n\n//! server is misconfigured.\n\n\n\nuse std::error;\n", "file_path": "src/cgi.rs", "rank": 58, "score": 36739.05680962869 }, { "content": " // TODO: `HTTP_` env vars with the headers\n\n\n\n let mut child = self.spawn()?;\n\n\n\n if let Some(mut body) = request.data() {\n\n io::copy(&mut body, child.stdin.as_mut().unwrap())?;\n\n } else {\n\n return Err(CgiError::BodyAlreadyExtracted);\n\n }\n\n\n\n let response = {\n\n let mut stdout = io::BufReader::new(child.stdout.take().unwrap());\n\n\n\n let mut headers = Vec::new();\n\n let mut status_code = 200;\n\n for header in stdout.by_ref().lines() {\n\n let header = header?;\n\n if header.is_empty() {\n\n break;\n\n }\n", "file_path": "src/cgi.rs", "rank": 59, "score": 36736.506654891215 }, { "content": "\n\n let mut splits = header.splitn(2, ':');\n\n let header = splits.next().unwrap(); // TODO: return Err instead?\n\n let val = splits.next().unwrap(); // TODO: return Err instead?\n\n let val = &val[1..];\n\n\n\n if header == \"Status\" {\n\n status_code = val[0..3]\n\n .parse()\n\n .expect(\"Status returned by CGI program is invalid\");\n\n } else {\n\n headers.push((header.to_owned().into(), val.to_owned().into()));\n\n }\n\n }\n\n\n\n Response 
{\n\n status_code,\n\n headers,\n\n data: ResponseBody::from_reader(stdout),\n\n upgrade: None,\n\n }\n\n };\n\n\n\n Ok(response)\n\n }\n\n}\n", "file_path": "src/cgi.rs", "rank": 60, "score": 36727.62150416114 }, { "content": " .env(\"SERVER_PORT\", \"80\") // FIXME:\n\n .env(\"REQUEST_METHOD\", request.method())\n\n .env(\"PATH_INFO\", &request.url()) // TODO: incorrect + what about PATH_TRANSLATED?\n\n .env(\"SCRIPT_NAME\", \"\") // FIXME:\n\n .env(\"QUERY_STRING\", request.raw_query_string())\n\n .env(\"REMOTE_ADDR\", &request.remote_addr().to_string())\n\n .env(\"AUTH_TYPE\", \"\") // FIXME:\n\n .env(\"REMOTE_USER\", \"\") // FIXME:\n\n .env(\n\n \"CONTENT_TYPE\",\n\n &request.header(\"Content-Type\").unwrap_or(\"\"),\n\n )\n\n .env(\n\n \"CONTENT_LENGTH\",\n\n &request.header(\"Content-Length\").unwrap_or(\"\"),\n\n )\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::inherit())\n\n .stdin(Stdio::piped());\n\n\n", "file_path": "src/cgi.rs", "rank": 61, "score": 36726.615838016536 }, { "content": "// Copyright (c) 2016 The Rouille developers\n\n// Licensed under the Apache License, Version 2.0\n\n// <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT\n\n// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,\n\n// at your option. All files in the project carrying such\n\n// notice may not be copied, modified, or distributed except\n\n// according to those terms.\n\n\n\n//! Allows you to let an external process handle the request through CGI.\n\n//!\n\n//! This module provides a trait named `CgiRun` which is implemented on `std::process::Command`.\n\n//! In order to dispatch a request, simply start building a `Command` object and call `start_cgi`\n\n//! on it.\n\n//!\n\n//! ## Example\n\n//!\n\n//! ```no_run\n\n//! use std::process::Command;\n\n//! 
use rouille::cgi::CgiRun;\n", "file_path": "src/cgi.rs", "rank": 62, "score": 36724.82721678923 }, { "content": " if response.is_success() {\n\n return response;\n\n }\n\n }\n\n\n\n // This point of the code is reached only if no static file matched the request URL.\n\n\n\n // In a real website you probably want to serve non-static files here (with the `router!`\n\n // macro for example), but here we just return a 404 response.\n\n Response::html(\n\n \"404 error. Try <a href=\\\"/README.md\\\"`>README.md</a> or \\\n\n <a href=\\\"/src/lib.rs\\\">src/lib.rs</a> for example.\",\n\n )\n\n .with_status_code(404)\n\n });\n\n}\n", "file_path": "examples/static-files.rs", "rank": 63, "score": 34839.33717654173 }, { "content": "// Copyright (c) 2016 The Rouille developers\n\n// Licensed under the Apache License, Version 2.0\n\n// <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT\n\n// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,\n\n// at your option. All files in the project carrying such\n\n// notice may not be copied, modified, or distributed except\n\n// according to those terms.\n\n\n\nextern crate rouille;\n\n\n\nuse rouille::Response;\n\n\n", "file_path": "examples/static-files.rs", "rank": 65, "score": 34829.87188722796 }, { "content": "//! use rouille::content_encoding;\n\n//!\n\n//! fn handle_request(request: &Request) -> Response {\n\n//! let response = Response::text(\"Hello world\");\n\n//! content_encoding::apply(&request, response)\n\n//! }\n\n//! ```\n\nuse input;\n\nuse Request;\n\nuse Response;\n\n\n\n/// Applies content encoding to the response.\n\n///\n\n/// Analyzes the `Accept-Encoding` header of the request. 
If one of the encodings is recognized and\n\n/// supported by rouille, it adds a `Content-Encoding` header to the `Response` and encodes its\n\n/// body.\n\n///\n\n/// If the response already has a `Content-Encoding` header, this function is a no-op.\n\n/// If the response has a `Content-Type` header that isn't textual content, this function is a\n\n/// no-op.\n", "file_path": "src/content_encoding.rs", "rank": 73, "score": 34687.72830243754 }, { "content": " let encoded_response = content_encoding::apply(&request, response);\n\n assert!(encoded_response\n\n .headers\n\n .contains(&(\"Content-Encoding\".into(), \"br\".into()))); // Brotli Content-Encoding header\n\n let mut encoded_content = vec![];\n\n encoded_response\n\n .data\n\n .into_reader_and_size()\n\n .0\n\n .read_to_end(&mut encoded_content)\n\n .unwrap();\n\n assert_eq!(\n\n encoded_content,\n\n vec![\n\n 27, 75, 0, 0, 4, 28, 114, 164, 129, 5, 210, 206, 25, 30, 90, 114, 224, 114, 73,\n\n 109, 45, 196, 23, 126, 240, 144, 77, 40, 26, 211, 228, 67, 73, 40, 236, 55, 101,\n\n 254, 127, 147, 194, 129, 132, 65, 130, 120, 152, 249, 68, 56, 93, 2\n\n ]\n\n ); // Applied proper Brotli encoding\n\n }\n", "file_path": "src/content_encoding.rs", "rank": 74, "score": 34684.098511989054 }, { "content": "\n\n #[test]\n\n fn gzip_encoding() {\n\n let request = {\n\n let h = vec![(\"Accept-Encoding\".to_owned(), \"gzip\".to_owned())];\n\n Request::fake_http(\"GET\", \"/\", h, vec![])\n\n };\n\n let response = Response::html(\n\n \"<html><head><title>Hello world</title><body><p>Hello world</p></body></html>\",\n\n );\n\n\n\n let encoded_response = content_encoding::apply(&request, response);\n\n assert!(encoded_response\n\n .headers\n\n .contains(&(\"Content-Encoding\".into(), \"gzip\".into()))); // gzip Content-Encoding header\n\n let mut encoded_content = vec![];\n\n encoded_response\n\n .data\n\n .into_reader_and_size()\n\n .0\n", "file_path": "src/content_encoding.rs", "rank": 75, "score": 34683.70806917059 }, { 
"content": " }\n\n\n\n #[test]\n\n fn no_req_encodings() {\n\n let request = Request::fake_http(\"GET\", \"/\", vec![], vec![]);\n\n let response = Response::html(\"<p>Hello world</p>\");\n\n let encoded_response = content_encoding::apply(&request, response);\n\n assert!(!encoded_response\n\n .headers\n\n .iter()\n\n .any(|(header_name, _)| header_name == \"Content-Encoding\")); // No Content-Encoding header\n\n let mut encoded_content = vec![];\n\n encoded_response\n\n .data\n\n .into_reader_and_size()\n\n .0\n\n .read_to_end(&mut encoded_content)\n\n .unwrap();\n\n assert_eq!(\n\n String::from_utf8(encoded_content).unwrap(),\n", "file_path": "src/content_encoding.rs", "rank": 76, "score": 34681.44043708837 }, { "content": " Request::fake_http(\"GET\", \"/\", h, vec![])\n\n };\n\n let response = Response::html(\"<p>Hello world</p>\");\n\n\n\n let encoded_response = content_encoding::apply(&request, response);\n\n assert!(encoded_response\n\n .headers\n\n .contains(&(\"Content-Encoding\".into(), \"br\".into()))); // Brotli Content-Encoding header\n\n }\n\n\n\n #[test]\n\n fn brotli_encoding() {\n\n let request = {\n\n let h = vec![(\"Accept-Encoding\".to_owned(), \"br\".to_owned())];\n\n Request::fake_http(\"GET\", \"/\", h, vec![])\n\n };\n\n let response = Response::html(\n\n \"<html><head><title>Hello world</title><body><p>Hello world</p></body></html>\",\n\n );\n\n\n", "file_path": "src/content_encoding.rs", "rank": 77, "score": 34681.04111997023 }, { "content": " .headers\n\n .iter()\n\n .any(|(header_name, _)| header_name == \"Content-Encoding\")); // No Content-Encoding header\n\n let mut encoded_content = vec![];\n\n encoded_response\n\n .data\n\n .into_reader_and_size()\n\n .0\n\n .read_to_end(&mut encoded_content)\n\n .unwrap();\n\n assert_eq!(\n\n String::from_utf8(encoded_content).unwrap(),\n\n \"<p>Hello world</p>\"\n\n ); // No encoding applied\n\n }\n\n\n\n #[test]\n\n fn unknown_req_encoding() {\n\n let request = {\n\n let h = 
vec![(\"Accept-Encoding\".to_owned(), \"x-gzip, br\".to_owned())];\n", "file_path": "src/content_encoding.rs", "rank": 78, "score": 34680.52057120456 }, { "content": " .into_reader_and_size()\n\n .0\n\n .read_to_end(&mut encoded_content)\n\n .unwrap();\n\n assert_eq!(\n\n String::from_utf8(encoded_content).unwrap(),\n\n \"<p>Hello world</p>\"\n\n ); // No encoding applied\n\n }\n\n\n\n #[test]\n\n fn multi_req_encoding() {\n\n let request = {\n\n let h = vec![(\"Accept-Encoding\".to_owned(), \"foo\".to_owned())];\n\n Request::fake_http(\"GET\", \"/\", h, vec![])\n\n };\n\n let response = Response::html(\"<p>Hello world</p>\");\n\n\n\n let encoded_response = content_encoding::apply(&request, response);\n\n assert!(!encoded_response\n", "file_path": "src/content_encoding.rs", "rank": 79, "score": 34680.2890125802 }, { "content": "///\n\n/// The gzip encoding is supported only if you enable the `gzip` feature of rouille (which is\n\n/// enabled by default).\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rouille::content_encoding;\n\n/// use rouille::Request;\n\n/// use rouille::Response;\n\n///\n\n/// fn handle(request: &Request) -> Response {\n\n/// content_encoding::apply(request, Response::text(\"hello world\"))\n\n/// }\n\n/// ```\n", "file_path": "src/content_encoding.rs", "rank": 80, "score": 34679.61643936158 }, { "content": " .read_to_end(&mut encoded_content)\n\n .unwrap();\n\n assert_eq!(\n\n encoded_content,\n\n vec![\n\n 31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 179, 201, 40, 201, 205, 177, 179, 201, 72, 77, 76,\n\n 177, 179, 41, 201, 44, 201, 73, 181, 243, 72, 205, 201, 201, 87, 40, 207, 47, 202,\n\n 73, 177, 209, 135, 8, 217, 36, 229, 167, 84, 218, 217, 20, 160, 202, 21, 216, 217,\n\n 232, 67, 36, 244, 193, 166, 0, 0, 202, 239, 44, 120, 76, 0, 0, 0\n\n ]\n\n ); // Applied proper gzip encoding\n\n }\n\n}\n", "file_path": "src/content_encoding.rs", "rank": 81, "score": 34678.942174691285 }, { "content": "// Copyright (c) 2016 The Rouille developers\n\n// 
Licensed under the Apache License, Version 2.0\n\n// <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT\n\n// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,\n\n// at your option. All files in the project carrying such\n\n// notice may not be copied, modified, or distributed except\n\n// according to those terms.\n\n\n\n//! Apply content encodings (such as gzip compression) to the response.\n\n//!\n\n//! This module provides access to the content encodings supported by a request as well as\n\n//! a function to automatically apply common content encodings to a response.\n\n//! # Basic example\n\n//!\n\n//! Here is a basic example showing how to use content encodings:\n\n//!\n\n//! ```\n\n//! use rouille::Request;\n\n//! use rouille::Response;\n", "file_path": "src/content_encoding.rs", "rank": 82, "score": 34677.81619348241 }, { "content": " \"<p>Hello world</p>\"\n\n ); // No encoding applied\n\n }\n\n\n\n #[test]\n\n fn empty_req_encodings() {\n\n let request = {\n\n let h = vec![(\"Accept-Encoding\".to_owned(), \"\".to_owned())];\n\n Request::fake_http(\"GET\", \"/\", h, vec![])\n\n };\n\n let response = Response::html(\"<p>Hello world</p>\");\n\n\n\n let encoded_response = content_encoding::apply(&request, response);\n\n assert!(!encoded_response\n\n .headers\n\n .iter()\n\n .any(|(header_name, _)| header_name == \"Content-Encoding\")); // No Content-Encoding header\n\n let mut encoded_content = vec![];\n\n encoded_response\n\n .data\n", "file_path": "src/content_encoding.rs", "rank": 83, "score": 34677.75006345108 }, { "content": " if let Some(preferred_index) = input::priority_header_preferred(\n\n accept_encoding_header,\n\n encoding_preference.iter().cloned(),\n\n ) {\n\n match encoding_preference[preferred_index] {\n\n \"br\" => brotli(&mut response),\n\n \"gzip\" | \"x-gzip\" => gzip(&mut response),\n\n _ => (),\n\n }\n\n }\n\n\n\n response\n\n}\n\n\n", "file_path": "src/content_encoding.rs", "rank": 84, "score": 
34674.24305922057 }, { "content": "impl error::Error for PlainTextError {\n\n #[inline]\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n\n PlainTextError::IoError(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for PlainTextError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n PlainTextError::BodyAlreadyExtracted => \"the body of the request was already extracted\",\n\n PlainTextError::WrongContentType => \"the request didn't have a plain text content type\",\n\n PlainTextError::IoError(_) => {\n\n \"could not read the body from the request, or could not execute the CGI program\"\n\n }\n\n PlainTextError::LimitExceeded => \"the limit to the number of bytes has been exceeded\",\n", "file_path": "src/input/plain.rs", "rank": 85, "score": 61.133158594416486 }, { "content": " #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n MultipartError::WrongContentType => {\n\n \"the `Content-Type` header of the request indicates that it doesn't contain \\\n\n multipart data or is invalid\"\n\n }\n\n MultipartError::BodyAlreadyExtracted => {\n\n \"can't parse the body of the request because it was already extracted\"\n\n }\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n\n}\n\n\n\n/// Attempts to decode the content of the request as `multipart/form-data` data.\n", "file_path": "src/input/multipart.rs", "rank": 86, "score": 50.648843447999255 }, { "content": " status_code,\n\n headers,\n\n data: ResponseBody::from_reader(socket),\n\n upgrade: None,\n\n })\n\n}\n\n\n\n/// Error that can happen when calling `full_proxy`.\n\n#[derive(Debug)]\n\npub enum FullProxyError {\n\n /// Can't pass through the body of the request because it was already extracted.\n\n BodyAlreadyExtracted,\n\n}\n\n\n\nimpl error::Error for FullProxyError {}\n\n\n\nimpl fmt::Display for 
FullProxyError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n FullProxyError::BodyAlreadyExtracted => \"the body of the request was already extracted\",\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n\n}\n\n\n", "file_path": "src/proxy.rs", "rank": 87, "score": 47.632909490264204 }, { "content": " #[inline]\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n\n PostFieldError::IoError(ref e) => Some(e),\n\n PostFieldError::WrongDataTypeInt(ref e) => Some(e),\n\n PostFieldError::WrongDataTypeFloat(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for PostFieldError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n PostFieldError::IoError(_) => {\n\n \"could not read the body from the request, or could not execute the CGI program\"\n\n }\n\n PostFieldError::MissingField => \"the field is missing from the request's client\",\n\n PostFieldError::WrongFieldType => \"expected a file but got a field, or vice versa\",\n", "file_path": "src/input/post.rs", "rank": 88, "score": 47.22022458883109 }, { "content": " let description = match *self {\n\n PostError::BodyAlreadyExtracted => \"the body of the request was already extracted\",\n\n PostError::WrongContentType => \"the request didn't have a post content type\",\n\n PostError::IoError(_) => {\n\n \"could not read the body from the request, or could not execute the CGI program\"\n\n }\n\n PostError::NotUtf8(_) => {\n\n \"the content-type encoding is not ASCII or UTF-8, or the body is not valid UTF-8\"\n\n }\n\n PostError::Field { .. 
} => \"failed to parse a requested field\",\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n\n}\n\n\n\n/// Error returned by the methods of [the `DecodePostField` trait](trait.DecodePostField.html).\n\n#[derive(Debug)]\n\npub enum PostFieldError {\n\n /// Could not read the body. Usually happens with files.\n", "file_path": "src/input/post.rs", "rank": 89, "score": 46.90861026776958 }, { "content": " ProxyError::IoError(ref e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ProxyError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n ProxyError::BodyAlreadyExtracted => \"the body of the request was already extracted\",\n\n ProxyError::IoError(_) => {\n\n \"could not read the body from the request, or could not connect to the remote \\\n\n server, or the connection to the remote server closed unexpectedly\"\n\n }\n\n ProxyError::HttpParseError => \"the destination server didn't produce compliant HTTP\",\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n", "file_path": "src/proxy.rs", "rank": 90, "score": 45.866932108909324 }, { "content": "use multipart::server::Multipart as InnerMultipart;\n\n\n\n// TODO: provide wrappers around these\n\npub use multipart::server::MultipartData;\n\npub use multipart::server::MultipartField;\n\n\n\n/// Error that can happen when decoding multipart data.\n\n#[derive(Clone, Debug)]\n\npub enum MultipartError {\n\n /// The `Content-Type` header of the request indicates that it doesn't contain multipart data\n\n /// or is invalid.\n\n WrongContentType,\n\n\n\n /// Can't parse the body of the request because it was already extracted.\n\n BodyAlreadyExtracted,\n\n}\n\n\n\nimpl error::Error for MultipartError {}\n\n\n\nimpl fmt::Display for MultipartError {\n", "file_path": "src/input/multipart.rs", "rank": 91, "score": 41.30119642579137 }, { "content": " /// The `Content-Type` header of the request indicates that it doesn't 
contain POST data.\n\n WrongContentType,\n\n\n\n /// Can't parse the body of the request because it was already extracted.\n\n BodyAlreadyExtracted,\n\n\n\n /// Could not read the body from the request.\n\n IoError(IoError),\n\n\n\n /// Failed to parse a string field.\n\n NotUtf8(String),\n\n\n\n /// There was an error with a particular field.\n\n Field {\n\n field: Cow<'static, str>,\n\n error: PostFieldError,\n\n },\n\n}\n\n\n\nimpl From<IoError> for PostError {\n", "file_path": "src/input/post.rs", "rank": 92, "score": 39.02432283705346 }, { "content": " /// The subprotocol passed to the function was not requested by the client.\n\n WrongSubprotocol,\n\n}\n\n\n\nimpl error::Error for WebsocketError {}\n\n\n\nimpl fmt::Display for WebsocketError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let description = match *self {\n\n WebsocketError::InvalidWebsocketRequest => {\n\n \"the request does not match a websocket request\"\n\n }\n\n WebsocketError::WrongSubprotocol => {\n\n \"the subprotocol passed to the function was not requested by the client\"\n\n }\n\n };\n\n\n\n write!(fmt, \"{}\", description)\n\n }\n\n}\n\n\n\n/// Builds a `Response` that initiates the websocket protocol.\n", "file_path": "src/websocket/mod.rs", "rank": 93, "score": 33.126870251691244 }, { "content": " )*\n\n }\n\n\n\n } else {\n\n let mut multipart = match multipart::get_multipart_input(request) {\n\n Ok(m) => m,\n\n Err(multipart::MultipartError::WrongContentType) => {\n\n return Err(PostError::WrongContentType);\n\n },\n\n Err(multipart::MultipartError::BodyAlreadyExtracted) => {\n\n return Err(PostError::BodyAlreadyExtracted);\n\n },\n\n };\n\n\n\n while let Some(mut multipart_entry) = multipart.next() {\n\n $(\n\n if multipart_entry.headers.name.as_ref() == stringify!($field) {\n\n let config = ();\n\n $(\n\n let config = $config;\n", "file_path": "src/input/post.rs", "rank": 94, "score": 32.160594554218946 }, { "content": " 
#[inline]\n\n fn from(err: IoError) -> PostError {\n\n PostError::IoError(err)\n\n }\n\n}\n\n\n\nimpl error::Error for PostError {\n\n #[inline]\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n\n PostError::IoError(ref e) => Some(e),\n\n PostError::Field { ref error, .. } => Some(error),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for PostError {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n", "file_path": "src/input/post.rs", "rank": 95, "score": 32.060193491251475 }, { "content": "\n\n // TODO: handle if the same field is specified multiple times\n\n\n\n if request.header(\"Content-Type\").map(|ct| ct.starts_with(\"application/x-www-form-urlencoded\")).unwrap_or(false) {\n\n let body = {\n\n // TODO: DDoSable server if body is too large?\n\n let mut out = Vec::new(); // TODO: with_capacity()?\n\n if let Some(mut b) = request.data() {\n\n b.read_to_end(&mut out)?;\n\n } else {\n\n return Err(PostError::BodyAlreadyExtracted);\n\n }\n\n out\n\n };\n\n\n\n for (field, value) in form_urlencoded::parse(&body) {\n\n $(\n\n if field == stringify!($field) {\n\n let config = ();\n\n $(\n", "file_path": "src/input/post.rs", "rank": 96, "score": 30.944533195281835 }, { "content": " BodyAlreadyExtracted,\n\n\n\n /// Could not read the body from the request, or could not connect to the remote server, or\n\n /// the connection to the remote server closed unexpectedly.\n\n IoError(IoError),\n\n\n\n /// The destination server didn't produce compliant HTTP.\n\n HttpParseError,\n\n}\n\n\n\nimpl From<IoError> for ProxyError {\n\n fn from(err: IoError) -> ProxyError {\n\n ProxyError::IoError(err)\n\n }\n\n}\n\n\n\nimpl error::Error for ProxyError {\n\n #[inline]\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n", "file_path": "src/proxy.rs", "rank": 97, "score": 30.908246887482775 }, { "content": " ///\n\n /// The body can only be retrieved once. 
Returns `None` is the body has already been retrieved\n\n /// before.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use std::io::Read;\n\n /// use rouille::{Request, Response, ResponseBody};\n\n ///\n\n /// fn echo(request: &Request) -> Response {\n\n /// let mut data = request.data().expect(\"Oops, body already retrieved, problem \\\n\n /// in the server\");\n\n ///\n\n /// let mut buf = Vec::new();\n\n /// match data.read_to_end(&mut buf) {\n\n /// Ok(_) => (),\n\n /// Err(_) => return Response::text(\"Failed to read body\")\n\n /// };\n\n ///\n", "file_path": "src/lib.rs", "rank": 98, "score": 30.06670445598747 }, { "content": "\n\nimpl<'a> Read for RequestBody<'a> {\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {\n\n self.body.read(buf)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use Request;\n\n\n\n #[test]\n\n fn header() {\n\n let request = Request::fake_http(\n\n \"GET\",\n\n \"/\",\n\n vec![(\"Host\".to_owned(), \"localhost\".to_owned())],\n\n vec![],\n\n );\n", "file_path": "src/lib.rs", "rank": 99, "score": 29.401821785296963 } ]
Rust
src/commands/commands/math.rs
Chronophylos/chb4
8754dbeb16eb36b79118f676fbf1897b582d67ed
use super::prelude::*; use evalexpr::*; use std::f64::consts; static PHI: f64 = 1.61803398874989484820; pub fn command() -> Arc<Command> { Command::with_name("math") .alias("quickmafs") .command(move |_context, args, _msg, _user| { let context = context_map! { "e" => consts::E, "π" => consts::PI, "pi" => consts::PI, "phi" => PHI, "φ" => PHI, "sqrt" => Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Float((int as f64).sqrt())) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.sqrt())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "abs" => Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int.abs())) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.abs())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "floor"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.floor())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "ceil"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.ceil())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "round"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.round())) } else { Err(EvalexprError::expected_number(argument.clone())) } })) } .unwrap(); let expr = args.join(" "); Ok(match eval_with_context(&expr, &context) { Ok(s) => MessageResult::Message(format!("{}", s)), Err(err) => MessageResult::Error(err.to_string()), }) }) .about("Do some math") .description( " This command uses the `evalexpr` crate. This crate allows the definition of constants and functions. 
.Constants |=== | Name | Value | Description | e | 2.71828182845904523536028747135266250f64 | Euler's number (e) | pi, π | 3.14159265358979323846264338327950288f64 | Archimedes' constant (π) |=== .Functions |=== | Name | Description | sqrt(x) | Square root of x | abs(x) | Absolute value of x | floor(x) | Returns the largest integer less than or equal to a number. | ceil(x) | Returns the smallest integer greater than or equal to a number. | round(x) | Returns the nearest integer to a number. Round half-way cases away from `0.0`. |=== ==== USAGE ``` math <expr> ``` Where <expr> is a valid mathematical expression. ", ) .done() }
use super::prelude::*; use evalexpr::*; use std::f64::consts; static PHI: f64 = 1.61803398874989484820; pub fn command() -> Arc<Command> { Command::with_name("math") .alias("quickmafs") .command(move |_context, args, _msg, _user| { let context = context_map! { "e" => consts::E, "π" => consts::PI, "pi" => consts::PI, "phi" => PHI, "φ" => PHI, "sqrt" => Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Float((int as f64).sqrt())) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.sqrt())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "abs" => Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int.abs())) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.abs())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "floor"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.floor())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "ceil"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.ceil())) } else { Err(EvalexprError::expected_number(argument.clone())) } })), "round"=> Function::new(Box::new(|argument| { if let Ok(int) = argument.as_int() { Ok(Value::Int(int)) } else if let Ok(float) = argument.as_float() { Ok(Value::Float(float.round())) } else { Err(EvalexprError::expected_number(argument.clone())) } })) } .unwrap(); let expr = args.join(" ");
}) .about("Do some math") .description( " This command uses the `evalexpr` crate. This crate allows the definition of constants and functions. .Constants |=== | Name | Value | Description | e | 2.71828182845904523536028747135266250f64 | Euler's number (e) | pi, π | 3.14159265358979323846264338327950288f64 | Archimedes' constant (π) |=== .Functions |=== | Name | Description | sqrt(x) | Square root of x | abs(x) | Absolute value of x | floor(x) | Returns the largest integer less than or equal to a number. | ceil(x) | Returns the smallest integer greater than or equal to a number. | round(x) | Returns the nearest integer to a number. Round half-way cases away from `0.0`. |=== ==== USAGE ``` math <expr> ``` Where <expr> is a valid mathematical expression. ", ) .done() }
Ok(match eval_with_context(&expr, &context) { Ok(s) => MessageResult::Message(format!("{}", s)), Err(err) => MessageResult::Error(err.to_string()), })
call_expression
[ { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"system\")\n\n .alias(\"sysstat\")\n\n .command(|context, _args, _msg, _user| {\n\n let sys = System::new();\n\n\n\n let (mem_proc, mem_used, mem_total) =\n\n mem(&sys).context(\"Could not get memory information\")?;\n\n let load_avg = sys.load_average().context(\"Could not get load average\")?;\n\n let uptime = sys.uptime().context(\"Could not get system uptime\")?;\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"Memory usage: {}/{}/{} Load: {} Uptime: {} System Uptime: {}\",\n\n mem_proc.to_string_as(true),\n\n mem_used.to_string_as(true),\n\n mem_total.to_string_as(true),\n\n load_avg.five,\n\n humantime::format_duration(truncate_duration(context.elapsed())),\n\n humantime::format_duration(truncate_duration(uptime)),\n\n )))\n\n })\n\n .about(\"Get information about the Bot and the Server\")\n\n .done()\n\n}\n\n\n", "file_path": "src/commands/commands/system.rs", "rank": 0, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"color\")\n\n .chainable()\n\n .command(|_context, _args, msg, _user| {\n\n let color = msg.color();\n\n Ok(MessageResult::Reply(color.clone()))\n\n })\n\n .about(\"Print your current chat color\")\n\n .description(\n\n \"\n\nThis always prints the hex code.\n\nReturning the actual name of the color is wip an depends on the `twitchchat` crate.\n\n\",\n\n )\n\n .example(\n\n \"\n\n```\n\n> ~color\n\n< Chronophylos, #7700B3\n\n```\",\n\n )\n\n .done()\n\n}\n", "file_path": "src/commands/commands/color.rs", "rank": 1, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"quote\")\n\n .alias(\"quotes\")\n\n .command(\n\n move |context, args, msg, user| match args.get(0).map(String::as_str) {\n\n Some(\"add\") => add(context.clone(), msg, user, args[1..].to_vec()),\n\n Some(\"remove\") | Some(\"delete\") => {\n\n remove(context.clone(), msg, user, 
args.get(1).map(String::as_str))\n\n }\n\n Some(\"edit\") => edit(context.clone(), msg, user, args[1..].to_vec()),\n\n Some(\"show\") => show(context.clone(), args.get(1).map(String::as_str)),\n\n Some(qid) => show(context.clone(), Some(qid)),\n\n None => Ok(MessageResult::Message(String::from(\"Missing sub-command\"))),\n\n },\n\n )\n\n .about(\"Show or manage quotes\")\n\n .description(\n\n \"\n\nUSAGE: quote SUBCOMMAND\n\n quote <quote id>\n", "file_path": "src/commands/commands/quote.rs", "rank": 2, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"version\")\n\n .command(|context, _args, _msg, _user| {\n\n Ok(MessageResult::Message(format!(\n\n \"Currently running CHB4 Version {} ({})\",\n\n context.version, context.git_commit,\n\n )))\n\n })\n\n .about(\"Get the current version\")\n\n .done()\n\n}\n", "file_path": "src/commands/commands/version.rs", "rank": 3, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"admin\")\n\n .command(move |context, args, msg, user| {\n\n let permission = Permission::from_user(msg, user).unwrap();\n\n\n\n if permission != Permission::Owner {\n\n debug!(\n\n \"Permission not high enough (is: {:?}, needed: {:?})\",\n\n permission,\n\n Permission::Owner\n\n );\n\n return Ok(MessageResult::None);\n\n }\n\n\n\n match args.get(0).map(String::as_str) {\n\n Some(\"stop\") => stop(context.clone()),\n\n Some(\"leave\") => leave(context.clone(), args[1..].to_vec()),\n\n Some(\"join\") => join(context.clone(), args[1..].to_vec()),\n\n Some(_) => Ok(MessageResult::Message(\"Unknown sub-command\".into())),\n\n None => Ok(MessageResult::MissingArgument(\"Missing sub-command\")),\n", "file_path": "src/commands/commands/admin.rs", "rank": 5, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"ping\")\n\n .chainable()\n\n .command(move |context, _args, msg, _user| {\n\n let now = 
Utc::now().timestamp_millis() as u64;\n\n let ts = msg.sent_ts();\n\n let latency = Duration::from_millis(now - ts);\n\n let elapsed = context.elapsed();\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"Pong! Latency to TMI: {}. The bot has been running for {}\",\n\n format_duration(latency),\n\n format_duration(truncate_duration(elapsed))\n\n )))\n\n })\n\n .about(\"Get information about the bot instance\")\n\n .done()\n\n}\n", "file_path": "src/commands/commands/ping.rs", "rank": 6, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"voicemail\")\n\n .alias(\"tell\")\n\n .command(move |context, args, msg, _user| {\n\n let user_id = msg.twitch_id().unwrap();\n\n let line = args.join(\" \");\n\n let mut voicemail: Voicemail = match line.parse() {\n\n Ok(v) => v,\n\n Err(err) => {\n\n return Ok(MessageResult::Error(format!(\n\n \"Could not parse voicemail: {}\",\n\n err\n\n )))\n\n }\n\n };\n\n\n\n let conn = &context.conn();\n\n\n\n let channel_name = msg.channel().to_owned();\n\n let channel = database::Channel::by_name(conn, channel_name.trim_start_matches('#'))\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 7, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"man\")\n\n .aliases(vec![\"help\", \"whatis\", \"hilbe\"])\n\n .command(move |context, args, _msg, _user| {\n\n if args.is_empty() {\n\n return Ok(MessageResult::MissingArgument(\"page\"));\n\n }\n\n\n\n let (chapter, name) = match args.get(1) {\n\n Some(name) => (args.get(0), name),\n\n None => (None, args.get(0).unwrap()),\n\n };\n\n\n\n let chapter = chapter.cloned().map(|c| c.into());\n\n\n\n match context.whatis(chapter, name.to_owned()) {\n\n Some(m) => Ok(MessageResult::Message(m.short())),\n\n None => Ok(MessageResult::Message(\"No page found\".into())),\n\n }\n\n })\n", "file_path": "src/commands/commands/man.rs", "rank": 8, "score": 154912.3658627093 }, { "content": "pub fn 
command() -> Arc<Command> {\n\n let uri = Uri::from_static(\"https://fonts.google.com/analytics\");\n\n\n\n // todo cache\n\n async fn stats(uri: Uri) -> Result<FontStats> {\n\n let client = Client::new();\n\n let resp = client.get(uri).await?;\n\n\n\n if !resp.status().is_success() {\n\n SimpleError::new(\"Status code is not Ok\");\n\n }\n\n\n\n let body = hyper::body::aggregate(resp).await?;\n\n\n\n // yank first 5 bytes\n\n body.advance(5);\n\n\n\n // try to parse as json with serde_json\n\n let stats: FontStats = serde_json::from_reader(body.reader())?;\n\n\n", "file_path": "src/commands/commands/font.rs", "rank": 9, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"time\")\n\n .command(|_context, args, _msg, _user| match args.get(0) {\n\n None => Ok(MessageResult::Message(format!(\n\n \"Current Time: {}\",\n\n Utc::now()\n\n ))),\n\n Some(tz) => {\n\n let now = Utc::now();\n\n\n\n Ok(MessageResult::Message(match str_to_offset(tz) {\n\n Ok(zone) => format!(\n\n \"Current Time: {}\",\n\n now.with_timezone(&zone)\n\n .to_rfc3339_opts(SecondsFormat::Secs, false)\n\n ),\n\n Err(_) => {\n\n let zone = match tz.parse::<Tz>() {\n\n Ok(z) => z,\n\n Err(_) => {\n", "file_path": "src/commands/commands/time.rs", "rank": 10, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"lastseen\")\n\n .aliases(vec![\"ls\"])\n\n .command(|context, args, _msg, _user| {\n\n let name = match args.get(0) {\n\n None => return Ok(MessageResult::MissingArgument(\"name\")),\n\n Some(name) => name,\n\n };\n\n\n\n let conn = context.conn();\n\n\n\n let user = User::by_name(&conn, &name.to_lowercase())?;\n\n match user {\n\n Some(user) => {\n\n let now = Utc::now().naive_utc();\n\n let last_seen = user.last_seen.context(\"last_seen is not set\")?;\n\n let duration = now.signed_duration_since(last_seen).to_std()?;\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"The user {} was last seen {} 
ago\",\n", "file_path": "src/commands/commands/lastseen.rs", "rank": 11, "score": 154912.3658627093 }, { "content": "pub fn command() -> Arc<Command> {\n\n Command::with_name(\"test\")\n\n .aliases(vec![\"tset\", \"tets\"])\n\n .command(|_context, args, _msg, _user| {\n\n Ok(MessageResult::Message(if args.is_empty() {\n\n \"Test what?\".into()\n\n } else {\n\n format!(\"Testing {}\", &args.join(\" \"))\n\n }))\n\n })\n\n .about(\"Test everything!\")\n\n .description(\n\n \"\n\nUSAGE: test [TEXT]...\n\n\",\n\n )\n\n .done()\n\n}\n", "file_path": "src/commands/commands/test.rs", "rank": 12, "score": 154912.3658627093 }, { "content": "pub fn all() -> Vec<Arc<Command>> {\n\n vec![\n\n admin::command(),\n\n color::command(),\n\n lastseen::command(),\n\n man::command(),\n\n math::command(),\n\n ping::command(),\n\n quote::command(),\n\n system::command(),\n\n test::command(),\n\n time::command(),\n\n version::command(),\n\n voicemail::command(),\n\n ]\n\n}\n", "file_path": "src/commands/commands/mod.rs", "rank": 13, "score": 140059.38343088876 }, { "content": "pub fn prettify_bool(b: bool) -> &'static str {\n\n if b {\n\n \"✔\"\n\n } else {\n\n \"✘\"\n\n }\n\n}\n", "file_path": "src/helpers.rs", "rank": 14, "score": 124185.09728981921 }, { "content": "fn join(context: Arc<BotContext>, args: Vec<String>) -> Result<MessageResult> {\n\n let channel = match args.get(0) {\n\n Some(c) => c,\n\n None => return Ok(MessageResult::MissingArgument(\"channel\")),\n\n };\n\n\n\n let conn = &context.conn();\n\n\n\n context.twitchbot().join(channel)?;\n\n\n\n Channel::join(conn, &channel)?;\n\n\n\n Ok(MessageResult::Message(format!(\"Joined {}\", channel)))\n\n}\n", "file_path": "src/commands/commands/admin.rs", "rank": 15, "score": 117922.87576198883 }, { "content": "fn leave(context: Arc<BotContext>, args: Vec<String>) -> Result<MessageResult> {\n\n let name = match args.get(0) {\n\n Some(c) => c,\n\n None => return Ok(MessageResult::MissingArgument(\"channel\")),\n\n };\n\n\n\n let 
conn = &context.conn();\n\n let channel = Channel::by_name(conn, name)?;\n\n\n\n let channel = match channel {\n\n Some(c) => c,\n\n None => return Ok(MessageResult::Error(format!(\"I am no in channel {}\", name))),\n\n };\n\n\n\n context.twitchbot().part(name)?;\n\n\n\n channel.leave(conn)?;\n\n\n\n Ok(MessageResult::Message(format!(\"I lef channel {}\", name)))\n\n}\n\n\n", "file_path": "src/commands/commands/admin.rs", "rank": 16, "score": 117922.87576198883 }, { "content": "fn stop(context: Arc<BotContext>) -> Result<MessageResult> {\n\n warn!(\"Stopping bot by command!\");\n\n\n\n // stop the chat client\n\n block_on(async {\n\n info!(\"Stopping chat client\");\n\n context.twitchbot().stop()\n\n });\n\n\n\n info!(\"Stopping process\");\n\n std::process::exit(0);\n\n}\n\n\n", "file_path": "src/commands/commands/admin.rs", "rank": 17, "score": 112025.52941612703 }, { "content": "fn show(context: Arc<BotContext>, qid: Option<&str>) -> Result<MessageResult> {\n\n // unwrap option\n\n let qid: &str = match qid {\n\n Some(qid) => qid,\n\n None => {\n\n return Ok(MessageResult::Error(String::from(\n\n \"Missing argument: quote id\",\n\n )))\n\n }\n\n };\n\n\n\n // parse str to i32\n\n let qid: i32 = qid.parse()?;\n\n\n\n // query quote\n\n let quote = Quote::by_id(&context.conn(), qid)?;\n\n\n\n Ok(MessageResult::Message(match quote {\n\n Some(q) => format!(\"{}\", q),\n\n None => format!(\"No quote with id {} found\", qid),\n\n }))\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 18, "score": 99880.45728568174 }, { "content": "pub fn format(\n\n w: &mut dyn std::io::Write,\n\n now: &mut flexi_logger::DeferredNow,\n\n record: &flexi_logger::Record,\n\n) -> Result<(), std::io::Error> {\n\n let level = record.level();\n\n write!(\n\n w,\n\n \" {} {:<5} {} > {}\",\n\n now.now().format(\"%Y-%m-%d %H:%M:%S\"),\n\n style_level(level, record.level()),\n\n yansi::Paint::new(record.module_path().unwrap_or(\"<unnamed>\")).bold(),\n\n style_message(level, 
&record.args())\n\n )\n\n}\n\n\n", "file_path": "src/log_format.rs", "rank": 19, "score": 99070.17784141048 }, { "content": "pub fn action() -> Arc<Action> {\n\n Action::with_name(\"test\")\n\n .regex(r\"^test\")\n\n .command(|_context, _msg, _user| {\n\n Ok(MessageResult::Message(String::from(if random() {\n\n \"Test successful ppHop\"\n\n } else {\n\n \"Test unsuccessful FeelsBadMan\"\n\n })))\n\n })\n\n .done()\n\n}\n", "file_path": "src/actions/actions/test.rs", "rank": 20, "score": 85155.05264105609 }, { "content": "pub fn action() -> Arc<Action> {\n\n Action::with_name(\"voicemail\")\n\n .command(move |context, _msg, user| {\n\n let conn = &context.conn();\n\n\n\n let voicemails = user.pop(conn).context(\"Could not pop voicemails\")?;\n\n\n\n if voicemails.is_empty() {\n\n trace!(\"No voicemails found\");\n\n return Ok(MessageResult::None);\n\n }\n\n\n\n trace!(\"Found {} voicemails\", voicemails.len());\n\n\n\n Ok(MessageResult::Message(Voicemail::format_vec(\n\n conn, voicemails,\n\n )?))\n\n })\n\n .noisy()\n\n .done()\n\n}\n", "file_path": "src/actions/actions/voicemail.rs", "rank": 21, "score": 85155.05264105609 }, { "content": "pub fn action() -> Arc<Action> {\n\n Action::with_name(\"flamongo\")\n\n .regex(r\"\\br[io]ngo\\b\")\n\n .command(move |_context, _msg, _user| {\n\n let range = Uniform::new(0, FLAMONGOS.len());\n\n let flamongo = FLAMONGOS[range.sample(&mut thread_rng())].to_owned();\n\n\n\n Ok(MessageResult::Message(flamongo.clone()))\n\n })\n\n .done()\n\n}\n", "file_path": "src/actions/actions/flamongo.rs", "rank": 22, "score": 85155.05264105609 }, { "content": "fn edit(\n\n context: Arc<BotContext>,\n\n msg: Message,\n\n user: &User,\n\n args: Vec<String>,\n\n) -> Result<MessageResult> {\n\n // unwrap option\n\n let qid: &str = match args.get(0) {\n\n Some(q) => q,\n\n None => return Ok(MessageResult::Message(String::from(\"Missing quote id\"))),\n\n };\n\n\n\n let permission = Permission::from_user(msg, &user).unwrap();\n\n\n\n // check if 
permission is at least friend\n\n if permission < Permission::Friend {\n\n debug!(\"Permission not high enough\");\n\n return Ok(MessageResult::None);\n\n }\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 23, "score": 83635.99848249054 }, { "content": "fn remove(\n\n context: Arc<BotContext>,\n\n msg: Message,\n\n user: &User,\n\n qid: Option<&str>,\n\n) -> Result<MessageResult> {\n\n // unwrap option\n\n let qid: &str = match qid {\n\n Some(qid) => qid,\n\n None => {\n\n return Ok(MessageResult::Error(String::from(\n\n \"Missing argument: quote id\",\n\n )))\n\n }\n\n };\n\n\n\n let permission = Permission::from_user(msg, &user).unwrap();\n\n\n\n // check if permission is at least friend\n\n if permission < Permission::Friend {\n", "file_path": "src/commands/commands/quote.rs", "rank": 24, "score": 83635.99848249054 }, { "content": "fn add(\n\n context: Arc<BotContext>,\n\n msg: Message,\n\n user: &User,\n\n args: Vec<String>,\n\n) -> Result<MessageResult> {\n\n let permission = Permission::from_user(msg, &user).unwrap();\n\n\n\n // check if permission is at least friend\n\n if permission < Permission::Friend {\n\n debug!(\"Permission not high enough\");\n\n return Ok(MessageResult::None);\n\n }\n\n\n\n let msg = args.join(\" \");\n\n let (message, author, authored) = match parse_quote(&msg) {\n\n Ok(t) => t,\n\n Err(err) => return Ok(MessageResult::Error(err.to_string())),\n\n };\n\n\n\n // insert quote\n\n let quote = Quote::new(&context.conn(), user.id, author, authored, message)?;\n\n\n\n info!(\"Added quote {} (id: {})\", quote, quote.id);\n\n Ok(MessageResult::Message(format!(\n\n \"Added new quote with id {}\",\n\n quote.id\n\n )))\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 25, "score": 83635.99848249054 }, { "content": "pub fn all() -> Vec<Arc<Action>> {\n\n vec![test::action(), voicemail::action(), flamongo::action()]\n\n}\n", "file_path": "src/actions/actions/mod.rs", "rank": 26, "score": 82545.8955103373 }, { 
"content": "pub fn truncate_duration(dur: Duration) -> Duration {\n\n Duration::from_secs(dur.as_secs())\n\n}\n\n\n", "file_path": "src/helpers.rs", "rank": 27, "score": 82545.8955103373 }, { "content": "/// voicemail = recipients [schedule] SP message\n\npub fn parse_voicemail<'a>(i: &'a str) -> IResult<&'a str, Voicemail> {\n\n let (i, recipients) = parse_recipients(i)?;\n\n let (i, schedule) = opt(parse_schedule_with_space)(i)?;\n\n let (i, message) = parse_message(i)?;\n\n\n\n Ok((\n\n i,\n\n Voicemail {\n\n recipients: recipients.iter().map(|&x| x.to_owned()).collect(),\n\n message: message.to_owned(),\n\n schedule,\n\n },\n\n ))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/voicemail/parser.rs", "rank": 28, "score": 69543.23788829288 }, { "content": "fn str_to_offset(name: &str) -> Result<FixedOffset> {\n\n let hour = 3600;\n\n let minute = 60;\n\n\n\n let offset = match name.to_uppercase().as_str() {\n\n \"ACDT\" => FixedOffset::east(10 * hour + 30 * minute), // Australian Central Daylight Saving Time\n\n \"ACST\" => FixedOffset::east(9 * hour + 30 * minute), // Australian Central Standard Time\n\n \"ACT\" => FixedOffset::west(5 * hour), // Acre Time\n\n \"ACWST\" => FixedOffset::east(8 * hour + 45 * minute), // Australian Central Western Standard Time (unofficial)\n\n \"ADT\" => FixedOffset::west(3 * hour), // Atlantic Daylight Time\n\n \"AEDT\" => FixedOffset::east(11 * hour), // Australian Eastern Daylight Saving Time\n\n \"AEST\" => FixedOffset::east(10 * hour), // Australian Eastern Standard Time\n\n \"AET\" => FixedOffset::east(11 * hour), // Australian Eastern Time\n\n \"AFT\" => FixedOffset::east(4 * hour + 30 * minute), // Afghanistan Time\n\n \"AKDT\" => FixedOffset::west(8 * hour), // Alaska Daylight Time\n\n \"AKST\" => FixedOffset::west(9 * hour), // Alaska Standard Time\n\n \"ALMT\" => FixedOffset::east(6 * hour), // Alma-Ata Time\n\n \"AMST\" => FixedOffset::west(3 * hour), // Amazon Summer 
Time (Brazil)\n\n \"AMT\" => FixedOffset::west(4 * hour), // Amazon Time (Brazil)\n\n //\"AMT\" => FixedOffset::east(4 * hour), // Armenia Time\n", "file_path": "src/commands/commands/time.rs", "rank": 29, "score": 65088.2537181326 }, { "content": "fn parse_quote(msg: &str) -> Result<(&str, &str, &str)> {\n\n // parse quote\n\n let caps = RE.captures(&msg).context(\"Regex does not match\")?;\n\n\n\n let message = caps.get(1).unwrap().as_str();\n\n let author = caps.get(3).unwrap().as_str();\n\n let authored = caps.get(4).unwrap().as_str();\n\n\n\n ensure!(message.len() < 400, \"Quote is too long, max lenght is 400\");\n\n ensure!(\n\n author.len() < 400,\n\n \"Author name is too long, max lenght is 25\"\n\n );\n\n ensure!(authored.len() < 400, \"Date is too long, max lenght is 25\");\n\n\n\n Ok((message, author, authored))\n\n}\n", "file_path": "src/commands/commands/quote.rs", "rank": 30, "score": 61567.20221091189 }, { "content": "fn mem(sys: &System) -> Result<(ByteSize, ByteSize, ByteSize)> {\n\n let mem_proc = statm_self()\n\n .context(\"Could not get process memory stats\")?\n\n .size;\n\n let mem_proc = ByteSize::b(mem_proc as u64);\n\n\n\n let memory = sys\n\n .memory()\n\n .context(\"Could not get sysmtem memory information\")?;\n\n let mem_used = ByteSize::b(memory.total.as_u64() - memory.free.as_u64());\n\n\n\n Ok((mem_proc, mem_used, memory.total))\n\n}\n", "file_path": "src/commands/commands/system.rs", "rank": 31, "score": 58661.79329257545 }, { "content": "fn main() {\n\n let output = Command::new(\"git\")\n\n .args(&[\"show-ref\", \"--hash\", \"--abbrev\"])\n\n .output()\n\n .unwrap();\n\n let git_hash = String::from_utf8(output.stdout).unwrap();\n\n println!(\"cargo:rustc-env=GIT_HASH={}\", git_hash);\n\n}\n", "file_path": "build.rs", "rank": 32, "score": 53563.26458353206 }, { "content": "pub trait SimpleHandler {\n\n fn name(&self) -> &str;\n\n}\n", "file_path": "src/handler.rs", "rank": 33, "score": 52380.951097788246 }, { "content": "pub trait 
ManpageProducer {\n\n fn get_manpage(&self) -> Manpage;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Manpage {\n\n names: Vec<String>,\n\n pub chapter: ChapterName,\n\n about: String,\n\n description: String,\n\n example: Option<String>,\n\n characteristics: Vec<(String, String)>,\n\n}\n\n\n\nimpl Manpage {\n\n pub fn new(\n\n names: Vec<String>,\n\n chapter: ChapterName,\n\n about: String,\n\n description: String,\n", "file_path": "src/manpages/manpage.rs", "rank": 34, "score": 51004.91947724436 }, { "content": "pub trait MessageConsumer: Send + Sync {\n\n fn name(&self) -> &str;\n\n fn whitelisted(&self) -> bool;\n\n\n\n fn consume(\n\n &self,\n\n context: Arc<BotContext>,\n\n args: Vec<String>,\n\n msg: Message,\n\n user: &User,\n\n ) -> Result<MessageResult>;\n\n}\n\n\n\npub enum Message<'a> {\n\n TwitchPrivmsg(Arc<Privmsg<'a>>),\n\n}\n\n\n\nimpl Message<'_> {\n\n pub fn channel(&self) -> &str {\n\n match self {\n", "file_path": "src/message.rs", "rank": 35, "score": 46146.38350951766 }, { "content": "#[cfg(test)]\n\nfn now() -> NaiveDateTime {\n\n NaiveDate::from_ymd(2000, 1, 1).and_hms(0, 0, 0)\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", "rank": 36, "score": 44150.27490700815 }, { "content": "#[async_trait]\n\npub trait Twitch: SimpleHandler + Send + Sync {\n\n async fn handle(&self, msg: Arc<Privmsg<'_>>, user: &User) -> Result<()>;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 37, "score": 43573.00128131079 }, { "content": "fn is_number(c: char) -> bool {\n\n c.is_digit(10)\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", "rank": 38, "score": 42438.20378433369 }, { "content": "pub trait Handler<T>: Twitch + Send + Sync\n\nwhere\n\n T: MessageConsumer,\n\n{\n\n fn get(&self, name: String) -> Option<Arc<T>>;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 39, "score": 42310.58373950896 }, { "content": "fn is_recipent_name(c: char) -> bool {\n\n c.is_ascii_alphanumeric() || c == '_'\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", 
"rank": 40, "score": 41232.87506872355 }, { "content": "fn to_chrono_duration(d: Duration) -> chrono::Duration {\n\n match chrono::Duration::from_std(d) {\n\n Ok(d) => d,\n\n Err(_) => {\n\n if d.as_secs() > 0 {\n\n chrono::Duration::max_value()\n\n } else {\n\n chrono::Duration::min_value()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", "rank": 41, "score": 38972.89422902651 }, { "content": "use crate::{\n\n context::BotContext,\n\n database::User,\n\n helpers::prettify_bool,\n\n manpages::{ChapterName, Manpage, ManpageProducer},\n\n message::{Message, MessageConsumer, MessageResult},\n\n};\n\nuse anyhow::Result;\n\nuse std::{fmt, sync::Arc};\n\n\n\npub type CommandFunction = Box<\n\n dyn Fn(Arc<BotContext>, Vec<String>, Message, &User) -> Result<MessageResult>\n\n + Send\n\n + Sync\n\n + 'static,\n\n>;\n\n\n\n// I want trait aliases PepeHands\n\n// pub type CommandFunctionImpl =\n\n// impl Fn(Vec<String>, Message, &User) -> Result + Send + Sync + 'static;\n", "file_path": "src/commands/command.rs", "rank": 42, "score": 37815.453414616866 }, { "content": " self.description = Some(text);\n\n self\n\n }\n\n\n\n pub fn example(mut self, text: &'static str) -> Self {\n\n self.example = Some(text);\n\n self\n\n }\n\n\n\n pub fn command(\n\n mut self,\n\n f: impl Fn(Arc<BotContext>, Vec<String>, Message, &User) -> Result<MessageResult>\n\n + Send\n\n + Sync\n\n + 'static,\n\n ) -> Self {\n\n self.command = Some(Box::new(f));\n\n self\n\n }\n\n\n\n pub fn done(self) -> Arc<Command> {\n\n Arc::new(Command { ..self.into() })\n\n }\n\n}\n", "file_path": "src/commands/command.rs", "rank": 43, "score": 37814.64337065273 }, { "content": " if self.aliases.is_some() {\n\n warn!(\n\n \"alias is used to everwrite the current alias (command: {})\",\n\n self.name.unwrap_or(\"unnamed\")\n\n )\n\n }\n\n\n\n self.aliases = Some(vec![a]);\n\n self\n\n }\n\n\n\n pub fn aliases(mut self, a: Vec<&'static str>) -> Self {\n\n if self.aliases.is_some() {\n\n warn!(\n\n 
\"aliases is used to everwrite the current alias (command: {})\",\n\n self.name.unwrap_or(\"unnamed\")\n\n )\n\n }\n\n\n\n self.aliases = Some(a);\n", "file_path": "src/commands/command.rs", "rank": 44, "score": 37814.48858471132 }, { "content": " example: self.example,\n\n command: self.command.unwrap(),\n\n }\n\n }\n\n}\n\n\n\n/// Builder functions\n\nimpl CommandBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn with_name(name: &'static str) -> Self {\n\n Self {\n\n name: Some(name),\n\n ..Self::new()\n\n }\n\n }\n\n\n\n pub fn alias(mut self, a: &'static str) -> Self {\n", "file_path": "src/commands/command.rs", "rank": 45, "score": 37813.2273790281 }, { "content": " aliases: Option<Vec<&'static str>>,\n\n chainable: Option<bool>,\n\n whitelisted: Option<bool>,\n\n about: Option<&'static str>,\n\n description: Option<&'static str>,\n\n example: Option<&'static str>,\n\n command: Option<CommandFunction>,\n\n}\n\n\n\nimpl Into<Command> for CommandBuilder {\n\n fn into(self) -> Command {\n\n Command {\n\n name: self\n\n .name\n\n .unwrap_or_else(|| panic!(\"Missing name for command\")),\n\n aliases: self.aliases.unwrap_or_default(),\n\n chainable: self.chainable.unwrap_or(false),\n\n whitelisted: self.whitelisted.unwrap_or(false),\n\n about: self.about.unwrap_or(\"about missing\"),\n\n description: self.description.unwrap_or(\"description missing\"),\n", "file_path": "src/commands/command.rs", "rank": 46, "score": 37812.07083385338 }, { "content": "\n\npub struct Command {\n\n name: &'static str,\n\n aliases: Vec<&'static str>,\n\n chainable: bool,\n\n whitelisted: bool,\n\n about: &'static str,\n\n description: &'static str,\n\n example: Option<&'static str>,\n\n command: CommandFunction,\n\n}\n\n\n\nimpl Command {\n\n pub fn aliases(&self) -> Vec<&str> {\n\n self.aliases.clone()\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn chainable(&self) -> bool {\n\n self.chainable\n", "file_path": "src/commands/command.rs", "rank": 47, "score": 
37811.76541940077 }, { "content": " &self,\n\n context: Arc<BotContext>,\n\n args: Vec<String>,\n\n msg: Message,\n\n user: &User,\n\n ) -> Result<MessageResult> {\n\n info!(\"Executing command {} with args {:?}\", self.name, args);\n\n (self.command)(context, args, msg, &user)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Command {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Action\")\n\n .field(\"name\", &self.name)\n\n .field(\"aliases\", &self.aliases)\n\n .field(\"chainable\", &self.chainable)\n\n .field(\"whitelisted\", &self.whitelisted)\n\n .field(\"about\", &self.about)\n\n .field(\"description\", &self.description)\n", "file_path": "src/commands/command.rs", "rank": 48, "score": 37810.45421115291 }, { "content": " }\n\n}\n\n\n\n/// Shadow constructor for `CommandBuilder`\n\nimpl Command {\n\n pub fn with_name(name: &'static str) -> CommandBuilder {\n\n CommandBuilder::with_name(name)\n\n }\n\n}\n\n\n\nimpl MessageConsumer for Command {\n\n fn name(&self) -> &str {\n\n self.name\n\n }\n\n\n\n fn whitelisted(&self) -> bool {\n\n self.whitelisted\n\n }\n\n\n\n fn consume(\n", "file_path": "src/commands/command.rs", "rank": 49, "score": 37809.622420127205 }, { "content": " (\n\n String::from(\"whitelisted\"),\n\n prettify_bool(self.whitelisted).to_owned(),\n\n ),\n\n ];\n\n\n\n Manpage::new(\n\n names,\n\n ChapterName::Command,\n\n self.about.to_owned(),\n\n self.description.to_owned(),\n\n self.example.map(|s| s.to_owned()),\n\n characteristics,\n\n )\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CommandBuilder {\n\n name: Option<&'static str>,\n", "file_path": "src/commands/command.rs", "rank": 50, "score": 37808.64023894465 }, { "content": " self\n\n }\n\n\n\n pub fn chainable(mut self) -> Self {\n\n self.chainable = Some(true);\n\n self\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn whitelisted(mut self) -> Self {\n\n self.whitelisted = Some(true);\n\n self\n\n }\n\n\n\n pub fn about(mut self, text: &'static str) -> Self 
{\n\n self.about = Some(text);\n\n self\n\n }\n\n\n\n pub fn description(mut self, text: &'static str) -> Self {\n", "file_path": "src/commands/command.rs", "rank": 51, "score": 37807.82459724775 }, { "content": " .field(\"example\", &self.example)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl ManpageProducer for Command {\n\n fn get_manpage(&self) -> Manpage {\n\n let mut names = self.aliases.clone();\n\n names.insert(0, self.name);\n\n\n\n let names = names\n\n .iter()\n\n .map(|x| (*x).to_string())\n\n .collect::<Vec<String>>();\n\n\n\n let characteristics = vec![\n\n (\n\n String::from(\"chainable\"),\n\n prettify_bool(self.chainable).to_owned(),\n\n ),\n", "file_path": "src/commands/command.rs", "rank": 52, "score": 37802.96530803949 }, { "content": "pub use crate::{\n\n commands::command::Command,\n\n context::BotContext,\n\n database,\n\n helpers::{truncate_duration, Permission},\n\n message::{Message, MessageResult},\n\n};\n\npub use anyhow::{bail, ensure, Context, Result};\n\npub use std::sync::Arc;\n\npub use twitchchat::messages::Privmsg;\n", "file_path": "src/commands/commands/prelude.rs", "rank": 55, "score": 36407.8416098493 }, { "content": "use super::prelude::*;\n\n\n\nuse crate::database::{Quote, User};\n\nuse crate::helpers::Permission;\n\nuse regex::Regex;\n\n\n\nlazy_static! {\n\n static ref RE: Regex = {\n\n trace!(\"Compiling regex for `quote`\");\n\n Regex::new(r#\"^\"(.*)\"( - )?(.*) ([0-9:\\-\\.]+)$\"#).unwrap()\n\n };\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 58, "score": 36403.08904508412 }, { "content": "use super::prelude::*;\n\nuse crate::voicemail::Voicemail;\n\nuse chrono::prelude::*;\n\nuse humantime::format_duration;\n\n\n\nlazy_static! 
{\n\n static ref SEPERATORS: Vec<&'static str> = vec![\"&&\", \"and\", \"und\"];\n\n}\n\n\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 60, "score": 36401.424102661425 }, { "content": " Ok(stats)\n\n }\n\n\n\n fn font(name: &str) -> MessageResult {\n\n MessageResult::Message(String::from(\"Not implemented\"))\n\n }\n\n\n\n fn total() -> MessageResult {\n\n let stats = async {\n\n stats(uri).await;\n\n };\n\n MessageResult::Message(format!(\"Total views: {}\", stats.totalViews))\n\n }\n\n\n\n Command::with_name(\"font\")\n\n .command(move |_context, args, msg| match args.get(0) {\n\n Some(name) => font(name),\n\n None => total(),\n\n })\n\n .done()\n\n}\n", "file_path": "src/commands/commands/font.rs", "rank": 61, "score": 36400.79215382507 }, { "content": "use crate::commands::command::Command;\n\nuse std::sync::Arc;\n\n\n\nmod prelude;\n\n\n\nmod admin;\n\nmod color;\n\nmod lastseen;\n\nmod man;\n\nmod math;\n\nmod ping;\n\nmod quote;\n\nmod system;\n\nmod test;\n\nmod time;\n\nmod version;\n\nmod voicemail;\n\n\n", "file_path": "src/commands/commands/mod.rs", "rank": 63, "score": 36399.96675997724 }, { "content": "use super::prelude::*;\n\nuse chrono::prelude::*;\n\nuse chrono_tz::Tz;\n\nuse thiserror::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub enum TimeError {\n\n #[error(\"Unkown Offset: {0}\")]\n\n UnknownOffset(String),\n\n}\n\n\n", "file_path": "src/commands/commands/time.rs", "rank": 64, "score": 36399.79978904397 }, { "content": " .context(\"Could not get channel from database\")?\n\n .context(\"Channel is not in database\")?;\n\n\n\n let bot_name = context.bot_name();\n\n voicemail.recipients.retain(|x| x != &bot_name);\n\n\n\n let now = Utc::now().naive_utc();\n\n let voicemails =\n\n database::Voicemail::new(conn, &voicemail, user_id as i64, channel.id, now)\n\n .context(\"Could not insert voicemail(s) to database\")?;\n\n if voicemail.schedule.is_none() {\n\n Ok(MessageResult::Message(format!(\n\n \"I'll send that message to {} when 
they next type in chat.\",\n\n voicemail.recipients.join(\", \")\n\n )))\n\n } else {\n\n // actually schedule voicemail\n\n\n\n for voicemail in voicemails {\n\n context.scheduler().schedule(voicemail).unwrap();\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 65, "score": 36398.44181619051 }, { "content": "use super::prelude::*;\n\nuse bytes::buf::BufExt as _;\n\nuse bytes::Buf;\n\nuse hyper::Client;\n\nuse hyper::Uri;\n\nuse serde_json::Value;\n\nuse simple_error::SimpleError;\n\nuse std::error::Error;\n\n\n", "file_path": "src/commands/commands/font.rs", "rank": 66, "score": 36397.922806574825 }, { "content": "use super::prelude::*;\n\nuse chrono::prelude::*;\n\nuse humantime::format_duration;\n\nuse std::time::Duration;\n\n\n", "file_path": "src/commands/commands/ping.rs", "rank": 67, "score": 36397.814517579995 }, { "content": "use super::prelude::*;\n\nuse crate::database::User;\n\nuse chrono::prelude::*;\n\n\n", "file_path": "src/commands/commands/lastseen.rs", "rank": 68, "score": 36397.74519188465 }, { "content": "use super::prelude::*;\n\nuse crate::database::Channel;\n\n\n\nuse futures_executor::block_on;\n\n\n", "file_path": "src/commands/commands/admin.rs", "rank": 69, "score": 36397.70516233171 }, { "content": "use super::prelude::*;\n\nuse procinfo::pid::statm_self;\n\nuse systemstat::{ByteSize, Platform, System};\n\n\n", "file_path": "src/commands/commands/system.rs", "rank": 70, "score": 36397.58942895032 }, { "content": " .about(\"Get help about a command\")\n\n .description(\"\n\nProvides manuals to every command or action. The manuals are split into different chapters.\n\nChapter `action` contains Actions such as the `voicemail (1)` action that allows replaying of messages.\n\nChapter `command` contains Commands like this `man (2)` command or the `voicemail (2)` command.\n\n\n\nThis command gives a short overview over a command. 
The full manual is available on the web.\n\n\n\nUSAGE: man [CHAPTER] PAGE\n\n\")\n\n.example(\"\n\n```\n\n< man action voicemail\n\n> voicemail - redeems voicemails created with tell https://crate.chronophylos.com/action/voicemail\n\n```\n\n\")\n\n .done()\n\n}\n", "file_path": "src/commands/commands/man.rs", "rank": 71, "score": 36397.54707586636 }, { "content": "use super::prelude::*;\n\n\n", "file_path": "src/commands/commands/version.rs", "rank": 72, "score": 36397.167842866096 }, { "content": "use super::prelude::*;\n\n\n", "file_path": "src/commands/commands/color.rs", "rank": 73, "score": 36397.167842866096 }, { "content": "use super::prelude::*;\n\n\n", "file_path": "src/commands/commands/test.rs", "rank": 74, "score": 36397.167842866096 }, { "content": "use super::prelude::*;\n\n\n", "file_path": "src/commands/commands/man.rs", "rank": 75, "score": 36397.167842866096 }, { "content": " }\n\n })\n\n .about(\"Various commands to manage the bot.\")\n\n .description(\n\n r#\"\n\nNOTE: This is a owner only command!\n\n\n\n=== USAGE\n\n\n\n```\n\nadmin SUBCOMMAND\n\n```\n\n\n\n.SUBCOMMAND\n\n* `stop` -- stop the bot\n\n* `leave CHANNEL` -- leave a channel\n\n* `join CHANNEL` -- join a channel\n\n\"#,\n\n )\n\n .done()\n\n}\n\n\n", "file_path": "src/commands/commands/admin.rs", "rank": 76, "score": 36396.84720133177 }, { "content": " }\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"I'll send that message to {} in {}\",\n\n voicemail.recipients.join(\", \"),\n\n format_duration(\n\n voicemail\n\n .schedule\n\n .unwrap()\n\n .signed_duration_since(now)\n\n .to_std()\n\n .unwrap_or_default()\n\n ),\n\n )))\n\n }\n\n })\n\n .about(\"Send messages to other users or yourself\")\n\n .description(\n\n \"\n\n=== USAGE\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 77, "score": 36396.754188958075 }, { "content": " debug!(\"Permission not high enough\");\n\n return Ok(MessageResult::None);\n\n }\n\n\n\n // parse str to i32\n\n let qid: i32 = 
qid.parse()?;\n\n\n\n let conn = &context.conn();\n\n\n\n // query quote\n\n let quote = Quote::by_id(conn, qid)?;\n\n\n\n let quote = match quote {\n\n Some(q) => q,\n\n None => {\n\n return Ok(MessageResult::Message(format!(\n\n \"No quote with id {} found\",\n\n qid\n\n )))\n\n }\n", "file_path": "src/commands/commands/quote.rs", "rank": 78, "score": 36395.43724090172 }, { "content": " return Ok(MessageResult::Message(String::from(\n\n \"You do not have permissions for this quote\",\n\n )));\n\n }\n\n\n\n let msg = args.join(\" \");\n\n let (message, author, authored) = match parse_quote(&msg) {\n\n Ok(t) => t,\n\n Err(err) => return Ok(MessageResult::Error(err.to_string())),\n\n };\n\n\n\n let quote = quote.update(conn, author, authored, message)?;\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"Updated quote {}\",\n\n quote.id\n\n )))\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 79, "score": 36395.43716975573 }, { "content": " // parse str to i32\n\n let qid: i32 = qid.parse()?;\n\n\n\n let conn = &context.conn();\n\n\n\n // query quote\n\n let quote = Quote::by_id(conn, qid)?;\n\n\n\n let quote = match quote {\n\n Some(q) => q,\n\n None => {\n\n return Ok(MessageResult::Message(format!(\n\n \"No quote with id {} found\",\n\n qid\n\n )))\n\n }\n\n };\n\n\n\n // check permissions\n\n if quote.creator_id != user.id && permission != Permission::Owner {\n", "file_path": "src/commands/commands/quote.rs", "rank": 80, "score": 36395.38625067251 }, { "content": " \"HMT\" => FixedOffset::east(5 * hour), // Heard and McDonald Islands Time\n\n \"HOVST\" => FixedOffset::east(8 * hour), // Hovd Summer Time (not used from 2017-present)\n\n \"HOVT\" => FixedOffset::east(7 * hour), // Hovd Time\n\n \"ICT\" => FixedOffset::east(7 * hour), // Indochina Time\n\n \"IDLW\" => FixedOffset::west(12 * hour), // International Day Line West time zone\n\n \"IDT\" => FixedOffset::east(3 * hour), // Israel Daylight Time\n\n \"IOT\" => FixedOffset::east(3 * hour), 
// Indian Ocean Time\n\n \"IRDT\" => FixedOffset::east(4 * hour + 30 * minute), // Iran Daylight Time\n\n \"IRKT\" => FixedOffset::east(8 * hour), // Irkutsk Time\n\n \"IRST\" => FixedOffset::east(3 * hour + 30 * minute), // Iran Standard Time\n\n \"IST\" => FixedOffset::east(5 * hour + 30 * minute), // Indian Standard Time\n\n //\"IST\" => FixedOffset::east(1 * hour), // Irish Standard Time\n\n //\"IST\" => FixedOffset::east(2 * hour), // Israel Standard Time\n\n \"JST\" => FixedOffset::east(9 * hour), // Japan Standard Time\n\n \"KALT\" => FixedOffset::east(2 * hour), // Kaliningrad Time\n\n \"KGT\" => FixedOffset::east(6 * hour), // Kyrgyzstan Time\n\n \"KOST\" => FixedOffset::east(11 * hour), // Kosrae Time\n\n \"KRAT\" => FixedOffset::east(7 * hour), // Krasnoyarsk Time\n\n \"KST\" => FixedOffset::east(9 * hour), // Korea Standard Time\n\n \"LHST\" => FixedOffset::east(10 * hour + 30 * minute), // Lord Howe Standard Time\n", "file_path": "src/commands/commands/time.rs", "rank": 81, "score": 36394.20017590726 }, { "content": "\n\n```\n\ntell RECIPIENTS [SCHEDULE] MESSAGE\n\n```\n\n\n\n==== RECIPIENTS\n\n\n\nA separated list of recipients.\n\nValid separators are `and`, `und`, `&&` and `,`.\n\n\n\n.Example:\n\n nymn and pajlada\n\n\n\n==== SCHEDLUE\n\n\n\nTo schedule a voicemail you need a marker and a value.\n\n\n\n[cols=3*,options=header]\n\n|===\n\n| type\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 82, "score": 36393.45031924608 }, { "content": " };\n\n\n\n // check permissions\n\n if quote.creator_id != user.id && permission != Permission::Owner {\n\n return Ok(MessageResult::Message(String::from(\n\n \"You do not have permissions for this quote\",\n\n )));\n\n }\n\n\n\n quote.remove(conn)?;\n\n\n\n Ok(MessageResult::Message(format!(\n\n \"Removed quote with id {}\",\n\n qid\n\n )))\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 83, "score": 36393.45031924608 }, { "content": "| markers\n\n| value\n\n\n\n| relative\n\n| 
`in`\n\n| A number and a unit, like `20 minutes` or `1 day`. You can even combine values: `1 day 12 hours`\n\n\n\n| absolute\n\n| `on`, `at`\n\n| A https://tools.ietf.org/html/rfc3339[RFC3339] Date-Time formatted string.\n\nRFC2822 and keywords like `noon` are wip.\n\n|===\n\n\n\n.Example:\n\n in 20 minutes 2 hours\n\n at 2020-02-20 20:20\n\n\",\n\n )\n\n .done()\n\n}\n", "file_path": "src/commands/commands/voicemail.rs", "rank": 84, "score": 36393.45031924608 }, { "content": "\n\nSUBCOMMANDS:\n\n add - add a new quote\n\n remove - remove a quote\n\n edit - edit a quote you made\n\n show - show a quote you made\n\n random - show a random quote (Not Implemented)\n\n\n\nNOTES:\n\n You need friend permissions to add quotes.\n\n\n\n When adding a quote you have to follow a specific format:\n\n ```\n\n \\\"<message>\\\" - <author> <date>\n\n ```\n\n\",\n\n )\n\n .done()\n\n}\n\n\n", "file_path": "src/commands/commands/quote.rs", "rank": 85, "score": 36393.45031924608 }, { "content": " user.display_name_or_name(),\n\n humantime::format_duration(truncate_duration(duration)),\n\n )))\n\n }\n\n None => Ok(MessageResult::Message(\n\n \"I have never seen this user before\".into(),\n\n )),\n\n }\n\n })\n\n .about(\"Check when a user was last seen\")\n\n .done()\n\n}\n", "file_path": "src/commands/commands/lastseen.rs", "rank": 86, "score": 36393.45031924608 }, { "content": " \"THA\" => FixedOffset::east(7 * hour), // Thailand Standard Time\n\n \"TFT\" => FixedOffset::east(5 * hour), // French Southern and Antarctic Time\n\n \"TJT\" => FixedOffset::east(5 * hour), // Tajikistan Time\n\n \"TKT\" => FixedOffset::east(13 * hour), // Tokelau Time\n\n \"TLT\" => FixedOffset::east(9 * hour), // Timor Leste Time\n\n \"TMT\" => FixedOffset::east(5 * hour), // Turkmenistan Time\n\n \"TRT\" => FixedOffset::east(3 * hour), // Turkey Time\n\n \"TOT\" => FixedOffset::east(13 * hour), // Tonga Time\n\n \"TVT\" => FixedOffset::east(12 * hour), // Tuvalu Time\n\n \"ULAST\" => 
FixedOffset::east(9 * hour), // Ulaanbaatar Summer Time\n\n \"ULAT\" => FixedOffset::east(8 * hour), // Ulaanbaatar Standard Time\n\n \"UTC\" => FixedOffset::east(0), // Coordinated Universal Time\n\n \"UYST\" => FixedOffset::west(2 * hour), // Uruguay Summer Time\n\n \"UYT\" => FixedOffset::west(3 * hour), // Uruguay Standard Time\n\n \"UZT\" => FixedOffset::east(5 * hour), // Uzbekistan Time\n\n \"VET\" => FixedOffset::west(4 * hour), // Venezuelan Standard Time\n\n \"VLAT\" => FixedOffset::east(10 * hour), // Vladivostok Time\n\n \"VOLT\" => FixedOffset::east(4 * hour), // Volgograd Time\n\n \"VOST\" => FixedOffset::east(6 * hour), // Vostok Station Time\n\n \"VUT\" => FixedOffset::east(11 * hour), // Vanuatu Time\n", "file_path": "src/commands/commands/time.rs", "rank": 87, "score": 36393.45031924608 }, { "content": " \"ANAT\" => FixedOffset::east(12 * hour), // Anadyr Time\n\n \"AQTT\" => FixedOffset::east(5 * hour), // Aqtobe Time\n\n \"ART\" => FixedOffset::west(3 * hour), // Argentina Time\n\n //\"AST\" => FixedOffset::east(3 * hour), // Arabia Standard Time\n\n \"KSA\" => FixedOffset::east(3 * hour), // Arabia Standard Time\n\n \"AST\" => FixedOffset::west(4 * hour), // Atlantic Standard Time\n\n \"AWST\" => FixedOffset::east(8 * hour), // Australian Western Standard Time\n\n \"AZOST\" => FixedOffset::east(0), // Azores Summer Time\n\n \"AZOT\" => FixedOffset::west(1 * hour), // Azores Standard Time\n\n \"AZT\" => FixedOffset::east(4 * hour), // Azerbaijan Time\n\n \"BDT\" => FixedOffset::east(8 * hour), // Brunei Time\n\n \"BIOT\" => FixedOffset::east(6 * hour), // British Indian Ocean Time\n\n \"BIT\" => FixedOffset::west(12 * hour), // Baker Island Time\n\n \"BOT\" => FixedOffset::west(4 * hour), // Bolivia Time\n\n \"BRST\" => FixedOffset::west(2 * hour), // Brasília Summer Time\n\n \"BRT\" => FixedOffset::west(3 * hour), // Brasília Time\n\n \"BST\" => FixedOffset::east(6 * hour), // Bangladesh Standard Time\n\n //\"BST\" => FixedOffset::east(11 * 
hour), // Bougainville Standard Time\n\n //\"BST\" => FixedOffset::east(1 * hour), // British Summer Time (British Standard Time from Feb 1968 to Oct 1971)\n\n \"BTT\" => FixedOffset::east(6 * hour), // Bhutan Time\n", "file_path": "src/commands/commands/time.rs", "rank": 88, "score": 36393.45031924608 }, { "content": " \"EST\" => FixedOffset::west(5 * hour), // Eastern Standard Time (North America)\n\n \"FET\" => FixedOffset::east(3 * hour), // Further-western European Time\n\n \"FJT\" => FixedOffset::east(12 * hour), // Fiji Time\n\n \"FKST\" => FixedOffset::west(3 * hour), // Falkland Islands Summer Time\n\n \"FKT\" => FixedOffset::west(4 * hour), // Falkland Islands Time\n\n \"FNT\" => FixedOffset::west(2 * hour), // Fernando de Noronha Time\n\n \"GALT\" => FixedOffset::west(6 * hour), // Galápagos Time\n\n \"GAMT\" => FixedOffset::west(9 * hour), // Gambier Islands Time\n\n \"GET\" => FixedOffset::east(4 * hour), // Georgia Standard Time\n\n \"GFT\" => FixedOffset::west(3 * hour), // French Guiana Time\n\n \"GILT\" => FixedOffset::east(12 * hour), // Gilbert Island Time\n\n \"GIT\" => FixedOffset::west(9 * hour), // Gambier Island Time\n\n \"GMT\" => FixedOffset::east(0), // Greenwich Mean Time\n\n //\"GST\" => FixedOffset::west(2 * hour), // South Georgia and the South Sandwich Islands Time\n\n \"GST\" => FixedOffset::east(4 * hour), // Gulf Standard Time\n\n \"GYT\" => FixedOffset::west(4 * hour), // Guyana Time\n\n \"HDT\" => FixedOffset::west(9 * hour), // Hawaii–Aleutian Daylight Time\n\n \"HAEC\" => FixedOffset::east(2 * hour), // Heure Avancée d'Europe Centrale French-language name for CEST\n\n \"HST\" => FixedOffset::west(10 * hour), // Hawaii–Aleutian Standard Time\n\n \"HKT\" => FixedOffset::east(8 * hour), // Hong Kong Time\n", "file_path": "src/commands/commands/time.rs", "rank": 89, "score": 36393.45031924608 }, { "content": " \"PST\" => FixedOffset::west(8 * hour), // Pacific Standard Time (North America)\n\n //\"PST\" => FixedOffset::east(8 * 
hour), // Philippine Standard Time\n\n \"PYST\" => FixedOffset::west(3 * hour), // Paraguay Summer Time\n\n \"PYT\" => FixedOffset::west(4 * hour), // Paraguay Time\n\n \"RET\" => FixedOffset::east(4 * hour), // Réunion Time\n\n \"ROTT\" => FixedOffset::west(3 * hour), // Rothera Research Station Time\n\n \"SAKT\" => FixedOffset::east(11 * hour), // Sakhalin Island Time\n\n \"SAMT\" => FixedOffset::east(4 * hour), // Samara Time\n\n \"SAST\" => FixedOffset::east(2 * hour), // South African Standard Time\n\n \"SBT\" => FixedOffset::east(11 * hour), // Solomon Islands Time\n\n \"SCT\" => FixedOffset::east(4 * hour), // Seychelles Time\n\n \"SDT\" => FixedOffset::west(10 * hour), // Samoa Daylight Time\n\n \"SGT\" => FixedOffset::east(8 * hour), // Singapore Time\n\n \"SLST\" => FixedOffset::east(5 * hour + 30 * minute), // Sri Lanka Standard Time\n\n \"SRET\" => FixedOffset::east(11 * hour), // Srednekolymsk Time\n\n \"SRT\" => FixedOffset::west(3 * hour), // Suriname Time\n\n //\"SST\" => FixedOffset::west(11 * hour), // Samoa Standard Time\n\n \"SST\" => FixedOffset::east(8 * hour), // Singapore Standard Time\n\n \"SYOT\" => FixedOffset::east(3 * hour), // Showa Station Time\n\n \"TAHT\" => FixedOffset::west(10 * hour), // Tahiti Time\n", "file_path": "src/commands/commands/time.rs", "rank": 90, "score": 36393.45031924608 }, { "content": " \"NFT\" => FixedOffset::east(11 * hour), // Norfolk Island Time\n\n \"NOVT\" => FixedOffset::east(7 * hour), // Novosibirsk Time\n\n \"NPT\" => FixedOffset::east(5 * hour + 45 * minute), // Nepal Time\n\n \"NST\" => FixedOffset::west(3 * hour + 30 * minute), // Newfoundland Standard Time\n\n \"NT\" => FixedOffset::west(3 * hour + 30 * minute), // Newfoundland Time\n\n \"NUT\" => FixedOffset::west(11 * hour), // Niue Time\n\n \"NZDT\" => FixedOffset::east(13 * hour), // New Zealand Daylight Time\n\n \"NZST\" => FixedOffset::east(12 * hour), // New Zealand Standard Time\n\n \"OMST\" => FixedOffset::east(6 * hour), // Omsk Time\n\n 
\"ORAT\" => FixedOffset::east(5 * hour), // Oral Time\n\n \"PDT\" => FixedOffset::west(7 * hour), // Pacific Daylight Time (North America)\n\n \"PET\" => FixedOffset::west(5 * hour), // Peru Time\n\n \"PETT\" => FixedOffset::east(12 * hour), // Kamchatka Time\n\n \"PGT\" => FixedOffset::east(10 * hour), // Papua New Guinea Time\n\n \"PHOT\" => FixedOffset::east(13 * hour), // Phoenix Island Time\n\n \"PHT\" => FixedOffset::east(8 * hour), // Philippine Time\n\n \"PKT\" => FixedOffset::east(5 * hour), // Pakistan Standard Time\n\n \"PMDT\" => FixedOffset::west(2 * hour), // Saint Pierre and Miquelon Daylight Time\n\n \"PMST\" => FixedOffset::west(3 * hour), // Saint Pierre and Miquelon Standard Time\n\n \"PONT\" => FixedOffset::east(11 * hour), // Pohnpei Standard Time\n", "file_path": "src/commands/commands/time.rs", "rank": 91, "score": 36393.45031924608 }, { "content": " \"WAKT\" => FixedOffset::east(12 * hour), // Wake Island Time\n\n \"WAST\" => FixedOffset::east(2 * hour), // West Africa Summer Time\n\n \"WAT\" => FixedOffset::east(1 * hour), // West Africa Time\n\n \"WEST\" => FixedOffset::east(1 * hour), // Western European Summer Time\n\n \"WET\" => FixedOffset::east(0), // Western European Time\n\n \"WIT\" => FixedOffset::east(7 * hour), // Western Indonesian Time\n\n \"WGST\" => FixedOffset::west(2 * hour), // West Greenland Summer Time\n\n \"WGT\" => FixedOffset::west(3 * hour), // West Greenland Time\n\n \"WST\" => FixedOffset::east(8 * hour), // Western Standard Time\n\n \"YAKT\" => FixedOffset::east(9 * hour), // Yakutsk Time\n\n \"YEKT\" => FixedOffset::east(5 * hour), // Yekaterinburg Time\n\n _ => bail!(TimeError::UnknownOffset(name.into())),\n\n };\n\n Ok(offset)\n\n}\n", "file_path": "src/commands/commands/time.rs", "rank": 92, "score": 36393.45031924608 }, { "content": " //\"LHST\" => FixedOffset::east(11 * hour), // Lord Howe Summer Time\n\n \"LINT\" => FixedOffset::east(14 * hour), // Line Islands Time\n\n \"MAGT\" => FixedOffset::east(12 * 
hour), // Magadan Time\n\n \"MART\" => FixedOffset::west(9 * hour + 30 * minute), // Marquesas Islands Time\n\n \"MAWT\" => FixedOffset::east(5 * hour), // Mawson Station Time\n\n \"MDT\" => FixedOffset::west(6 * hour), // Mountain Daylight Time (North America)\n\n \"MET\" => FixedOffset::east(1 * hour), // Middle European Time Same zone as CET\n\n \"MEST\" => FixedOffset::east(2 * hour), // Middle European Summer Time Same zone as CEST\n\n \"MHT\" => FixedOffset::east(12 * hour), // Marshall Islands Time\n\n \"MIST\" => FixedOffset::east(11 * hour), // Macquarie Island Station Time\n\n \"MIT\" => FixedOffset::west(9 * hour + 30 * minute), // Marquesas Islands Time\n\n \"MMT\" => FixedOffset::east(6 * hour + 30 * minute), // Myanmar Standard Time\n\n \"MSK\" => FixedOffset::east(3 * hour), // Moscow Time\n\n //\"MST\" => FixedOffset::east(8 * hour), // Malaysia Standard Time\n\n \"MST\" => FixedOffset::west(7 * hour), // Mountain Standard Time (North America)\n\n \"MUT\" => FixedOffset::east(4 * hour), // Mauritius Time\n\n \"MVT\" => FixedOffset::east(5 * hour), // Maldives Time\n\n \"MYT\" => FixedOffset::east(8 * hour), // Malaysia Time\n\n \"NCT\" => FixedOffset::east(11 * hour), // New Caledonia Time\n\n \"NDT\" => FixedOffset::west(2 * hour + 30 * minute), // Newfoundland Daylight Time\n", "file_path": "src/commands/commands/time.rs", "rank": 93, "score": 36393.45031924608 }, { "content": " //\"CST\" => FixedOffset::east(8 * hour), // China Standard Time\n\n //\"CST\" => FixedOffset::west(5 * hour), // Cuba Standard Time\n\n \"CT\" => FixedOffset::east(8 * hour), // China Time\n\n \"CVT\" => FixedOffset::west(1 * hour), // Cape Verde Time\n\n \"CWST\" => FixedOffset::east(8 * hour + 45 * minute), // Central Western Standard Time (Australia) unofficial\n\n \"CXT\" => FixedOffset::east(7 * hour), // Christmas Island Time\n\n \"DAVT\" => FixedOffset::east(7 * hour), // Davis Time\n\n \"DDUT\" => FixedOffset::east(10 * hour), // Dumont d'Urville Time\n\n \"DFT\" 
=> FixedOffset::east(1 * hour), // AIX-specific equivalent of Central European Time\n\n \"EASST\" => FixedOffset::west(5 * hour), // Easter Island Summer Time\n\n \"EAST\" => FixedOffset::west(6 * hour), // Easter Island Standard Time\n\n \"EAT\" => FixedOffset::east(3 * hour), // East Africa Time\n\n \"ECT\" => FixedOffset::west(4 * hour), // Eastern Caribbean Time (does not recognise DST)\n\n //\"ECT\" => FixedOffset::west(5 * hour), // Ecuador Time\n\n \"EDT\" => FixedOffset::west(4 * hour), // Eastern Daylight Time (North America)\n\n \"EEST\" => FixedOffset::east(3 * hour), // Eastern European Summer Time\n\n \"EET\" => FixedOffset::east(2 * hour), // Eastern European Time\n\n \"EGST\" => FixedOffset::east(0), // Eastern Greenland Summer Time\n\n \"EGT\" => FixedOffset::west(1 * hour), // Eastern Greenland Time\n\n \"EIT\" => FixedOffset::east(9 * hour), // Eastern Indonesian Time\n", "file_path": "src/commands/commands/time.rs", "rank": 94, "score": 36393.45031924608 }, { "content": " \"CAT\" => FixedOffset::east(2 * hour), // Central Africa Time\n\n \"CCT\" => FixedOffset::east(6 * hour + 30 * minute), // Cocos Islands Time\n\n \"CDT\" => FixedOffset::west(5 * hour), // Central Daylight Time (North America)\n\n //\"CDT\" => FixedOffset::west(4 * hour), // Cuba Daylight Time\n\n \"CEST\" => FixedOffset::east(2 * hour), // Central European Summer Time (Cf HAEC)\n\n \"CET\" => FixedOffset::east(1 * hour), // Central European Time\n\n \"CHADT\" => FixedOffset::east(13 * hour + 45 * minute), // Chatham Daylight Time\n\n \"CHAST\" => FixedOffset::east(12 * hour + 45 * minute), // Chatham Standard Time\n\n \"CHOT\" => FixedOffset::east(8 * hour), // Choibalsan Standard Time\n\n \"CHOST\" => FixedOffset::east(9 * hour), // Choibalsan Summer Time\n\n \"CHST\" => FixedOffset::east(10 * hour), // Chamorro Standard Time\n\n \"CHUT\" => FixedOffset::east(10 * hour), // Chuuk Time\n\n \"CIST\" => FixedOffset::west(8 * hour), // Clipperton Island Standard Time\n\n \"CIT\" 
=> FixedOffset::east(8 * hour), // Central Indonesia Time\n\n \"CKT\" => FixedOffset::west(10 * hour), // Cook Island Time\n\n \"CLST\" => FixedOffset::west(3 * hour), // Chile Summer Time\n\n \"CLT\" => FixedOffset::west(4 * hour), // Chile Standard Time\n\n \"COST\" => FixedOffset::west(4 * hour), // Colombia Summer Time\n\n \"COT\" => FixedOffset::west(5 * hour), // Colombia Time\n\n \"CST\" => FixedOffset::west(6 * hour), // Central Standard Time (North America)\n", "file_path": "src/commands/commands/time.rs", "rank": 95, "score": 36393.45031924608 }, { "content": " return Ok(MessageResult::Error(\"Could not parse zone\".into()))\n\n }\n\n };\n\n format!(\n\n \"Current Time: {}\",\n\n now.with_timezone(&zone)\n\n .to_rfc3339_opts(SecondsFormat::Secs, false)\n\n )\n\n }\n\n }))\n\n }\n\n })\n\n .about(\"Get the current time\")\n\n .done()\n\n}\n\n\n", "file_path": "src/commands/commands/time.rs", "rank": 96, "score": 36393.45031924608 }, { "content": "/// century = \"century\" [\"s\"]\n\nfn parse_century<'a>(i: &'a str) -> IResult<&'a str, Units> {\n\n let (i, _) = alt((tag_no_case(\"centurys\"), tag_no_case(\"century\")))(i)?;\n\n\n\n Ok((i, Units::Century))\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", "rank": 97, "score": 34545.304279195356 }, { "content": "/// decade = \"decade\" [\"s\"]\n\nfn parse_decade<'a>(i: &'a str) -> IResult<&'a str, Units> {\n\n let (i, _) = alt((tag_no_case(\"decades\"), tag_no_case(\"decade\")))(i)?;\n\n\n\n Ok((i, Units::Decade))\n\n}\n\n\n", "file_path": "src/voicemail/parser.rs", "rank": 98, "score": 34545.304279195356 }, { "content": "#![warn(clippy::result_unwrap_used)]\n\n#![warn(clippy::option_unwrap_used)]\n\n#![warn(clippy::dbg_macro)]\n\n#![warn(clippy::todo)]\n\n\n\n#[macro_use]\n\nextern crate diesel;\n\n#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\npub mod actions;\n\npub mod commands;\n\npub mod context;\n\npub mod database;\n\npub mod handler;\n\npub mod 
helpers;\n\npub mod manpages;\n\npub mod message;\n", "file_path": "src/lib.rs", "rank": 99, "score": 14.871621680957558 } ]
Rust
src/layout/gpos1.rs
daltonmaag/fonttools-rs
f6bbfa93cfb0b963432b87b30e4f5dfdfe9db923
use crate::layout::coverage::Coverage; use crate::layout::valuerecord::{coerce_to_same_format, ValueRecord, ValueRecordFlags}; use crate::utils::is_all_the_same; use otspec::types::*; use otspec::Serialize; use otspec::{DeserializationError, Deserialize, Deserializer, ReaderContext, SerializationError}; use otspec_macros::Serialize; use std::collections::BTreeMap; #[derive(Debug, PartialEq, Clone, Serialize)] #[allow(missing_docs, non_snake_case, non_camel_case_types)] pub struct SinglePosFormat1 { #[serde(offset_base)] pub posFormat: uint16, pub coverage: Offset16<Coverage>, pub valueFormat: ValueRecordFlags, pub valueRecord: ValueRecord, } #[derive(Debug, PartialEq, Clone, Serialize)] #[allow(missing_docs, non_snake_case, non_camel_case_types)] pub struct SinglePosFormat2 { #[serde(offset_base)] pub posFormat: uint16, pub coverage: Offset16<Coverage>, pub valueFormat: ValueRecordFlags, #[serde(with = "Counted")] pub valueRecords: Vec<ValueRecord>, } #[derive(Debug, Clone, PartialEq)] pub enum SinglePosInternal { Format1(SinglePosFormat1), Format2(SinglePosFormat2), } impl Serialize for SinglePosInternal { fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> { match self { SinglePosInternal::Format1(s) => s.to_bytes(data), SinglePosInternal::Format2(s) => s.to_bytes(data), } } } #[derive(Debug, PartialEq, Clone)] pub struct SinglePos { pub mapping: BTreeMap<uint16, ValueRecord>, } impl Deserialize for SinglePos { fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> { let mut mapping = BTreeMap::new(); let format: uint16 = c.de()?; let coverage: Offset16<Coverage> = c.de()?; let value_format: ValueRecordFlags = c.de()?; match format { 1 => { let mut vr: ValueRecord = ValueRecord::from_bytes(c, value_format)?; vr.simplify(); for glyph_id in &coverage.as_ref().unwrap().glyphs { mapping.insert(*glyph_id, vr); } } 2 => { let _count: uint16 = c.de()?; for glyph_id in coverage.as_ref().unwrap().glyphs.iter() { let mut vr: 
ValueRecord = ValueRecord::from_bytes(c, value_format)?; vr.simplify(); mapping.insert(*glyph_id, vr); } } _ => panic!("Bad single pos format {:?}", format), } Ok(SinglePos { mapping }) } } impl From<&SinglePos> for SinglePosInternal { fn from(val: &SinglePos) -> Self { let mut mapping = val.mapping.clone(); for (_, val) in mapping.iter_mut() { (*val).simplify() } let coverage = Coverage { glyphs: mapping.keys().copied().collect(), }; if is_all_the_same(mapping.values()) { let vr = mapping.values().next().unwrap(); SinglePosInternal::Format1(SinglePosFormat1 { posFormat: 1, coverage: Offset16::to(coverage), valueFormat: vr.flags(), valueRecord: *vr, }) } else { let vrs: Vec<ValueRecord> = mapping.values().copied().collect(); let vrs = coerce_to_same_format(vrs); SinglePosInternal::Format2(SinglePosFormat2 { posFormat: 2, coverage: Offset16::to(coverage), valueFormat: vrs[0].flags(), valueRecords: vrs, }) } } } impl Serialize for SinglePos { fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> { let ssi: SinglePosInternal = self.into(); ssi.to_bytes(data) } } #[cfg(test)] mod tests { use super::*; use crate::{btreemap, valuerecord}; use std::iter::FromIterator; #[test] fn test_single_pos_1_1_serde() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10)), }; let binary_pos = vec![ 0x00, 0x01, 0x00, 0x08, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x01, 0x00, 66, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); } #[test] fn test_single_pos_1_1_serde2() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=10, yPlacement=0), ), }; let binary_pos = vec![ 0x00, 0x01, 0x00, 0x08, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: 
SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!( de, SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=10), ), } ); } #[test] fn test_single_pos_1_2_serde() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=-20), ), }; let binary_pos = vec![ 0x00, 0x02, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x02, 0x00, 0x0a, 0xff, 0xec, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); } #[test] fn test_single_pos_1_2_serde2() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xPlacement=-20), ), }; let binary_pos = vec![ 0x00, 0x02, 0x00, 0x10, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0a, 0xff, 0xec, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); } }
use crate::layout::coverage::Coverage; use crate::layout::valuerecord::{coerce_to_same_format, ValueRecord, ValueRecordFlags}; use crate::utils::is_all_the_same; use otspec::types::*; use otspec::Serialize; use otspec::{DeserializationError, Deserialize, Deserializer, ReaderContext, SerializationError}; use otspec_macros::Serialize; use std::collections::BTreeMap; #[derive(Debug, PartialEq, Clone, Serialize)] #[allow(missing_docs, non_snake_case, non_camel_case_types)] pub struct SinglePosFormat1 { #[serde(offset_base)] pub posFormat: uint16, pub coverage: Offset16<Coverage>, pub valueFormat: ValueRecordFlags, pub valueRecord: ValueRecord, } #[derive(Debug, PartialEq, Clone, Serialize)] #[allow(missing_docs, non_snake_case, non_camel_case_types)] pub struct SinglePosFormat2 { #[serde(offset_base)] pub posFormat: uint16, pub coverage: Offset16<Coverage>, pub valueFormat: ValueRecordFlags, #[serde(with = "Counted")] pub valueRecords: Vec<ValueRecord>, } #[derive(Debug, Clone, PartialEq)] pub enum SinglePosInternal { Format1(SinglePosFormat1), Format2(SinglePosFormat2), } impl Serialize for SinglePosInternal { fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> { match self { SinglePosInternal::Format1(s) => s.to_bytes(data), SinglePosInternal::Format2(s) => s.to_bytes(data), } } } #[derive(Debug, PartialEq, Clone)] pub struct SinglePos { pub mapping: BTreeMap<uint16, ValueRecord>, } impl Deserialize for SinglePos { fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> { let mut mapping = BTreeMap::new(); let format: uint16 = c.de()?; let coverage: Offset16<Coverage> = c.de()?; let value_format: ValueRecordFlags = c.de()?; match format { 1 => { let mut vr: ValueRecord = ValueRecord::from_bytes(c, value_format)?; vr.simplify(); for glyph_id in &coverage.as_ref().unwrap().glyphs { mapping.insert(*glyph_id, vr); } } 2 => { let _count: uint16 = c.de()?; for glyph_id in coverage.as_ref().unwrap().glyphs.iter() { let mut vr: 
ValueRecord = ValueRecord::from_bytes(c, value_format)?; vr.simplify(); mapping.insert(*glyph_id, vr); } } _ => panic!("Bad single pos format {:?}", format), } Ok(SinglePos { mapping }) } } impl From<&SinglePos> for SinglePosInternal { fn from(val: &SinglePos) -> Self { let mut mapping = val.mapping.clone(); for (_, val) in mapping.iter_mut() { (*val).simplify() } let coverage = Coverage { glyphs: mapping.keys().copied().collect(), }; if is_all_the_same(mapping.values()) { let vr = mapping.values().next().unwrap(); SinglePosInternal::Format1(SinglePosFormat1 { posFormat: 1, coverage: Offset16::to(coverage), valueFormat: vr.flags(), valueRecord: *vr, }) } else { let vrs: Vec<ValueRecord> = mapping.values().copied().collect(); let vrs = coerce_to_same_format(vrs); SinglePosInternal::Format2(SinglePosFormat2 { posFormat: 2, coverage: Offset16::to(coverage), valueFormat: vrs[0].flags(), valueRecords: vrs, }) } } } impl Serialize for SinglePos { fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> { let ssi: SinglePosInternal = self.into(); ssi.to_bytes(data) } } #[cfg(test)] mod tests { use super::*; use crate::{btreemap, valuerecord}; use std::iter::FromIterator; #[test] fn test_single_pos_1_1_serde() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10)), }; let binary_pos = vec![ 0x00, 0x01, 0x00, 0x08, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x01, 0x00, 66, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); } #[test] fn test_single_pos_1_1_serde2() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=10, yPlacement=0), ), }; let binary_pos = vec![ 0x00, 0x01, 0x00, 0x08, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: 
SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!( de, SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=10), ), } ); } #[test] fn test_single_pos_1_2_serde() { let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xAdvance=-20), ), }; let binary_pos = vec![ 0x00, 0x02, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x02, 0x00, 0x0a, 0xff, 0xec, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); } #[test] fn test_single_pos_1_2_serde2() {
}
let pos = SinglePos { mapping: btreemap!(66 => valuerecord!(xAdvance=10), 67 => valuerecord!(xPlacement=-20), ), }; let binary_pos = vec![ 0x00, 0x02, 0x00, 0x10, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0a, 0xff, 0xec, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 66, 0x00, 67, ]; let serialized = otspec::ser::to_bytes(&pos).unwrap(); assert_eq!(serialized, binary_pos); let de: SinglePos = otspec::de::from_bytes(&binary_pos).unwrap(); assert_eq!(de, pos); }
function_block-function_prefix_line
[ { "content": "fn consecutive_slices(data: &[uint16]) -> Vec<&[uint16]> {\n\n let mut slice_start = 0;\n\n let mut result = Vec::new();\n\n for i in 1..data.len() {\n\n if data[i - 1] + 1 != data[i] {\n\n result.push(&data[slice_start..i]);\n\n slice_start = i;\n\n }\n\n }\n\n if !data.is_empty() {\n\n result.push(&data[slice_start..]);\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/layout/coverage.rs", "rank": 0, "score": 262145.69170699327 }, { "content": "/// Ensure that all value records in a list have the same format\n\npub fn coerce_to_same_format(vrs: Vec<ValueRecord>) -> Vec<ValueRecord> {\n\n // Needed?\n\n if is_all_the_same(vrs.iter().map(|x| x.flags())) {\n\n return vrs;\n\n }\n\n let mut new_vec = vec![];\n\n let maximum = highest_format(vrs.iter());\n\n for mut vr in vrs {\n\n vr.coerce_to_format(maximum);\n\n new_vec.push(vr);\n\n }\n\n new_vec\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! valuerecord {\n\n ($($k:ident = $v:expr),* $(,)?) => {{\n\n\t \t#[allow(unused_mut)]\n\n let mut v = ValueRecord::new();\n\n $( v.$k = Some($v); )*\n", "file_path": "src/layout/valuerecord.rs", "rank": 1, "score": 239675.33230291842 }, { "content": "fn consecutive_slices(data: &[(uint16, uint16)]) -> Vec<&[(uint16, uint16)]> {\n\n let mut slice_start = 0;\n\n let mut result = Vec::new();\n\n for i in 1..data.len() {\n\n if data[i - 1].0 + 1 != data[i].0 || data[i - 1].1 != data[i].1 {\n\n result.push(&data[slice_start..i]);\n\n slice_start = i;\n\n }\n\n }\n\n if !data.is_empty() {\n\n result.push(&data[slice_start..]);\n\n }\n\n result\n\n}\n\n\n\nimpl Serialize for ClassDef {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n let pairs: Vec<(u16, u16)> = self.classes.iter().map(|(k, v)| (*k, *v)).collect();\n\n let as_consecutive = consecutive_slices(&pairs);\n\n if self.classes.is_empty() {\n", "file_path": "src/layout/classdef.rs", "rank": 2, "score": 230217.93858080896 }, { "content": "/// Deserialize the glyf table from a 
binary buffer.\n\n///\n\n/// loca_offsets must be obtained from the `loca` table.\n\npub fn from_bytes(c: &[u8], loca_offsets: Vec<Option<u32>>) -> Result<glyf, DeserializationError> {\n\n from_rc(&mut ReaderContext::new(c.to_vec()), loca_offsets)\n\n}\n\n\n", "file_path": "src/glyf.rs", "rank": 3, "score": 209448.9262537154 }, { "content": "fn read_f64_from_f2dot14(c: &mut ReaderContext) -> Result<f64, DeserializationError> {\n\n let x: F2DOT14 = c.de()?;\n\n let x_f32: f32 = x.into();\n\n Ok(x_f32 as f64)\n\n}\n\n\n\nimpl Deserialize for Component {\n\n fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> {\n\n let flags: ComponentFlags = c.de()?;\n\n let glyph_index: uint16 = c.de()?;\n\n let mut match_points: Option<(uint16, uint16)> = None;\n\n let mut x_offset: i16 = 0;\n\n let mut y_offset: i16 = 0;\n\n if !flags.contains(ComponentFlags::ARGS_ARE_XY_VALUES) {\n\n // unsigned point values\n\n if flags.contains(ComponentFlags::ARG_1_AND_2_ARE_WORDS) {\n\n let p1: u16 = c.de()?;\n\n let p2: u16 = c.de()?;\n\n match_points = Some((p1, p2));\n\n } else {\n", "file_path": "src/glyf/component.rs", "rank": 4, "score": 206188.33379468508 }, { "content": "/// Optimize the deltas by removing deltas that be inferred using IUP\n\npub fn optimize_deltas(deltas: Vec<Option<Delta>>, glyph: &Glyph) -> Vec<Option<Delta>> {\n\n let (coords, ends): (Vec<(int16, int16)>, Vec<usize>) = glyph.gvar_coords_and_ends();\n\n\n\n let deltas_xy: Vec<(int16, int16)>;\n\n if !deltas.iter().all(|x| x.is_some()) {\n\n // Perhaps we're re-optimizing an optimized thing already.\n\n // Oh well, IUP it all first.\n\n let mut start = 0;\n\n let mut newdeltas = vec![];\n\n for end in &ends {\n\n let contour_delta = &deltas[start..end + 1];\n\n let contour_orig = &coords[start..end + 1];\n\n start = end + 1;\n\n iup_contour(&mut newdeltas, contour_delta, contour_orig);\n\n }\n\n deltas_xy = newdeltas;\n\n } else {\n\n deltas_xy = deltas\n\n .iter()\n\n .map(|o_d| {\n", 
"file_path": "src/otvar/iup.rs", "rank": 5, "score": 202431.8102570175 }, { "content": "/// Adds explicit oncurve points to a contour\n\npub fn insert_explicit_oncurves(contour: &mut Vec<Point>) {\n\n for i in (0..contour.len() - 1).rev() {\n\n if !contour[i].on_curve && !contour[i + 1].on_curve {\n\n contour.insert(\n\n i + 1,\n\n Point {\n\n on_curve: true,\n\n x: (contour[i].x + contour[i + 1].x) / 2,\n\n y: (contour[i].y + contour[i + 1].y) / 2,\n\n },\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/glyf/contourutils.rs", "rank": 6, "score": 195727.2157196552 }, { "content": "/// Removes implied oncurve points from a contour\n\npub fn remove_implied_oncurves(contour: &mut Vec<Point>) {\n\n let mut i: usize = 0;\n\n while i < contour.len() {\n\n let next_ix = (i + 1) % contour.len();\n\n let prev_ix = if i == 0 { contour.len() - 1 } else { i - 1 };\n\n let this = contour[i];\n\n let next = contour[next_ix];\n\n let prev = contour[prev_ix];\n\n if !this.on_curve\n\n || prev.on_curve\n\n || next.on_curve\n\n || this.x != (prev.x + next.x) / 2\n\n || this.y != (prev.y + next.y) / 2\n\n {\n\n i += 1;\n\n continue;\n\n }\n\n contour.remove(i);\n\n }\n\n}\n\n\n", "file_path": "src/glyf/contourutils.rs", "rank": 7, "score": 195727.2157196552 }, { "content": "fn best_format(_: &PairPositioningMap) -> uint16 {\n\n 1\n\n}\n\n\n\nimpl From<&PairPos> for PairPosInternal {\n\n fn from(val: &PairPos) -> Self {\n\n let mut mapping = val.mapping.clone();\n\n for (_, (val1, val2)) in mapping.iter_mut() {\n\n (*val1).simplify();\n\n (*val2).simplify();\n\n }\n\n let fmt = best_format(&mapping);\n\n let split_mapping = split_into_two_layer(mapping);\n\n let coverage = Coverage {\n\n glyphs: split_mapping.keys().copied().collect(),\n\n };\n\n\n\n let all_pair_vrs: Vec<&(ValueRecord, ValueRecord)> = split_mapping\n\n .values()\n\n .map(|x| x.values())\n", "file_path": "src/layout/gpos2.rs", "rank": 8, "score": 193612.22436804877 }, { "content": "pub fn 
expand_derive_deserialize(\n\n input: &mut syn::DeriveInput,\n\n) -> Result<TokenStream, Vec<syn::Error>> {\n\n replace_receiver(input);\n\n\n\n let ctxt = Ctxt::new();\n\n let cont = match Container::from_ast(&ctxt, input, Derive::Serialize) {\n\n Some(cont) => cont,\n\n None => return Err(ctxt.check().unwrap_err()),\n\n };\n\n ctxt.check()?;\n\n\n\n let ident = &cont.ident;\n\n let params = Parameters::new(&cont);\n\n let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl();\n\n match &cont.data {\n\n Data::Struct(Style::Struct, fields) => {\n\n let body = deserialize_fields(fields);\n\n let names = fields.iter().map(|f| &f.original.ident);\n\n let do_pop = if fields.iter().any(|f| f.attrs.offset_base) {\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 9, "score": 174083.63191649463 }, { "content": "pub fn ungroup(mut ty: &Type) -> &Type {\n\n while let Type::Group(group) = ty {\n\n ty = &group.elem;\n\n }\n\n ty\n\n}\n", "file_path": "crates/otspec_macros/src/internals/mod.rs", "rank": 10, "score": 171089.5487869602 }, { "content": "/// Loads a binary font from the given filehandle.\n\npub fn load<T>(mut file: T) -> Result<Font, Box<dyn Error>>\n\nwhere\n\n T: Read,\n\n{\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer)?;\n\n otspec::de::from_bytes(&buffer)\n\n .map_err(|e| e.into())\n\n .map(|mut f: Font| {\n\n let _ = f.get_table(b\"head\");\n\n let _ = f.get_table(b\"loca\");\n\n f\n\n })\n\n}\n\n\n\nimpl PartialEq for Font {\n\n fn eq(&self, other: &Self) -> bool {\n\n if self.sfntVersion != other.sfntVersion || self.tables.len() != other.tables.len() {\n\n return false;\n\n }\n\n for ((k1, v1), (k2, v2)) in self.tables.iter().zip(other.tables.iter()) {\n\n if k1 != k2 || v1 != v2 {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/font.rs", "rank": 11, "score": 169967.95414298063 }, { "content": "fn rotate_list<T: Clone + Sized + std::fmt::Debug>(l: Vec<T>, mut k: i16) -> Vec<T> 
{\n\n // println!(\"Rotating list {:?} by {:?}\", l, k);\n\n let n = l.len();\n\n k = k.rem_euclid(n as i16);\n\n if k == 0 {\n\n return l;\n\n }\n\n let partition = (n as i16 - k) as usize;\n\n // println!(\"Partition at {:?}\", partition);\n\n let mut first = l[partition..].to_vec();\n\n let second = &l[0..partition];\n\n first.extend(second.to_vec());\n\n first\n\n}\n\n\n", "file_path": "src/otvar/iup.rs", "rank": 12, "score": 167983.994499318 }, { "content": "pub fn save_font(mut font: Font, matches: &clap::ArgMatches) {\n\n if matches.is_present(\"OUTPUT\") {\n\n let mut outfile = File::create(matches.value_of(\"OUTPUT\").unwrap())\n\n .expect(\"Could not open file for writing\");\n\n font.save(&mut outfile);\n\n } else {\n\n font.save(&mut io::stdout());\n\n };\n\n}\n", "file_path": "crates/fonttools-cli/src/lib.rs", "rank": 13, "score": 166136.3799106367 }, { "content": "fn deserialize_fields(fields: &[Field]) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let name = &field.original.ident;\n\n let ty = &field.original.ty;\n\n let start = if field.attrs.offset_base {\n\n quote!(c.push();)\n\n } else {\n\n quote!()\n\n };\n\n if let Some(path) = field.attrs.deserialize_with() {\n\n if path.path.is_ident(\"Counted\") {\n\n if let syn::Type::Path(subvec) = ty {\n\n let vec_type = &subvec.path.segments.first().unwrap().ident;\n\n let subpath = get_vector_arg(subvec);\n\n if *vec_type == \"VecOffset16\" {\n\n quote! 
{\n\n #start\n\n let wrapped: otspec::Counted<Offset16<#subpath>> = c.de()?;\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 14, "score": 153413.94391137597 }, { "content": "pub fn get_serde_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {\n\n if attr.path != SERDE {\n\n return Ok(Vec::new());\n\n }\n\n\n\n match attr.parse_meta() {\n\n Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),\n\n Ok(other) => {\n\n cx.error_spanned_by(other, \"expected #[serde(...)]\");\n\n Err(())\n\n }\n\n Err(err) => {\n\n cx.syn_error(err);\n\n Err(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 15, "score": 148178.1678837949 }, { "content": "/// Returns the \"highest\" value record format for an iter of valuerecords\n\npub fn highest_format<'a, T>(iter: T) -> ValueRecordFlags\n\nwhere\n\n T: Iterator<Item = &'a ValueRecord>,\n\n{\n\n iter.map(|x| x.flags())\n\n .reduce(|a, b| a | b)\n\n .unwrap_or_else(ValueRecordFlags::empty)\n\n}\n\n\n", "file_path": "src/layout/valuerecord.rs", "rank": 16, "score": 147633.96202015737 }, { "content": "/// Tests if all elements of an iterator have the same content\n\npub fn is_all_the_same<T, U>(mut iter: T) -> bool\n\nwhere\n\n T: Iterator<Item = U>,\n\n U: PartialEq,\n\n{\n\n if let Some(first) = iter.next() {\n\n for n in iter {\n\n if first != n {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! btreemap {\n\n ($($k:expr => $v:expr),* $(,)?) 
=> {\n\n std::collections::BTreeMap::<_, _>::from_iter(std::array::IntoIter::new([$(($k, $v),)*]))\n\n };\n\n }\n", "file_path": "src/utils.rs", "rank": 17, "score": 145287.6236818833 }, { "content": "pub fn replace_receiver(input: &mut DeriveInput) {\n\n let self_ty = {\n\n let ident = &input.ident;\n\n let ty_generics = input.generics.split_for_impl().1;\n\n parse_quote!(#ident #ty_generics)\n\n };\n\n let mut visitor = ReplaceReceiver(&self_ty);\n\n visitor.visit_generics_mut(&mut input.generics);\n\n visitor.visit_data_mut(&mut input.data);\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/receiver.rs", "rank": 18, "score": 144162.5583542102 }, { "content": "pub fn expand_derive_serialize(\n\n input: &mut syn::DeriveInput,\n\n) -> Result<TokenStream, Vec<syn::Error>> {\n\n replace_receiver(input);\n\n\n\n let ctxt = Ctxt::new();\n\n let cont = match Container::from_ast(&ctxt, input, Derive::Serialize) {\n\n Some(cont) => cont,\n\n None => return Err(ctxt.check().unwrap_err()),\n\n };\n\n ctxt.check()?;\n\n\n\n let ident = &cont.ident;\n\n let params = Parameters::new(&cont);\n\n let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl();\n\n match &cont.data {\n\n Data::Struct(Style::Struct, fields) => {\n\n let sizes = serialize_sizes(fields);\n\n let offset_fields = serialize_offset_fields(fields);\n\n let embed_fields = serialize_embed_fields(fields);\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 19, "score": 142904.03104751118 }, { "content": "pub fn resolve_offsets_and_serialize<T>(\n\n obj: T,\n\n output: &mut Vec<u8>,\n\n do_top: bool,\n\n) -> Result<(), SerializationError>\n\nwhere\n\n T: Serialize,\n\n{\n\n let root = Offset16::to(obj);\n\n let mut mgr = OffsetManager::new(&root);\n\n mgr.resolve();\n\n mgr.serialize(output, do_top)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate as otspec;\n\n use crate::types::*;\n\n use crate::ReaderContext;\n", "file_path": 
"crates/otspec/src/offsetmanager.rs", "rank": 20, "score": 139097.61258278342 }, { "content": "/// Loads and parses a designspace file\n\npub fn from_file(filename: &str) -> Result<Designspace, serde_xml_rs::Error> {\n\n from_reader(File::open(filename).unwrap())\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(rename = \"designspace\")]\n\n/// A designspace object\n\npub struct Designspace {\n\n /// The format of this designspace file (we support 2 and 3)\n\n pub format: f32,\n\n /// An axes element (contains individual axes)\n\n pub axes: Axes,\n\n /// An sources element (contains individual sources)\n\n pub sources: Sources,\n\n /// An instance element (optional, contains individual instances)\n\n pub instances: Option<Instances>,\n\n // pub rules: Rules,\n\n}\n\n\n", "file_path": "crates/designspace/src/lib.rs", "rank": 21, "score": 131440.28401251015 }, { "content": "pub fn open_font(matches: &clap::ArgMatches) -> Font {\n\n if matches.is_present(\"INPUT\") {\n\n let filename = matches.value_of(\"INPUT\").unwrap();\n\n let infile = File::open(filename).unwrap();\n\n font::load(infile)\n\n } else {\n\n font::load(io::stdin())\n\n }\n\n .expect(\"Could not parse font\")\n\n}\n\n\n", "file_path": "crates/fonttools-cli/src/lib.rs", "rank": 22, "score": 130444.52654696202 }, { "content": "fn serialize_sizes(fields: &[Field]) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let name = &field.original.ident;\n\n if let Some(path) = field.attrs.serialize_with() {\n\n if path.path.is_ident(\"Counted\") {\n\n quote! {\n\n + {\n\n let wrapped = otspec::Counted(self.#name.clone().into());\n\n wrapped.ot_binary_size()\n\n }\n\n }\n\n } else if path.path.is_ident(\"Counted32\") {\n\n quote! 
{\n\n + {\n\n let wrapped = otspec::Counted32(self.#name.clone().into());\n\n wrapped.ot_binary_size()\n\n }\n\n }\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 23, "score": 125226.2688325276 }, { "content": "fn serialize_fields(fields: &[Field]) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let name = &field.original.ident;\n\n if let Some(path) = field.attrs.serialize_with() {\n\n if path.path.is_ident(\"Counted\") {\n\n quote! {\n\n let wrapped = otspec::Counted(obj.#name.clone().into());\n\n wrapped.to_bytes(data)?;\n\n }\n\n } else if path.path.is_ident(\"Counted32\") {\n\n quote! {\n\n let wrapped = otspec::Counted32(obj.#name.clone().into());\n\n wrapped.to_bytes(data)?;\n\n }\n\n } else {\n\n quote! {\n\n let wrapped = #path(obj.#name);\n\n wrapped.to_bytes(data)?;\n\n }\n\n }\n\n } else {\n\n quote! { obj.#name.to_bytes(data)?; }\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 24, "score": 125226.2688325276 }, { "content": "/// Deserialize the glyf table from a `ReaderContext` object.\n\n///\n\n/// loca_offsets must be obtained from the `loca` table.\n\npub fn from_rc(\n\n c: &mut ReaderContext,\n\n loca_offsets: Vec<Option<u32>>,\n\n) -> Result<glyf, DeserializationError> {\n\n let mut res = glyf { glyphs: Vec::new() };\n\n for item in loca_offsets {\n\n match item {\n\n None => res.glyphs.push(Glyph {\n\n contours: vec![],\n\n components: vec![],\n\n overlap: false,\n\n xMax: 0,\n\n xMin: 0,\n\n yMax: 0,\n\n yMin: 0,\n\n instructions: vec![],\n\n }),\n\n Some(item) => {\n\n let old = c.ptr;\n\n c.ptr = item as usize;\n", "file_path": "src/glyf.rs", "rank": 25, "score": 125005.60551660664 }, { "content": "/// Deserializes a Horizontal Metrics Table given a binary vector and the\n\n/// `numberOfHMetrics` field of the `hhea` table.\n\npub fn from_bytes(\n\n c: &mut ReaderContext,\n\n number_of_h_metrics: uint16,\n\n) -> Result<hmtx, DeserializationError> {\n\n let mut res = 
hmtx {\n\n metrics: Vec::new(),\n\n };\n\n for _ in 0..number_of_h_metrics {\n\n let metric: Metric = c.de()?;\n\n res.metrics.push(metric)\n\n }\n\n let maybe_other_metrics: Result<Vec<int16>, DeserializationError> = c.de();\n\n if let Ok(other_metrics) = maybe_other_metrics {\n\n let last = res\n\n .metrics\n\n .last()\n\n .expect(\"Must be one advance width in hmtx!\")\n\n .advanceWidth;\n\n res.metrics.extend(other_metrics.iter().map(|x| Metric {\n\n lsb: *x,\n", "file_path": "src/hmtx.rs", "rank": 26, "score": 125005.5569327838 }, { "content": "/// Constructs a `gvar` object from a binary table, given a set of coordinates\n\n/// and end-of-contour indices. These can be extracted from the `glyf` table by\n\n/// calling the `gvar_coords_and_ends` method on each glyph.\n\npub fn from_bytes(\n\n s: &[u8],\n\n coords_and_ends: CoordsAndEndsVec,\n\n) -> Result<gvar, DeserializationError> {\n\n let mut c = ReaderContext::new(s.to_vec());\n\n c.push();\n\n let core: gvarcore = c.de()?;\n\n let offset_count = (core.glyphCount + 1) as usize;\n\n let data_offsets: Vec<u32> = if core.flags & 0x1 == 0 {\n\n // u16 offsets, need doubling\n\n let u16_and_halved: Vec<u16> = c.de_counted(offset_count)?;\n\n u16_and_halved.iter().map(|x| (x * 2).into()).collect()\n\n } else {\n\n c.de_counted(offset_count)?\n\n };\n\n // println!(\"Offsets {:?}\", dataOffsets);\n\n let axis_count = core.axisCount as usize;\n\n\n\n /* Shared tuples */\n\n let mut shared_tuples: Vec<Tuple> = Vec::with_capacity(core.sharedTupleCount as usize);\n", "file_path": "src/gvar.rs", "rank": 27, "score": 125005.24436326622 }, { "content": "pub fn from_bytes(\n\n c: &mut ReaderContext,\n\n loca_is_32bit: bool,\n\n) -> Result<loca, DeserializationError> {\n\n let mut res = loca {\n\n indices: Vec::new(),\n\n };\n\n let raw_indices: Vec<u32> = if loca_is_32bit {\n\n c.de()?\n\n } else {\n\n let x: Vec<u16> = c.de()?;\n\n x.iter().map(|x| (*x as u32) * 2).collect()\n\n };\n\n if raw_indices.is_empty() {\n\n // 
No glyphs, eh?\n\n return Ok(res);\n\n }\n\n for ab in raw_indices.windows(2) {\n\n if let [a, b] = ab {\n\n if *a == *b {\n", "file_path": "src/loca.rs", "rank": 28, "score": 125001.4972219015 }, { "content": "fn serialize_offset_fields(fields: &[Field]) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let name = &field.original.ident;\n\n let ty = &field.original.ty;\n\n if let syn::Type::Path(path) = ty {\n\n if path.path.segments.first().unwrap().ident == \"Offset16\" {\n\n quote! { &self.#name, }\n\n } else {\n\n quote! {}\n\n }\n\n } else {\n\n quote! {}\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 29, "score": 122622.2965573149 }, { "content": "fn serialize_embed_fields(fields: &[Field]) -> Vec<TokenStream> {\n\n fields\n\n .iter()\n\n .map(|field| {\n\n let name = &field.original.ident;\n\n let ty = &field.original.ty;\n\n let is_vec = if let syn::Type::Path(path) = ty {\n\n path.path.segments.first().unwrap().ident == \"VecOffset16\"\n\n } else {\n\n false\n\n };\n\n\n\n if field.attrs.embedded || is_vec {\n\n quote! {\n\n v.extend(self.#name.offset_fields());\n\n }\n\n } else {\n\n quote! {}\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 30, "score": 122622.2965573149 }, { "content": "#[allow(dead_code)] // We *do* use it, I promise. 
Like, just a few lines below.\n\npub fn from_bytes(\n\n c: &mut ReaderContext,\n\n num_points: usize,\n\n) -> Result<PackedDeltas, DeserializationError> {\n\n let mut res = vec![];\n\n while res.len() < num_points {\n\n let control_byte: u8 = c.de()?;\n\n let deltas_are_words = (control_byte & DELTAS_ARE_WORDS) > 0;\n\n // \"The low 6 bits specify the number of delta values in the run minus 1.\"\n\n // MINUS ONE.\n\n let run_count = (control_byte & DELTA_RUN_COUNT_MASK) + 1;\n\n let deltas: Vec<i16>;\n\n if control_byte & DELTAS_ARE_ZERO > 0 {\n\n deltas = std::iter::repeat(0).take(run_count as usize).collect();\n\n } else if deltas_are_words {\n\n deltas = c.de_counted(run_count.into())?;\n\n } else {\n\n let delta_bytes: Vec<i8> = c.de_counted(run_count.into())?;\n\n deltas = delta_bytes.iter().map(|x| *x as i16).collect();\n\n }\n", "file_path": "src/otvar/packeddeltas.rs", "rank": 31, "score": 121919.93546376092 }, { "content": "/// Construct a vector of points from a `kurbo::BezPath` object\n\n///\n\n/// Cubic paths will be converted to quadratic paths using the given error tolerance.\n\npub fn kurbo_contour_to_glyf_contour(kurbo_path: &kurbo::BezPath, error: f32) -> Vec<Point> {\n\n let mut points: Vec<Point> = vec![];\n\n if let PathEl::MoveTo(pt) = kurbo_path.elements()[0] {\n\n points.push(Point {\n\n x: pt.x as i16,\n\n y: pt.y as i16,\n\n on_curve: true,\n\n });\n\n }\n\n for seg in kurbo_path.segments() {\n\n match seg {\n\n PathSeg::Line(l) => points.push(Point {\n\n x: l.p1.x as i16,\n\n y: l.p1.y as i16,\n\n on_curve: true,\n\n }),\n\n PathSeg::Quad(q) => points.extend(vec![\n\n Point {\n\n x: q.p1.x as i16,\n\n y: q.p1.y as i16,\n", "file_path": "src/glyf/contourutils.rs", "rank": 32, "score": 121842.71626925946 }, { "content": "#[proc_macro_derive(Deserialize, attributes(serde))]\n\npub fn derive_deserialize(input: TokenStream) -> TokenStream {\n\n let mut input = parse_macro_input!(input as DeriveInput);\n\n de::expand_derive_deserialize(&mut 
input)\n\n .unwrap_or_else(to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/lib.rs", "rank": 33, "score": 121519.5494465804 }, { "content": "#[proc_macro_derive(Serialize, attributes(serde))]\n\npub fn derive_serialize(input: TokenStream) -> TokenStream {\n\n let mut input = parse_macro_input!(input as DeriveInput);\n\n ser::expand_derive_serialize(&mut input)\n\n .unwrap_or_else(to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/lib.rs", "rank": 34, "score": 121492.29867589788 }, { "content": "/// Perform Interpolation of Unreferenced Points on a set of deltas and coordinates\n\npub fn iup_contour(\n\n newdeltas: &mut Vec<(i16, i16)>,\n\n deltas: &[Option<Delta>],\n\n coords: &[(i16, i16)],\n\n) {\n\n if deltas.iter().all(|x| x.is_some()) {\n\n newdeltas.extend::<Vec<(i16, i16)>>(\n\n deltas\n\n .iter()\n\n .map(|x| x.as_ref().unwrap().get_2d())\n\n .collect(),\n\n );\n\n return;\n\n }\n\n let n = deltas.len();\n\n let indices: Vec<usize> = deltas\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, d)| d.is_some())\n\n .map(|(i, _)| i)\n", "file_path": "src/otvar/iup.rs", "rank": 35, "score": 119073.93916736555 }, { "content": "// Puts the given bound on any generic type parameters that are used in fields\n\n// for which filter returns true.\n\n//\n\n// For example, the following struct needs the bound `A: Serialize, B:\n\n// Serialize`.\n\n//\n\n// struct S<'b, A, B: 'b, C> {\n\n// a: A,\n\n// b: Option<&'b B>\n\n// #[serde(skip_serializing)]\n\n// c: C,\n\n// }\n\npub fn with_bound(\n\n cont: &Container,\n\n generics: &syn::Generics,\n\n filter: fn(&attr::Field, Option<&attr::Variant>) -> bool,\n\n bound: &syn::Path,\n\n) -> syn::Generics {\n\n struct FindTyParams<'ast> {\n\n // Set of all generic type parameters on the current struct (A, B, C in\n\n // the example). 
Initialized up front.\n\n all_type_params: HashSet<syn::Ident>,\n\n\n\n // Set of generic type parameters used in fields for which filter\n\n // returns true (A and B in the example). Filled in as the visitor sees\n\n // them.\n\n relevant_type_params: HashSet<syn::Ident>,\n\n\n\n // Fields whose type is an associated type of one of the generic type\n\n // parameters.\n\n associated_type_usage: Vec<&'ast syn::TypePath>,\n\n }\n", "file_path": "crates/otspec_macros/src/bound.rs", "rank": 36, "score": 116461.64867820556 }, { "content": "pub fn read_args(name: &str, description: &str) -> clap::ArgMatches<'static> {\n\n App::new(name)\n\n .about(description)\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"Sets the input file to use\")\n\n .required(false),\n\n )\n\n .arg(\n\n Arg::with_name(\"OUTPUT\")\n\n .help(\"Sets the output file to use\")\n\n .required(false),\n\n )\n\n .get_matches()\n\n}\n\n\n", "file_path": "crates/fonttools-cli/src/lib.rs", "rank": 37, "score": 106227.32149628882 }, { "content": "pub fn tag(s: &str) -> Tag {\n\n (*s).as_bytes().try_into().unwrap()\n\n}\n\n\n\nimpl Serialize for Tag {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n self[0].to_bytes(data)?;\n\n self[1].to_bytes(data)?;\n\n self[2].to_bytes(data)?;\n\n self[3].to_bytes(data)?;\n\n Ok(())\n\n }\n\n fn ot_binary_size(&self) -> usize {\n\n 4\n\n }\n\n}\n\n\n\nimpl Deserialize for Tag {\n\n fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> {\n\n Ok(c.consume(4)?.try_into().unwrap())\n\n }\n\n}\n\n\n\n#[derive(Shrinkwrap, Debug, PartialEq, Copy, Clone)]\n\npub struct Fixed(pub f32);\n\n\n\npub type Tuple = Vec<f32>;\n\n\n", "file_path": "crates/otspec/src/types.rs", "rank": 38, "score": 104102.42765102818 }, { "content": "fn locations_to_regions(locations: &[Location]) -> Vec<Support> {\n\n let mut axis_minimum: HashMap<Tag, f32> = HashMap::new();\n\n let mut axis_maximum: HashMap<Tag, f32> = HashMap::new();\n\n for 
(tag, value) in locations.iter().flatten() {\n\n axis_maximum\n\n .entry(*tag)\n\n .and_modify(|v| *v = v.max(*value))\n\n .or_insert(*value);\n\n axis_minimum\n\n .entry(*tag)\n\n .and_modify(|v| *v = v.min(*value))\n\n .or_insert(*value);\n\n }\n\n locations\n\n .iter()\n\n .map(|loc| {\n\n loc.iter()\n\n .map(|(axis, loc_v)| {\n\n (\n\n *axis,\n", "file_path": "src/otvar/locations.rs", "rank": 39, "score": 102995.253647687 }, { "content": "struct Parameters {\n\n generics: syn::Generics,\n\n}\n\n\n\nimpl Parameters {\n\n fn new(cont: &Container) -> Self {\n\n let generics = build_generics(cont);\n\n\n\n Parameters { generics }\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 40, "score": 100647.2551750442 }, { "content": "/// Convert an array of bits into an integer\n\npub fn int_list_to_num(int_list: &[u8]) -> u32 {\n\n let mut flags = 0;\n\n for flag in int_list {\n\n flags |= 1 << flag;\n\n }\n\n flags\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 41, "score": 99735.88451122347 }, { "content": "pub trait Deserialize {\n\n fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError>\n\n where\n\n Self: std::marker::Sized;\n\n}\n\n\n\nmacro_rules! 
serde_primitive {\n\n ($t: ty) => {\n\n impl Serialize for $t {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n data.extend_from_slice(&self.to_be_bytes());\n\n Ok(())\n\n }\n\n\n\n fn ot_binary_size(&self) -> usize {\n\n mem::size_of::<$t>()\n\n }\n\n }\n\n\n\n impl Deserialize for $t {\n", "file_path": "crates/otspec/src/lib.rs", "rank": 42, "score": 96208.35268664034 }, { "content": "pub fn resolve_offsets<T>(obj: T) -> T\n\nwhere\n\n T: Serialize,\n\n{\n\n let root = Offset16::to(obj);\n\n let mut mgr = OffsetManager::new(&root);\n\n mgr.resolve();\n\n root.link.unwrap()\n\n}\n\n\n", "file_path": "crates/otspec/src/offsetmanager.rs", "rank": 43, "score": 95282.01716200543 }, { "content": "pub fn expand_tables(item: TokenStream) -> TokenStream {\n\n let mut output = TokenStream::new();\n\n let mut iter = item.into_iter().peekable();\n\n let mut out_s = String::new();\n\n\n\n loop {\n\n let mut do_debug = true;\n\n // First parse table name\n\n let maybe_table_name = iter.next();\n\n if maybe_table_name.is_none() {\n\n break;\n\n }\n\n\n\n let table_name = expect_ident(maybe_table_name);\n\n\n\n loop {\n\n let next = iter.peek();\n\n if let Some(pragma) = has_pragma(&next) {\n\n if pragma == \"[embedded]\" {\n\n out_s.push_str(\"#[serde(embedded)]\");\n", "file_path": "crates/otspec_macros/src/tables.rs", "rank": 44, "score": 94201.11236236639 }, { "content": "pub fn any_offsets_need_resolving<T>(obj: &T) -> bool\n\nwhere\n\n T: Serialize,\n\n{\n\n let fields = obj.offset_fields();\n\n if fields.is_empty() {\n\n return false;\n\n }\n\n for f in fields {\n\n if f.needs_resolving() {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "crates/otspec/src/offsetmanager.rs", "rank": 45, "score": 93440.99293489283 }, { "content": "/// Returns a kurbo BezPath object representing this glyf contour\n\npub fn glyf_contour_to_kurbo_contour(contour: &[Point]) -> kurbo::BezPath {\n\n let mut path = kurbo::BezPath::new();\n\n let 
mut contour = contour.to_vec();\n\n insert_explicit_oncurves(&mut contour);\n\n path.move_to((contour[0].x as f64, contour[0].y as f64));\n\n let mut segment: Vec<&Point> = vec![];\n\n for pt in &contour[1..] {\n\n segment.push(pt);\n\n if pt.on_curve {\n\n match segment.len() {\n\n 1 => {\n\n path.line_to((segment[0].x as f64, segment[0].y as f64));\n\n }\n\n 2 => {\n\n path.quad_to(\n\n (segment[0].x as f64, segment[0].y as f64),\n\n (segment[1].x as f64, segment[1].y as f64),\n\n );\n\n }\n\n _ => {}\n", "file_path": "src/glyf/contourutils.rs", "rank": 46, "score": 91714.09211608401 }, { "content": "pub trait Deserializer<T>\n\nwhere\n\n T: Deserialize,\n\n{\n\n fn de(&mut self) -> Result<T, DeserializationError>;\n\n fn de_counted(&mut self, s: usize) -> Result<Vec<T>, DeserializationError>;\n\n}\n\n\n\nimpl<T> Deserializer<T> for ReaderContext\n\nwhere\n\n T: Deserialize,\n\n{\n\n fn de(&mut self) -> Result<T, DeserializationError> {\n\n T::from_bytes(self)\n\n }\n\n fn de_counted(&mut self, s: usize) -> Result<Vec<T>, DeserializationError> {\n\n (0..s)\n\n .map(|_| {\n\n let c: Result<T, DeserializationError> = self.de();\n\n c\n", "file_path": "crates/otspec/src/lib.rs", "rank": 47, "score": 91534.57060509807 }, { "content": "pub trait Serializer<T>\n\nwhere\n\n T: Serialize,\n\n{\n\n fn put(&mut self, data: T) -> Result<(), SerializationError>;\n\n}\n\n\n\nimpl<T> Serializer<T> for Vec<u8>\n\nwhere\n\n T: Serialize,\n\n{\n\n fn put(&mut self, data: T) -> Result<(), SerializationError> {\n\n data.to_bytes(self)\n\n }\n\n}\n\n\n", "file_path": "crates/otspec/src/lib.rs", "rank": 48, "score": 91501.10810401905 }, { "content": "// Remove the default from every type parameter because in the generated impls\n\n// they look like associated types: \"error: associated type bindings are not\n\n// allowed here\".\n\npub fn without_defaults(generics: &syn::Generics) -> syn::Generics {\n\n syn::Generics {\n\n params: generics\n\n .params\n\n .iter()\n\n .map(|param| 
match param {\n\n syn::GenericParam::Type(param) => syn::GenericParam::Type(syn::TypeParam {\n\n eq_token: None,\n\n default: None,\n\n ..param.clone()\n\n }),\n\n _ => param.clone(),\n\n })\n\n .collect(),\n\n ..generics.clone()\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/bound.rs", "rank": 49, "score": 89520.74966803342 }, { "content": "fn needs_serialize_bound(field: &attr::Field, _variant: Option<&attr::Variant>) -> bool {\n\n field.serialize_with().is_none()\n\n}\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 50, "score": 89390.2968528902 }, { "content": "fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {\n\n let stream = syn::parse_str(&s.value())?;\n\n Ok(respan(stream, s.span()))\n\n}\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 51, "score": 87746.36712335385 }, { "content": "fn enum_from_ast<'a>(\n\n cx: &Ctxt,\n\n variants: &'a Punctuated<syn::Variant, Token![,]>,\n\n) -> Vec<Variant<'a>> {\n\n variants\n\n .iter()\n\n .map(|variant| {\n\n let attrs = attr::Variant::from_ast(cx, variant);\n\n let (style, fields) = struct_from_ast(cx, &variant.fields, Some(&attrs));\n\n Variant {\n\n ident: variant.ident.clone(),\n\n attrs,\n\n style,\n\n fields,\n\n original: variant,\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/ast.rs", "rank": 52, "score": 87586.4171184163 }, { "content": "/// Returns B-tree search range parameters.\n\n///\n\n/// Various OpenType tables (the font table header, `cmap` format 4 subtables)\n\n/// contain fields which are intended to be used to optimize B-tree search\n\n/// algorithms. 
These are generally not used by implementations in practice, as\n\n/// trusting user-supplied data to determine the algorithm's activity is unwise.\n\n/// However, we still want to correctly generate these values to produce\n\n/// specification-compliant fonts.\n\npub fn get_search_range(n: u16, itemsize: u16) -> (u16, u16, u16) {\n\n let mut max_pow2: u16 = 0;\n\n while 1u16 << (max_pow2 + 1) <= n {\n\n max_pow2 += 1;\n\n }\n\n let search_range = (1 << max_pow2) * itemsize;\n\n let range_shift = cmp::max(search_range, n * itemsize) - search_range;\n\n (search_range, max_pow2, range_shift)\n\n}\n\n\n\nimpl Serialize for Font {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n let lenu16: u16 = self.tables.len().try_into().unwrap();\n\n let (search_range, max_pow2, range_shift) = get_search_range(lenu16, 16);\n\n let mut output: Vec<u8> = vec![];\n\n let mut output_tables: Vec<u8> = vec![];\n\n output.extend(&(self.sfntVersion as u32).to_be_bytes());\n\n output.extend(&lenu16.to_be_bytes());\n\n output.extend(&search_range.to_be_bytes());\n\n output.extend(&max_pow2.to_be_bytes());\n", "file_path": "src/font.rs", "rank": 53, "score": 87546.81723530457 }, { "content": "fn struct_from_ast<'a>(\n\n cx: &Ctxt,\n\n fields: &'a syn::Fields,\n\n attrs: Option<&attr::Variant>,\n\n) -> (Style, Vec<Field<'a>>) {\n\n match fields {\n\n syn::Fields::Named(fields) => (Style::Struct, fields_from_ast(cx, &fields.named, attrs)),\n\n syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {\n\n (Style::Newtype, fields_from_ast(cx, &fields.unnamed, attrs))\n\n }\n\n syn::Fields::Unnamed(fields) => (Style::Tuple, fields_from_ast(cx, &fields.unnamed, attrs)),\n\n syn::Fields::Unit => (Style::Unit, Vec::new()),\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/ast.rs", "rank": 54, "score": 87540.82659325487 }, { "content": "fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {\n\n let compile_errors = 
errors.iter().map(syn::Error::to_compile_error);\n\n quote!(#(#compile_errors)*)\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/lib.rs", "rank": 55, "score": 87375.42628872453 }, { "content": "fn respan_token(mut token: TokenTree, span: Span) -> TokenTree {\n\n if let TokenTree::Group(g) = &mut token {\n\n *g = Group::new(g.delimiter(), respan(g.stream(), span));\n\n }\n\n token.set_span(span);\n\n token\n\n}\n", "file_path": "crates/otspec_macros/src/internals/respan.rs", "rank": 56, "score": 86191.28254041457 }, { "content": "#[proc_macro]\n\npub fn tables(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n tables::expand_tables(input)\n\n}\n", "file_path": "crates/otspec_macros/src/lib.rs", "rank": 57, "score": 84923.73809727622 }, { "content": "fn piecewise_linear_map(mapping: HashMap<i32, f32>, value: i32) -> f32 {\n\n if mapping.contains_key(&value) {\n\n return *mapping.get(&value).unwrap();\n\n }\n\n if mapping.keys().len() == 0 {\n\n return value as f32;\n\n }\n\n let min = *mapping.keys().min().unwrap();\n\n if value < min {\n\n return value as f32 + *mapping.get(&min).unwrap() - (min as f32);\n\n }\n\n let max = *mapping.keys().max().unwrap();\n\n if value > max {\n\n return value as f32 + mapping.get(&max).unwrap() - (max as f32);\n\n }\n\n let a = mapping.keys().filter(|k| *k < &value).max().unwrap();\n\n let b = mapping.keys().filter(|k| *k > &value).min().unwrap();\n\n let va = mapping.get(a).unwrap();\n\n let vb = mapping.get(b).unwrap();\n\n va + (vb - va) * (value - a) as f32 / (*b - *a) as f32\n", "file_path": "crates/designspace/src/lib.rs", "rank": 58, "score": 84479.28045230058 }, { "content": "fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>\n\nwhere\n\n T: Parse,\n\n{\n\n let tokens = spanned_tokens(s)?;\n\n syn::parse2(tokens)\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 59, "score": 84278.01616831173 }, { "content": "fn split_into_two_layer(in_hash: PairPositioningMap) -> 
SplitPairPositioningMap {\n\n let mut out_hash = BTreeMap::new();\n\n for (&(l, r), &vs) in in_hash.iter() {\n\n out_hash\n\n .entry(l)\n\n .or_insert_with(BTreeMap::new)\n\n .insert(r, vs);\n\n }\n\n out_hash\n\n}\n\n\n", "file_path": "src/layout/gpos2.rs", "rank": 60, "score": 84070.2596407649 }, { "content": "pub trait Serialize: std::fmt::Debug {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError>;\n\n fn ot_binary_size(&self) -> usize {\n\n // Lazy implementation that works everywhere\n\n let mut d = vec![];\n\n self.to_bytes(&mut d).unwrap();\n\n d.len()\n\n }\n\n fn offset_fields(&self) -> Vec<&dyn OffsetMarkerTrait> {\n\n vec![]\n\n }\n\n fn to_bytes_shallow(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n self.to_bytes(data)\n\n }\n\n}\n\n\n", "file_path": "crates/otspec/src/lib.rs", "rank": 61, "score": 83519.55716811793 }, { "content": "fn rotate_set(s: HashSet<i16>, mut k: i16, n: usize) -> HashSet<i16> {\n\n k %= n as i16;\n\n if k == 0 {\n\n return s;\n\n }\n\n s.iter().map(|v| ((*v + k) % (n as i16))).collect()\n\n}\n\n\n", "file_path": "src/otvar/iup.rs", "rank": 62, "score": 82247.7414098266 }, { "content": "// TODO: delete when `is_sorted` stablizes: https://github.com/rust-lang/rust/issues/53485\n\n// copied from stdlib\n\nfn is_sorted<T: Ord>(slice: &[T]) -> bool {\n\n let mut iter = slice.iter();\n\n let mut prev = match iter.next() {\n\n Some(x) => x,\n\n None => return true,\n\n };\n\n for next in iter {\n\n if next < prev {\n\n return false;\n\n }\n\n prev = next;\n\n }\n\n true\n\n}\n\n\n\nimpl Coverage {\n\n fn most_efficient_format(&self) -> u16 {\n\n let as_consecutive = consecutive_slices(&self.glyphs);\n\n if self.glyphs.is_empty()\n\n || !is_sorted(&self.glyphs)\n", "file_path": "src/layout/coverage.rs", "rank": 63, "score": 79475.34158171575 }, { "content": "fn build_generics(cont: &Container) -> syn::Generics {\n\n let generics = bound::without_defaults(cont.generics);\n\n\n\n 
bound::with_bound(\n\n cont,\n\n &generics,\n\n needs_serialize_bound,\n\n &parse_quote!(_serde::Serialize),\n\n )\n\n}\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 64, "score": 76533.92550224332 }, { "content": "// This is purely internal but we need to make it pub because it's shared\n\n// with otspec_macros. I'm not going to rustdoc it, though.\n\n//\n\n// The idea behind this is that we need to be able to build an object\n\n// graph containing different subtable types. To do *that*, we need to erase\n\n// the internal type of the `Offset16<T>`, and so turn it into a trait object.\n\n// So we expose a portion of the Offset16's functionality inside this marker\n\n// trait.\n\npub trait OffsetMarkerTrait: Serialize + std::fmt::Debug {\n\n fn children(&self) -> Vec<&dyn OffsetMarkerTrait>;\n\n fn object_size(&self) -> usize;\n\n fn total_size_with_descendants(&self) -> usize;\n\n fn needs_resolving(&self) -> bool;\n\n fn set(&self, off: uint16);\n\n fn serialize_contents(&self, output: &mut Vec<u8>) -> Result<(), SerializationError>;\n\n fn serialize_offset(&self, output: &mut Vec<u8>) -> Result<(), SerializationError>;\n\n}\n\n\n\nimpl<T: Serialize + std::fmt::Debug> OffsetMarkerTrait for Offset16<T> {\n\n // When building the tree, we need to know which of my fields also have\n\n // offsets.\n\n fn children(&self) -> Vec<&dyn OffsetMarkerTrait> {\n\n self.link.as_ref().map_or(vec![], |l| l.offset_fields())\n\n }\n\n // And when computing the offset for the *next* link, we need to know\n\n // how big this object is.\n\n fn object_size(&self) -> usize {\n\n self.link.as_ref().map_or(0, |l| l.ot_binary_size())\n", "file_path": "crates/otspec/src/offsets.rs", "rank": 65, "score": 76238.72889574699 }, { "content": "fn get_vector_arg(path: &syn::TypePath) -> TokenStream {\n\n if let syn::PathArguments::AngleBracketed(brackets) =\n\n &path.path.segments.first().unwrap().arguments\n\n {\n\n let g = brackets.args.first().unwrap();\n\n let mut t = 
TokenStream::new();\n\n g.to_tokens(&mut t);\n\n t\n\n } else {\n\n let mut t = TokenStream::new();\n\n path.to_tokens(&mut t);\n\n panic!(\"Vector wasn't generic in {:?}\", t);\n\n }\n\n}\n\n\n\n// #[proc_macro_derive(Deserialize)]\n\n// pub fn deserialize_derive(input: TokenStream) -> TokenStream {\n\n// let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n\n\n// let fields = match &ast.data {\n", "file_path": "crates/otspec_macros/src/de.rs", "rank": 66, "score": 71326.91315774518 }, { "content": "#[derive(Debug, PartialEq, Serialize)]\n\n#[allow(non_camel_case_types, non_snake_case)]\n\nstruct cmap0 {\n\n format: uint16,\n\n length: uint16,\n\n language: uint16,\n\n glyphIdArray: Vec<u8>,\n\n}\n\n\n\nimpl cmap0 {\n\n fn from_mapping(_language_id: uint16, _map: &BTreeMap<uint32, uint16>) -> Self {\n\n unimplemented!();\n\n // Self {\n\n // format: 0,\n\n // length: 0,\n\n // language: languageID,\n\n // glyphIdArray: Vec::new(),\n\n // }\n\n }\n\n fn to_mapping(&self) -> BTreeMap<uint32, uint16> {\n\n BTreeMap::new()\n\n }\n", "file_path": "src/cmap.rs", "rank": 67, "score": 70502.42471586427 }, { "content": "fn get_lit_str<'a>(cx: &Ctxt, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {\n\n get_lit_str2(cx, attr_name, attr_name, lit)\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 68, "score": 69505.73147324004 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct TableRecord {\n\n tag: Tag,\n\n checksum: uint32,\n\n offset: uint32,\n\n length: uint32,\n\n}\n\n/// The header of the font's table directory\n", "file_path": "src/font.rs", "rank": 69, "score": 68806.66614013717 }, { "content": "#[derive(Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct TableHeader {\n\n sfntVersion: u32,\n\n numTables: u16,\n\n _searchRange: u16,\n\n _entrySelector: u16,\n\n _rangeShift: u16,\n\n}\n\n\n\n/// An OpenType font object\n\n#[derive(Debug)]\n\n#[allow(non_snake_case)]\n\npub struct Font {\n\n /// 
Font version (TrueType/OpenType)\n\n sfntVersion: SfntVersion,\n\n /// Dictionary of tables in the font\n\n pub tables: BTreeMap<Tag, Table>,\n\n _numGlyphs: Option<u16>,\n\n}\n\n\n\nuse otspec::ser;\n", "file_path": "src/font.rs", "rank": 70, "score": 68802.01722293516 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum ComponentScalingMode {\n\n ScaledOffset,\n\n UnscaledOffset,\n\n Default,\n\n}\n\n*/\n\n\n\n/// A high-level representation of a component within a glyph\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Component {\n\n /// The glyph ID that this component references.\n\n pub glyph_index: uint16,\n\n /// An affine transformation applied to the component's contours.\n\n pub transformation: Affine,\n\n /// Alternate, and rarely used, method of positioning components using contour point numbers.\n\n pub match_points: Option<(uint16, uint16)>,\n\n /// Flags.\n\n /// Most of these are calculated automatically on serialization. Those which can be\n\n /// meaningfully manually set are `ROUND_XY_TO_GRID`, `USE_MY_METRICS`,\n\n /// `SCALED_COMPONENT_OFFSET`, `UNSCALED_COMPONENT_OFFSET` and `OVERLAP_COMPOUND`.\n", "file_path": "src/glyf/component.rs", "rank": 71, "score": 66182.84031841162 }, { "content": "struct Parameters {\n\n generics: syn::Generics,\n\n}\n\n\n\nimpl Parameters {\n\n fn new(cont: &Container) -> Self {\n\n let generics = build_generics(cont);\n\n\n\n Parameters { generics }\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 72, "score": 65799.94965956022 }, { "content": "fn needs_serialize_bound(field: &attr::Field, _variant: Option<&attr::Variant>) -> bool {\n\n field.serialize_with().is_none()\n\n}\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 73, "score": 64424.898744922546 }, { "content": "fn split_range(\n\n start_code: u16,\n\n end_code: u16,\n\n map: &BTreeMap<uint32, uint16>,\n\n) -> (Vec<u16>, Vec<u16>) {\n\n if start_code == end_code {\n\n return (vec![], vec![end_code]);\n\n }\n\n let 
mut last_id = map[&(start_code as u32)];\n\n let mut last_code = start_code;\n\n let mut in_order = None;\n\n let mut ordered_begin = None;\n\n let mut subranges = Vec::new();\n\n for code in (start_code + 1)..(end_code + 1) {\n\n let glyph_id = *map.get(&(code as u32)).unwrap_or(&0);\n\n if glyph_id > 0 && glyph_id - 1 == last_id {\n\n if in_order.is_none() || in_order == Some(0) {\n\n in_order = Some(1);\n\n ordered_begin = Some(last_code);\n\n }\n", "file_path": "src/cmap.rs", "rank": 74, "score": 62795.84518754024 }, { "content": "fn iup_segment(\n\n newdeltas: &mut Vec<(i16, i16)>,\n\n coords: &[(i16, i16)],\n\n rc1: (i16, i16),\n\n rd1: &Option<Delta>,\n\n rc2: (i16, i16),\n\n rd2: &Option<Delta>,\n\n) {\n\n let rd1 = rd1.as_ref().unwrap().get_2d();\n\n let rd2 = rd2.as_ref().unwrap().get_2d();\n\n let mut out_arrays: Vec<Vec<i16>> = vec![vec![], vec![]];\n\n for (j, out_array) in out_arrays.iter_mut().enumerate() {\n\n let (mut x1, mut x2, mut d1, mut d2) = if j == 0 {\n\n (rc1.0, rc2.0, rd1.0, rd2.0)\n\n } else {\n\n (rc1.1, rc2.1, rd1.1, rd2.1)\n\n };\n\n if x1 == x2 {\n\n let n = coords.len();\n\n out_array.extend(std::iter::repeat(if d1 == d2 { d1 } else { 0 }).take(n));\n", "file_path": "src/otvar/iup.rs", "rank": 75, "score": 61260.054173662844 }, { "content": "fn can_iup_between(\n\n deltas: &[(i16, i16)],\n\n coords: &[(i16, i16)],\n\n i_i16: i16,\n\n j_i16: i16,\n\n tolerance: f32,\n\n) -> bool {\n\n assert!(j_i16 - i_i16 >= 2);\n\n let i = i_i16.rem_euclid((deltas.len()) as i16) as usize;\n\n let j = j_i16.rem_euclid((deltas.len()) as i16) as usize;\n\n let mut coord_portion: Vec<(i16, i16)>;\n\n let mut delta_portion: Vec<(i16, i16)>;\n\n if i + 1 > j {\n\n coord_portion = coords[i + 1..].to_vec();\n\n coord_portion.extend(coords[0..j].to_vec());\n\n delta_portion = deltas[i + 1..].to_vec();\n\n delta_portion.extend(deltas[0..j].to_vec());\n\n } else {\n\n coord_portion = coords[i + 1..j].to_vec();\n\n delta_portion = deltas[i + 
1..j].to_vec();\n", "file_path": "src/otvar/iup.rs", "rank": 76, "score": 61260.054173662844 }, { "content": "fn iup_contour_optimize(\n\n deltas_slice: &[(i16, i16)],\n\n coords_slice: &[(i16, i16)],\n\n tolerance: f32,\n\n) -> Vec<Option<Delta>> {\n\n let mut deltas = deltas_slice.to_vec();\n\n let mut coords = coords_slice.to_vec();\n\n let n = deltas.len();\n\n let mut rv = vec![];\n\n if deltas\n\n .iter()\n\n .all(|(x, y)| (x.abs() as f32) <= tolerance && (y.abs() as f32) <= tolerance)\n\n {\n\n for _ in 0..n {\n\n rv.push(None);\n\n }\n\n return rv;\n\n }\n\n\n\n if n == 1 {\n", "file_path": "src/otvar/iup.rs", "rank": 77, "score": 59845.84379335515 }, { "content": "type Coords = Vec<(int16, int16)>;\n\npub(crate) type CoordsAndEndsVec = Vec<(Coords, Vec<usize>)>;\n\n\n\ntables!( gvarcore {\n\n uint16 majorVersion\n\n uint16 minorVersion\n\n uint16 axisCount\n\n uint16 sharedTupleCount\n\n u32 sharedTuplesOffset\n\n uint16 glyphCount\n\n uint16 flags\n\n u32 glyphVariationDataArrayOffset\n\n}\n\n);\n\n\n\n/// How a glyph's points vary at one region of the design space.\n\n///\n\n/// (This is the user-friendly version of what is serialized as a TupleVariation)\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct DeltaSet {\n", "file_path": "src/gvar.rs", "rank": 78, "score": 58800.64420849542 }, { "content": "struct Attr<'c, T> {\n\n cx: &'c Ctxt,\n\n name: Symbol,\n\n value: Option<T>,\n\n}\n\n\n\nimpl<'c, T> Attr<'c, T> {\n\n fn none(cx: &'c Ctxt, name: Symbol) -> Self {\n\n Attr {\n\n cx,\n\n name,\n\n value: None,\n\n }\n\n }\n\n\n\n fn set<A: ToTokens>(&mut self, obj: A, value: T) {\n\n let tokens = obj.into_token_stream();\n\n\n\n if self.value.is_some() {\n\n self.cx\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 79, "score": 58544.313702314874 }, { "content": "fn _iup_contour_optimize_dp(\n\n deltas: &[(i16, i16)],\n\n coords: &[(i16, i16)],\n\n forced: &HashSet<i16>,\n\n tolerance: f32,\n\n lookback_o: Option<i16>,\n\n) -> 
(HashMap<i16, i16>, HashMap<i16, i16>) {\n\n let n = deltas.len();\n\n let lookback = lookback_o.unwrap_or(n as i16);\n\n let mut costs: HashMap<i16, i16> = HashMap::new();\n\n let mut chain: HashMap<i16, i16> = HashMap::new();\n\n // println!(\"Doing DP. Forced={:?}\", forced);\n\n costs.insert(-1, 0);\n\n for i in 0..n {\n\n // println!(\" i={:?}\", i);\n\n let i_i16 = i as i16;\n\n let mut best_cost = costs.get(&(i_i16 - 1)).unwrap() + 1;\n\n costs.insert(i_i16, best_cost);\n\n chain.insert(i_i16, i_i16 - 1);\n\n // println!(\" best_cost={:?}\", best_cost);\n", "file_path": "src/otvar/iup.rs", "rank": 80, "score": 58539.326388492256 }, { "content": "fn _iup_contour_bound_forced_set(\n\n deltas: &[(i16, i16)],\n\n coords: &[(i16, i16)],\n\n tolerance: f32,\n\n) -> HashSet<i16> {\n\n assert_eq!(deltas.len(), coords.len());\n\n let mut forced = HashSet::new();\n\n let mut nd = deltas[0];\n\n let mut nc = coords[0];\n\n let mut i = (deltas.len() - 1) as i16;\n\n let mut ld = deltas[i as usize];\n\n let mut lc = coords[i as usize];\n\n while i > -1 {\n\n let d = ld;\n\n let c = lc;\n\n // Use Euclidean remainders here to get i=0 case\n\n ld = deltas[((i - 1).rem_euclid(deltas.len() as i16)) as usize];\n\n lc = coords[((i - 1).rem_euclid(coords.len() as i16)) as usize];\n\n for j in 0..2 {\n\n let cj = if j == 0 { c.0 } else { c.1 } as f32;\n", "file_path": "src/otvar/iup.rs", "rank": 81, "score": 57328.6518285928 }, { "content": "fn checksum(x: &[u8]) -> u32 {\n\n let mut sum = Wrapping(0u32);\n\n for slice in x.chunks(4) {\n\n if slice.len() == 4 {\n\n let maybe_array: [u8; 4] = slice.try_into().unwrap();\n\n sum += Wrapping(u32::from_be_bytes(maybe_array));\n\n } else {\n\n let mut final_bit = [0u8; 4];\n\n for (&x, p) in slice.iter().zip(final_bit.iter_mut()) {\n\n *p = x;\n\n }\n\n sum += Wrapping(u32::from_be_bytes(final_bit));\n\n }\n\n }\n\n sum.0\n\n}\n\n\n", "file_path": "src/font.rs", "rank": 82, "score": 55854.07079710036 }, { "content": "struct 
BoolAttr<'c>(Attr<'c, ()>);\n\n\n\nimpl<'c> BoolAttr<'c> {\n\n fn none(cx: &'c Ctxt, name: Symbol) -> Self {\n\n BoolAttr(Attr::none(cx, name))\n\n }\n\n\n\n fn set_true<A: ToTokens>(&mut self, obj: A) {\n\n self.0.set(obj, ());\n\n }\n\n\n\n fn get(&self) -> bool {\n\n self.0.value.is_some()\n\n }\n\n}\n\n\n\n/// Represents struct or enum attribute information.\n\npub struct Container {\n\n pub is_embedded: bool,\n\n}\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 83, "score": 55549.58858717077 }, { "content": "fn fields_from_ast<'a>(\n\n cx: &Ctxt,\n\n fields: &'a Punctuated<syn::Field, Token![,]>,\n\n attrs: Option<&attr::Variant>,\n\n) -> Vec<Field<'a>> {\n\n fields\n\n .iter()\n\n .enumerate()\n\n .map(|(i, field)| Field {\n\n member: match &field.ident {\n\n Some(ident) => syn::Member::Named(ident.clone()),\n\n None => syn::Member::Unnamed(i.into()),\n\n },\n\n attrs: attr::Field::from_ast(cx, i, field, attrs),\n\n ty: &field.ty,\n\n original: field,\n\n })\n\n .collect()\n\n}\n", "file_path": "crates/otspec_macros/src/internals/ast.rs", "rank": 84, "score": 55230.487059120074 }, { "content": "fn parse_lit_into_expr_path(\n\n cx: &Ctxt,\n\n attr_name: Symbol,\n\n lit: &syn::Lit,\n\n) -> Result<syn::ExprPath, ()> {\n\n let string = get_lit_str(cx, attr_name, lit)?;\n\n parse_lit_str(string).map_err(|_| {\n\n cx.error_spanned_by(lit, format!(\"failed to parse path: {:?}\", string.value()))\n\n })\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 85, "score": 55155.529569530976 }, { "content": "struct ReplaceReceiver<'a>(&'a TypePath);\n\n\n\nimpl ReplaceReceiver<'_> {\n\n fn self_ty(&self, span: Span) -> TypePath {\n\n let tokens = self.0.to_token_stream();\n\n let respanned = respan(tokens, span);\n\n syn::parse2(respanned).unwrap()\n\n }\n\n\n\n fn self_to_qself(&self, qself: &mut Option<QSelf>, path: &mut Path) {\n\n if path.leading_colon.is_some() || path.segments[0].ident != \"Self\" {\n\n return;\n\n 
}\n\n\n\n if path.segments.len() == 1 {\n\n self.self_to_expr_path(path);\n\n return;\n\n }\n\n\n\n let span = path.segments[0].ident.span();\n", "file_path": "crates/otspec_macros/src/internals/receiver.rs", "rank": 86, "score": 54484.54487124902 }, { "content": "fn is_contiguous_list(l: &[u16]) -> bool {\n\n for ab in l.windows(2) {\n\n if let [a, b] = ab {\n\n if *b != *a + 1 {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/cmap.rs", "rank": 87, "score": 54439.86041679267 }, { "content": "fn get_lit_str2<'a>(\n\n cx: &Ctxt,\n\n attr_name: Symbol,\n\n meta_item_name: Symbol,\n\n lit: &'a syn::Lit,\n\n) -> Result<&'a syn::LitStr, ()> {\n\n if let syn::Lit::Str(lit) = lit {\n\n Ok(lit)\n\n } else {\n\n cx.error_spanned_by(\n\n lit,\n\n format!(\n\n \"expected serde {} attribute to be a string: `{} = \\\"...\\\"`\",\n\n attr_name, meta_item_name\n\n ),\n\n );\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/internals/attr.rs", "rank": 88, "score": 54105.482292350454 }, { "content": "fn ot_round(value: f32) -> i32 {\n\n (value + 0.5).floor() as i32\n\n}\n\n\n\nimpl Serialize for Fixed {\n\n fn to_bytes(&self, data: &mut Vec<u8>) -> Result<(), SerializationError> {\n\n let packed: i32 = ot_round(self.0 * 65536.0);\n\n packed.to_bytes(data)\n\n }\n\n fn ot_binary_size(&self) -> usize {\n\n 4\n\n }\n\n}\n\nimpl Deserialize for Fixed {\n\n fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> {\n\n let packed: i32 = c.de()?;\n\n Ok(Fixed(packed as f32 / 65536.0))\n\n }\n\n}\n\n\n", "file_path": "crates/otspec/src/types.rs", "rank": 89, "score": 50797.6636852607 }, { "content": "fn special_type(t: &str) -> Option<String> {\n\n match t {\n\n /* We don't use types from the fixed crate here because fixed-point\n\n arithmetic is an artefact of the storage format of OpenType, and\n\n not something we want to foist on the user. It's more ergonomic\n\n for them to be able to manipulate plain f32s. 
*/\n\n \"Fixed\" => Some(\"f32\".to_string()),\n\n \"F2DOT14\" => Some(\"f32\".to_string()),\n\n /* But we *do* use fixed point here, because we want to be able to\n\n compare fractional version numbers for equality without having to\n\n do epsilon dances. */\n\n \"Version16Dot16\" => Some(\"U16F16\".to_string()),\n\n \"Offset16\" => Some(\"u16\".to_string()),\n\n \"Offset32\" => Some(\"u32\".to_string()),\n\n \"LONGDATETIME\" => Some(\"chrono::NaiveDateTime\".to_string()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/tables.rs", "rank": 90, "score": 48422.74129581331 }, { "content": "fn build_generics(cont: &Container) -> syn::Generics {\n\n let generics = bound::without_defaults(cont.generics);\n\n\n\n bound::with_bound(\n\n cont,\n\n &generics,\n\n needs_serialize_bound,\n\n &parse_quote!(_serde::Serialize),\n\n )\n\n}\n", "file_path": "crates/otspec_macros/src/ser.rs", "rank": 91, "score": 47443.889356713524 }, { "content": "fn support_scalar(loc: &Location, support: &Support) -> f32 {\n\n let mut scalar = 1.0;\n\n for (&axis, &(lower, peak, upper)) in support.iter() {\n\n if peak == 0.0 {\n\n continue;\n\n }\n\n if lower > peak || peak > upper {\n\n continue;\n\n }\n\n if lower < 0.0 && upper > 0.0 {\n\n continue;\n\n }\n\n let v: f32 = *loc.get(&axis).unwrap_or(&0.0);\n\n if (v - peak).abs() < f32::EPSILON {\n\n continue;\n\n }\n\n if v <= lower || upper <= v {\n\n scalar = 0.0;\n\n break;\n\n }\n\n if v < peak {\n\n scalar *= (v - lower) / (peak - lower)\n\n } else {\n\n scalar *= (v - upper) / (peak - upper)\n\n }\n\n }\n\n scalar\n\n}\n\n\n", "file_path": "src/otvar/locations.rs", "rank": 92, "score": 47260.05721106577 }, { "content": "#[cfg(not(nightly))]\n\nfn expect_ident(item: Option<TokenTree>) -> String {\n\n match item {\n\n Some(TokenTree::Ident(i)) => i.to_string(),\n\n None => {\n\n panic!(\"Expected identifier, found end of macro\")\n\n }\n\n Some(i) => {\n\n panic!(\"Syntax error: expected ident, found tokens: 
'{:?}'\", i);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/tables.rs", "rank": 93, "score": 46527.656400959546 }, { "content": "fn get_encoding(platform_id: u16, encoding_id: u16) -> EncodingRef {\n\n if platform_id == 0 {\n\n return UTF_16BE;\n\n }\n\n if platform_id == 1 {\n\n if encoding_id == 7 {\n\n return MAC_CYRILLIC;\n\n } else {\n\n return MAC_ROMAN; // XXX NO THIS IS WRONG.\n\n }\n\n }\n\n if platform_id == 2 {\n\n match encoding_id {\n\n 0 => return WINDOWS_1252,\n\n 1 => return UTF_16BE,\n\n 2 => return WINDOWS_1252,\n\n _ => unimplemented!(),\n\n };\n\n }\n\n if platform_id == 3 {\n", "file_path": "src/name.rs", "rank": 94, "score": 45364.972316212 }, { "content": "fn has_pragma(item: &Option<&TokenTree>) -> Option<String> {\n\n match item {\n\n Some(TokenTree::Group(i)) => {\n\n if i.delimiter() == Delimiter::Bracket {\n\n return Some(i.to_string());\n\n }\n\n None\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/tables.rs", "rank": 95, "score": 45364.972316212 }, { "content": "#[cfg(not(nightly))]\n\nfn expect_group(item: Option<TokenTree>, delimiter: Delimiter) -> TokenStream {\n\n match item {\n\n Some(TokenTree::Group(i)) => {\n\n if i.delimiter() == delimiter {\n\n i.stream()\n\n } else {\n\n let tokens =\n\n quote::quote_spanned!(i.span().into()=>compile_error!(\"expected bool\"));\n\n tokens.into()\n\n }\n\n }\n\n None => {\n\n let tokens = quote::quote! 
{\n\n compile_error!(\"Expected delimiter, found end of macro\")\n\n };\n\n tokens.into()\n\n }\n\n Some(i) => {\n\n let tokens =\n\n quote::quote_spanned!(i.span().into()=>compile_error!(\"expected an ident\"));\n\n tokens.into()\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/otspec_macros/src/tables.rs", "rank": 96, "score": 41909.91931770173 }, { "content": "fn in_run(gid: u16, last_gid: u16, code: u32, last_code: u32) -> bool {\n\n (gid == 1 + last_gid) && (code == 1 + last_code)\n\n}\n\nimpl cmap12 {\n\n /// Creates a new cmap12 subtable for a given language ID, from a mapping of\n\n /// Unicode codepoints to glyph IDs\n\n pub fn from_mapping(language_id: uint16, map: &BTreeMap<uint32, uint16>) -> Self {\n\n let mut char_codes: Vec<uint32> = map.keys().cloned().collect();\n\n char_codes.sort_unstable();\n\n let mut iter = char_codes.iter();\n\n let mut start_code: u32 = *(iter.next().unwrap());\n\n if start_code == 0 {\n\n // Try again\n\n start_code = *(iter.next().unwrap());\n\n }\n\n let mut last_code = start_code - 1;\n\n let mut start_gid = map.get(&start_code).unwrap();\n\n let mut last_gid = start_gid - 1;\n\n let mut groups: Vec<SequentialMapGroup> = vec![];\n\n for &code in iter {\n", "file_path": "src/cmap.rs", "rank": 97, "score": 40199.99951519015 }, { "content": " data.put(&self.componentGlyphIDs)\n\n }\n\n}\n\n\n\nimpl Deserialize for Ligature {\n\n fn from_bytes(c: &mut ReaderContext) -> Result<Self, DeserializationError> {\n\n let ligature_glyph: uint16 = c.de()?;\n\n let component_count: uint16 = c.de()?;\n\n let components: Vec<uint16> = c.de_counted(component_count as usize - 1)?;\n\n Ok(Ligature {\n\n ligatureGlyph: ligature_glyph,\n\n componentGlyphIDs: components,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::iter::FromIterator;\n", "file_path": "src/layout/gsub4.rs", "rank": 99, "score": 52.65273470004196 } ]
Rust
src/bytecode/src/loader.rs
ales-tsurko/koto
040c255d2170ac44e6743f6b381c7fb639e492a2
use { crate::{Chunk, Compiler, CompilerError, CompilerSettings}, koto_parser::{format_error_with_excerpt, Parser, ParserError}, std::{collections::HashMap, error, fmt, path::PathBuf, sync::Arc}, }; #[derive(Clone, Debug)] pub enum LoaderErrorType { ParserError(ParserError), CompilerError(CompilerError), IoError(String), } #[derive(Clone, Debug)] pub struct LoaderError { error: LoaderErrorType, source: String, source_path: Option<PathBuf>, } impl LoaderError { pub fn from_parser_error( error: ParserError, source: &str, source_path: Option<PathBuf>, ) -> Self { Self { error: LoaderErrorType::ParserError(error), source: source.into(), source_path, } } pub fn from_compiler_error( error: CompilerError, source: &str, source_path: Option<PathBuf>, ) -> Self { Self { error: LoaderErrorType::CompilerError(error), source: source.into(), source_path, } } pub fn io_error(error: String) -> Self { Self { error: LoaderErrorType::IoError(error), source: "".into(), source_path: None, } } pub fn is_indentation_error(&self) -> bool { match &self.error { LoaderErrorType::ParserError(e) => e.is_indentation_error(), _ => false, } } } impl fmt::Display for LoaderError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use LoaderErrorType::*; if f.alternate() { match &self.error { ParserError(koto_parser::ParserError { error, .. }) => { f.write_str(&error.to_string()) } CompilerError(crate::CompilerError { message, .. 
}) => f.write_str(message), IoError(e) => f.write_str(&e), } } else { match &self.error { ParserError(koto_parser::ParserError { error, span }) => { f.write_str(&format_error_with_excerpt( Some(&error.to_string()), &self.source_path, &self.source, span.start, span.end, )) } CompilerError(crate::CompilerError { message, span }) => { f.write_str(&format_error_with_excerpt( Some(&message), &self.source_path, &self.source, span.start, span.end, )) } IoError(e) => f.write_str(&e), } } } } impl error::Error for LoaderError {} #[derive(Clone, Default)] pub struct Loader { chunks: HashMap<PathBuf, Arc<Chunk>>, } impl Loader { fn compile( &mut self, script: &str, script_path: Option<PathBuf>, compiler_settings: CompilerSettings, ) -> Result<Arc<Chunk>, LoaderError> { match Parser::parse(&script) { Ok((ast, constants)) => { let (bytes, mut debug_info) = match Compiler::compile(&ast, compiler_settings) { Ok((bytes, debug_info)) => (bytes, debug_info), Err(e) => return Err(LoaderError::from_compiler_error(e, script, script_path)), }; debug_info.source = script.to_string(); Ok(Arc::new(Chunk::new( bytes, constants, script_path, debug_info, ))) } Err(e) => Err(LoaderError::from_parser_error(e, script, script_path)), } } pub fn compile_repl(&mut self, script: &str) -> Result<Arc<Chunk>, LoaderError> { self.compile(script, None, CompilerSettings { repl_mode: true }) } pub fn compile_script( &mut self, script: &str, script_path: &Option<PathBuf>, ) -> Result<Arc<Chunk>, LoaderError> { self.compile(script, script_path.clone(), CompilerSettings::default()) } pub fn compile_module( &mut self, name: &str, load_from_path: Option<PathBuf>, ) -> Result<(Arc<Chunk>, PathBuf), LoaderError> { let path = match &load_from_path { Some(path) => match path.canonicalize() { Ok(canonicalized) if canonicalized.is_file() => match canonicalized.parent() { Some(parent_dir) => parent_dir.to_path_buf(), None => { return Err(LoaderError::io_error( "Failed to get parent of provided path".to_string(), )) } 
}, Ok(canonicalized) => canonicalized, Err(e) => return Err(LoaderError::io_error(e.to_string())), }, None => match std::env::current_dir() { Ok(path) => path, Err(e) => return Err(LoaderError::io_error(e.to_string())), }, }; let mut load_module_from_path = |module_path: PathBuf| match self.chunks.get(&module_path) { Some(chunk) => Ok((chunk.clone(), module_path.clone())), None => match std::fs::read_to_string(&module_path) { Ok(script) => { let chunk = self.compile( &script, Some(module_path.clone()), CompilerSettings::default(), )?; self.chunks.insert(module_path.clone(), chunk.clone()); Ok((chunk, module_path)) } Err(_) => Err(LoaderError::io_error(format!( "File not found: {}", module_path.to_string_lossy() ))), }, }; let extension = "koto"; let named_path = path.join(name); let module_path = named_path.with_extension(extension); if module_path.exists() { load_module_from_path(module_path) } else { let module_path = named_path.join("main").with_extension(extension); if module_path.exists() { load_module_from_path(module_path) } else { Err(LoaderError::io_error(format!( "Unable to find module '{}'", name ))) } } } }
use { crate::{Chunk, Compiler, CompilerError, CompilerSettings}, koto_parser::{format_error_with_excerpt, Parser, ParserError}, std::{collections::HashMap, error, fmt, path::PathBuf, sync::Arc}, }; #[derive(Clone, Debug)] pub enum LoaderErrorType { ParserError(ParserError), CompilerError(CompilerError), IoError(String), } #[derive(Clone, Debug)] pub struct LoaderError { error: LoaderErrorType, source: String, source_path: Option<PathBuf>, } impl LoaderError { pub fn from_parser_error( error: ParserError, source: &str, source_path: Option<PathBuf>, ) -> Self { Self { error: LoaderErrorType::ParserError(error), source: source.into(), source_path, } }
pub fn io_error(error: String) -> Self { Self { error: LoaderErrorType::IoError(error), source: "".into(), source_path: None, } } pub fn is_indentation_error(&self) -> bool { match &self.error { LoaderErrorType::ParserError(e) => e.is_indentation_error(), _ => false, } } } impl fmt::Display for LoaderError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use LoaderErrorType::*; if f.alternate() { match &self.error { ParserError(koto_parser::ParserError { error, .. }) => { f.write_str(&error.to_string()) } CompilerError(crate::CompilerError { message, .. }) => f.write_str(message), IoError(e) => f.write_str(&e), } } else { match &self.error { ParserError(koto_parser::ParserError { error, span }) => { f.write_str(&format_error_with_excerpt( Some(&error.to_string()), &self.source_path, &self.source, span.start, span.end, )) } CompilerError(crate::CompilerError { message, span }) => { f.write_str(&format_error_with_excerpt( Some(&message), &self.source_path, &self.source, span.start, span.end, )) } IoError(e) => f.write_str(&e), } } } } impl error::Error for LoaderError {} #[derive(Clone, Default)] pub struct Loader { chunks: HashMap<PathBuf, Arc<Chunk>>, } impl Loader { fn compile( &mut self, script: &str, script_path: Option<PathBuf>, compiler_settings: CompilerSettings, ) -> Result<Arc<Chunk>, LoaderError> { match Parser::parse(&script) { Ok((ast, constants)) => { let (bytes, mut debug_info) = match Compiler::compile(&ast, compiler_settings) { Ok((bytes, debug_info)) => (bytes, debug_info), Err(e) => return Err(LoaderError::from_compiler_error(e, script, script_path)), }; debug_info.source = script.to_string(); Ok(Arc::new(Chunk::new( bytes, constants, script_path, debug_info, ))) } Err(e) => Err(LoaderError::from_parser_error(e, script, script_path)), } } pub fn compile_repl(&mut self, script: &str) -> Result<Arc<Chunk>, LoaderError> { self.compile(script, None, CompilerSettings { repl_mode: true }) } pub fn compile_script( &mut self, script: &str, 
script_path: &Option<PathBuf>, ) -> Result<Arc<Chunk>, LoaderError> { self.compile(script, script_path.clone(), CompilerSettings::default()) } pub fn compile_module( &mut self, name: &str, load_from_path: Option<PathBuf>, ) -> Result<(Arc<Chunk>, PathBuf), LoaderError> { let path = match &load_from_path { Some(path) => match path.canonicalize() { Ok(canonicalized) if canonicalized.is_file() => match canonicalized.parent() { Some(parent_dir) => parent_dir.to_path_buf(), None => { return Err(LoaderError::io_error( "Failed to get parent of provided path".to_string(), )) } }, Ok(canonicalized) => canonicalized, Err(e) => return Err(LoaderError::io_error(e.to_string())), }, None => match std::env::current_dir() { Ok(path) => path, Err(e) => return Err(LoaderError::io_error(e.to_string())), }, }; let mut load_module_from_path = |module_path: PathBuf| match self.chunks.get(&module_path) { Some(chunk) => Ok((chunk.clone(), module_path.clone())), None => match std::fs::read_to_string(&module_path) { Ok(script) => { let chunk = self.compile( &script, Some(module_path.clone()), CompilerSettings::default(), )?; self.chunks.insert(module_path.clone(), chunk.clone()); Ok((chunk, module_path)) } Err(_) => Err(LoaderError::io_error(format!( "File not found: {}", module_path.to_string_lossy() ))), }, }; let extension = "koto"; let named_path = path.join(name); let module_path = named_path.with_extension(extension); if module_path.exists() { load_module_from_path(module_path) } else { let module_path = named_path.join("main").with_extension(extension); if module_path.exists() { load_module_from_path(module_path) } else { Err(LoaderError::io_error(format!( "Unable to find module '{}'", name ))) } } } }
pub fn from_compiler_error( error: CompilerError, source: &str, source_path: Option<PathBuf>, ) -> Self { Self { error: LoaderErrorType::CompilerError(error), source: source.into(), source_path, } }
function_block-full_function
[ { "content": "/// Returns a [String] displaying the annotated instructions contained in the compiled [Chunk]\n\npub fn chunk_to_string_annotated(chunk: Arc<Chunk>, source_lines: &[&str]) -> String {\n\n let mut result = String::new();\n\n let mut reader = InstructionReader::new(chunk);\n\n let mut ip = reader.ip;\n\n let mut span: Option<Span> = None;\n\n let mut first = true;\n\n\n\n while let Some(instruction) = reader.next() {\n\n let instruction_span = reader\n\n .chunk\n\n .debug_info\n\n .get_source_span(ip)\n\n .expect(\"Missing source span\");\n\n\n\n let print_source_lines = if let Some(span) = span {\n\n instruction_span.start.line != span.start.line\n\n } else {\n\n true\n\n };\n\n\n", "file_path": "src/bytecode/src/chunk.rs", "rank": 0, "score": 211834.42140114759 }, { "content": "pub fn format_error_with_excerpt(\n\n message: Option<&str>,\n\n source_path: &Option<PathBuf>,\n\n source: &str,\n\n start_pos: Position,\n\n end_pos: Position,\n\n) -> String {\n\n let (excerpt, padding) = {\n\n let excerpt_lines = source\n\n .lines()\n\n .skip((start_pos.line - 1) as usize)\n\n .take((end_pos.line - start_pos.line + 1) as usize)\n\n .collect::<Vec<_>>();\n\n\n\n let line_numbers = (start_pos.line..=end_pos.line)\n\n .map(|n| n.to_string())\n\n .collect::<Vec<_>>();\n\n\n\n let number_width = line_numbers.iter().max_by_key(|n| n.len()).unwrap().len();\n\n\n", "file_path": "src/parser/src/error.rs", "rank": 1, "score": 203749.21076878233 }, { "content": "pub fn format_string(format_string: &str, format_args: &[Value]) -> Result<String, String> {\n\n let mut arg_iter = format_args.iter();\n\n let mut result = String::with_capacity(format_string.len());\n\n\n\n for token in FormatLexer::new(&format_string) {\n\n match token {\n\n FormatToken::String(s) => result.push_str(s),\n\n FormatToken::Placeholder => match arg_iter.next() {\n\n Some(arg) => result.push_str(&arg.to_string()),\n\n None => return Err(\"Not enough arguments for format 
string\".to_string()),\n\n },\n\n FormatToken::Positional(n) => match format_args.get(n as usize) {\n\n Some(arg) => result.push_str(&arg.to_string()),\n\n None => return Err(format!(\"Missing argument for index {}\", n)),\n\n },\n\n FormatToken::Identifier(id) => match format_args.first() {\n\n Some(Value::Map(map)) => {\n\n // TODO pass in runtime's string cache\n\n match map.data().get_with_string(id) {\n\n Some(value) => result.push_str(&value.to_string()),\n", "file_path": "src/runtime/src/core/string/format.rs", "rank": 2, "score": 192268.0837416304 }, { "content": "pub fn type_as_string(value: &Value) -> String {\n\n use Value::*;\n\n match &value {\n\n Empty => \"Empty\".to_string(),\n\n Bool(_) => \"Bool\".to_string(),\n\n Number(ValueNumber::F64(_)) => \"Float\".to_string(),\n\n Number(ValueNumber::I64(_)) => \"Int\".to_string(),\n\n Num2(_) => \"Num2\".to_string(),\n\n Num4(_) => \"Num4\".to_string(),\n\n List(_) => \"List\".to_string(),\n\n Range { .. } => \"Range\".to_string(),\n\n IndexRange { .. } => \"IndexRange\".to_string(),\n\n Map(_) => \"Map\".to_string(),\n\n Str(_) => \"String\".to_string(),\n\n Tuple(_) => \"Tuple\".to_string(),\n\n Function { .. } => \"Function\".to_string(),\n\n Generator { .. } => \"Generator\".to_string(),\n\n ExternalFunction(_) => \"ExternalFunction\".to_string(),\n\n ExternalValue(value) => value.read().value_type(),\n\n Iterator(_) => \"Iterator\".to_string(),\n\n TemporaryTuple { .. 
} => \"TemporaryTuple\".to_string(),\n\n ExternalDataId => \"ExternalDataId\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 3, "score": 165367.17479667047 }, { "content": "/// Returns a [String] displaying the instructions contained in the compiled [Chunk]\n\npub fn chunk_to_string(chunk: Arc<Chunk>) -> String {\n\n let mut result = String::new();\n\n let mut reader = InstructionReader::new(chunk);\n\n let mut ip = reader.ip;\n\n\n\n while let Some(instruction) = reader.next() {\n\n result += &format!(\"{}\\t{}\\n\", ip, &instruction.to_string());\n\n ip = reader.ip;\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/bytecode/src/chunk.rs", "rank": 4, "score": 157601.99808690776 }, { "content": "pub trait ExternalValue: fmt::Debug + fmt::Display + Send + Sync + Downcast {\n\n fn value_type(&self) -> String;\n\n}\n\n\n\nimpl_downcast!(ExternalValue);\n\n\n\npub struct Args {\n\n pub register: u8,\n\n pub count: u8,\n\n}\n\n\n\n// Once Trait aliases are stabilized this can be simplified a bit,\n\n// see: https://github.com/rust-lang/rust/issues/55628\n\n#[allow(clippy::type_complexity)]\n\npub struct ExternalFunction {\n\n pub function: Arc<dyn Fn(&mut Vm, &Args) -> RuntimeResult + Send + Sync + 'static>,\n\n pub is_instance_function: bool,\n\n}\n\n\n\nimpl ExternalFunction {\n", "file_path": "src/runtime/src/external.rs", "rank": 5, "score": 148362.44444619672 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"chars\", |vm, args| match vm.get_args(args) {\n\n [Str(s)] => Ok(Iterator(ValueIterator::with_string(s.clone()))),\n\n _ => external_error!(\"string.chars: Expected a string as argument\"),\n\n });\n\n\n\n result.add_fn(\"contains\", |vm, args| match vm.get_args(args) {\n\n [Str(s1), Str(s2)] => Ok(Bool(s1.contains(s2.as_str()))),\n\n _ => external_error!(\"string.contains: Expected two strings as arguments\"),\n\n });\n\n\n\n 
result.add_fn(\"escape\", |vm, args| match vm.get_args(args) {\n\n [Str(s)] => Ok(Str(s.escape_default().to_string().into())),\n\n _ => external_error!(\"string.escape: Expected string as argument\"),\n\n });\n\n\n", "file_path": "src/runtime/src/core/string.rs", "rank": 6, "score": 144259.0890678803 }, { "content": "pub fn make_external_value(value: impl ExternalValue) -> Value {\n\n Value::ExternalValue(Arc::new(RwLock::new(value)))\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 7, "score": 139919.0303573827 }, { "content": "fn version_string() -> String {\n\n format!(\"Koto {}\", env!(\"CARGO_PKG_VERSION\"))\n\n}\n\n\n", "file_path": "src/cli/src/main.rs", "rank": 8, "score": 135788.1573412252 }, { "content": "fn help_string() -> String {\n\n format!(\n\n \"{version}\n\n\n\nUSAGE:\n\n koto [FLAGS] [script] [<args>...]\n\n\n\nFLAGS:\n\n -i, --show_instructions Show compiled instructions annotated with source lines\n\n -b, --show_bytecode Show the script's compiled bytecode\n\n -t, --tests Run the script's tests before running the script\n\n -h, --help Prints help information\n\n -v, --version Prints version information\n\n\n\nARGS:\n\n <script> The koto script to run\n\n <args>... 
Arguments to pass into the script\n\n\",\n\n version = version_string()\n\n )\n\n}\n\n\n", "file_path": "src/cli/src/main.rs", "rank": 9, "score": 135788.1573412252 }, { "content": "fn version_string() -> String {\n\n format!(\"{} {}\", env!(\"CARGO_PKG_NAME\"), env!(\"CARGO_PKG_VERSION\"))\n\n}\n\n\n", "file_path": "examples/poetry/src/main.rs", "rank": 10, "score": 135788.1573412252 }, { "content": "fn help_string() -> String {\n\n format!(\n\n \"{version}\n\n\n\nGenerate poetry with Koto\n\n\n\nUSAGE:\n\n {name} [FLAGS]\n\n\n\nFLAGS:\n\n -s, --script The script to run\n\n -w, --watch Watch the script file for changes\n\n -h, --help Prints help information\n\n -v, --version Prints version information\n\n\",\n\n name = env!(\"CARGO_PKG_NAME\"),\n\n version = version_string()\n\n )\n\n}\n\n\n", "file_path": "examples/poetry/src/main.rs", "rank": 11, "score": 135788.1573412252 }, { "content": "#[derive(Debug, Default)]\n\nstruct Frame {\n\n // If a frame contains yield then it represents a generator function\n\n contains_yield: bool,\n\n // IDs that have been assigned within the current frame\n\n ids_assigned_in_scope: HashSet<ConstantIndex>,\n\n // IDs and lookup roots which have been accessed without being locally assigned previously\n\n accessed_non_locals: HashSet<ConstantIndex>,\n\n // While an expression is being parsed we keep track of lhs assignments and rhs accesses.\n\n // At the end of the expresson (see `finish_expression`) accessed IDs that aren't locally\n\n // assigned are then counted as non-local accesses.\n\n pending_accesses: HashSet<ConstantIndex>,\n\n pending_assignments: HashSet<ConstantIndex>,\n\n}\n\n\n\nimpl Frame {\n\n fn local_count(&self) -> usize {\n\n self.ids_assigned_in_scope.len()\n\n }\n\n\n\n // Non-locals accessed in a nested frame need to be declared as also accessed in this\n", "file_path": "src/parser/src/parser.rs", "rank": 12, "score": 118833.44196366292 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct CompileResult 
{\n\n register: u8,\n\n is_temporary: bool,\n\n}\n\n\n\nimpl CompileResult {\n\n fn with_assigned(register: u8) -> Self {\n\n Self {\n\n register,\n\n is_temporary: false,\n\n }\n\n }\n\n\n\n fn with_temporary(register: u8) -> Self {\n\n Self {\n\n register,\n\n is_temporary: true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 13, "score": 117783.24275730838 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct ExpressionContext {\n\n // e.g.\n\n //\n\n // match x\n\n // foo.bar if x == 0 then...\n\n //\n\n // Without the flag, `if f == 0...` would be parsed as being an argument for a call to foo.bar.\n\n allow_space_separated_call: bool,\n\n // e.g. f = |x|\n\n // x + x\n\n // This function can have an indented body.\n\n //\n\n // foo\n\n // bar,\n\n // baz\n\n // This function call can be broken over lines.\n\n //\n\n // while x < f y\n\n // ...\n\n // Here, `f y` can't be broken over lines as the while expression expects an indented block.\n", "file_path": "src/parser/src/parser.rs", "rank": 14, "score": 116182.29582322741 }, { "content": "fn run_koto_repl_test(inputs_and_expected_outputs: &[(&str, Option<&str>)]) {\n\n let mut process = Command::new(env!(\"CARGO_BIN_EXE_koto\"))\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .expect(\"failed to execute child\");\n\n\n\n let stdin = process.stdin.as_mut().expect(\"failed to get stdin\");\n\n\n\n for (input, _) in inputs_and_expected_outputs.iter() {\n\n stdin\n\n .write_all(input.as_bytes())\n\n .expect(\"Failed to write to stdin\");\n\n stdin.write_all(b\"\\n\").expect(\"Failed to write to stdin\");\n\n }\n\n\n\n let output = process.wait_with_output().expect(\"Failed to get output\");\n\n let stdout = String::from_utf8(output.stdout).expect(\"Failed to get output\");\n\n let mut output_lines = stdout.lines().skip_while(|line| line != &\"» \");\n\n\n", "file_path": "src/cli/tests/repl_tests.rs", "rank": 15, "score": 114611.91316352131 }, { "content": 
"enum ConstantIndexOrWildcard {\n\n Index(ConstantIndex),\n\n Wildcard,\n\n}\n\n\n", "file_path": "src/parser/src/parser.rs", "rank": 16, "score": 113917.22133507252 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"temp_path\", {\n\n |_, _| match tempfile::NamedTempFile::new() {\n\n Ok(file) => match file.keep() {\n\n Ok((_temp_file, path)) => Ok(Str(path.to_string_lossy().as_ref().into())),\n\n Err(e) => external_error!(\"io.temp_file: Error while making temp path: {}\", e),\n\n },\n\n Err(e) => external_error!(\"io.temp_file: Error while making temp path: {}\", e),\n\n }\n\n });\n\n\n\n result.add_fn(\"temp_file\", {\n\n move |_, _| {\n\n let (temp_file, path) = match tempfile::NamedTempFile::new() {\n\n Ok(file) => match file.keep() {\n\n Ok((temp_file, path)) => (temp_file, path),\n", "file_path": "libs/tempfile/src/lib.rs", "rank": 17, "score": 112612.4444699559 }, { "content": "pub fn visit_external_value<T>(\n\n map: &ValueMap,\n\n mut f: impl FnMut(&mut T) -> RuntimeResult,\n\n) -> RuntimeResult\n\nwhere\n\n T: ExternalValue,\n\n{\n\n match map.data().get(&Value::ExternalDataId) {\n\n Some(Value::ExternalValue(maybe_external)) => {\n\n let mut value = maybe_external.as_ref().write();\n\n match value.downcast_mut::<T>() {\n\n Some(external) => f(external),\n\n None => external_error!(\n\n \"Invalid type for external value, found '{}'\",\n\n value.value_type(),\n\n ),\n\n }\n\n }\n\n _ => external_error!(\"External value not found\"),\n\n }\n\n}\n\n\n", "file_path": "src/runtime/src/external.rs", "rank": 18, "score": 112612.4444699559 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"from_string\", |vm, args| match vm.get_args(args) {\n\n [Str(s)] => match toml::from_str(s) {\n\n Ok(toml) => match toml_to_koto_value(&toml) {\n\n Ok(result) => Ok(result),\n\n Err(e) => 
external_error!(\"toml.from_string: Error while parsing input: {}\", e),\n\n },\n\n Err(e) => external_error!(\n\n \"toml.from_string: Error while parsing input: {}\",\n\n e.to_string()\n\n ),\n\n },\n\n _ => external_error!(\"toml.from_string expects a string as argument\"),\n\n });\n\n\n\n result.add_fn(\"to_string\", |vm, args| match vm.get_args(args) {\n\n [value] => match toml::to_string_pretty(&SerializableValue(value)) {\n\n Ok(result) => Ok(Str(result.into())),\n\n Err(e) => external_error!(\"toml.to_string: {}\", e),\n\n },\n\n _ => external_error!(\"toml.to_string expects a single argument\"),\n\n });\n\n\n\n result\n\n}\n", "file_path": "libs/toml/src/lib.rs", "rank": 19, "score": 112612.4444699559 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"from_string\", |vm, args| match vm.get_args(args) {\n\n [Str(s)] => match serde_json::from_str(&s) {\n\n Ok(value) => match json_value_to_koto_value(&value) {\n\n Ok(result) => Ok(result),\n\n Err(e) => external_error!(\"json.from_string: Error while parsing input: {}\", e),\n\n },\n\n Err(e) => external_error!(\n\n \"json.from_string: Error while parsing input: {}\",\n\n e.to_string()\n\n ),\n\n },\n\n _ => external_error!(\"json.from_string expects a string as argument\"),\n\n });\n\n\n\n result.add_fn(\"to_string\", |vm, args| match vm.get_args(args) {\n\n [value] => match serde_json::to_string_pretty(&SerializableValue(value)) {\n\n Ok(result) => Ok(Str(result.into())),\n\n Err(e) => external_error!(\"json.to_string: {}\", e),\n\n },\n\n _ => external_error!(\"json.to_string expects a single argument\"),\n\n });\n\n\n\n result\n\n}\n", "file_path": "libs/json/src/lib.rs", "rank": 20, "score": 112612.4444699559 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n // The random module contains a default generator\n\n let mut result = ChaChaRng::make_value_map(ChaCha20Rng::from_entropy());\n\n\n\n // 
random.generator is available to create custom generators\n\n result.add_fn(\"generator\", |vm, args| match vm.get_args(args) {\n\n [] => Ok(Map(ChaChaRng::make_value_map(ChaCha20Rng::from_entropy()))),\n\n [Number(n)] => Ok(Map(ChaChaRng::make_value_map(ChaCha20Rng::seed_from_u64(\n\n n.to_bits(),\n\n )))),\n\n _ => external_error!(\"random.generator - expected no arguments, or seed number\"),\n\n });\n\n\n\n result\n\n}\n\n\n", "file_path": "libs/random/src/lib.rs", "rank": 21, "score": 112612.4444699559 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::{Bool, Map, Str};\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"exists\", |vm, args| match vm.get_args(args) {\n\n [Str(path)] => Ok(Bool(Path::new(path.as_str()).exists())),\n\n _ => external_error!(\"io.exists: Expected path string as argument\"),\n\n });\n\n\n\n result.add_fn(\"read_to_string\", |vm, args| match vm.get_args(args) {\n\n [Str(path)] => match fs::read_to_string(Path::new(path.as_str())) {\n\n Ok(result) => Ok(Str(result.into())),\n\n Err(e) => external_error!(\"io.read_to_string: Unable to read file '{}': {}\", path, e),\n\n },\n\n _ => external_error!(\"io.read_to_string: Expected path string as argument\"),\n\n });\n\n\n\n result.add_fn(\"open\", {\n\n move |vm, args| match vm.get_args(args) {\n", "file_path": "src/runtime/src/core/io.rs", "rank": 22, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"clear\", |vm, args| match vm.get_args(args) {\n\n [Map(m)] => {\n\n m.data_mut().clear();\n\n Ok(Empty)\n\n }\n\n _ => external_error!(\"map.clear: Expected map as argument\"),\n\n });\n\n\n\n result.add_fn(\"contains_key\", |vm, args| match vm.get_args(args) {\n\n [Map(m), key] => Ok(Bool(m.data().contains_key(key))),\n\n [other_a, other_b, ..] 
=> external_error!(\n\n \"map.contains_key: Expected map and key as arguments, found '{}' and '{}'\",\n\n type_as_string(other_a),\n\n type_as_string(other_b)\n\n ),\n", "file_path": "src/runtime/src/core/map.rs", "rank": 23, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"contains\", |vm, args| match vm.get_args(args) {\n\n [Range(r), Number(n)] => Ok(Bool(*n >= r.start && n.ceil() < r.end)),\n\n _ => external_error!(\"range.contains: Expected range and number as arguments\"),\n\n });\n\n\n\n result.add_fn(\"end\", |vm, args| match vm.get_args(args) {\n\n [Range(r)] => Ok(Number(r.end.into())),\n\n _ => external_error!(\"range.end: Expected range as argument\"),\n\n });\n\n\n\n result.add_fn(\"expanded\", |vm, args| match vm.get_args(args) {\n\n [Range(r), Number(n)] => {\n\n let n = isize::from(n);\n\n if r.is_ascending() {\n\n Ok(Range(IntRange {\n", "file_path": "src/runtime/src/core/range.rs", "rank": 24, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"clear\", |vm, args| match vm.get_args(args) {\n\n [List(l)] => {\n\n l.data_mut().clear();\n\n Ok(Empty)\n\n }\n\n _ => external_error!(\"list.clear: Expected list as argument\"),\n\n });\n\n\n\n result.add_fn(\"contains\", |vm, args| match vm.get_args(args) {\n\n [List(l), value] => Ok(Bool(l.data().contains(value))),\n\n _ => external_error!(\"list.contains: Expected list and value as arguments\"),\n\n });\n\n\n\n result.add_fn(\"copy\", |vm, args| match vm.get_args(args) {\n\n [List(l)] => Ok(List(ValueList::with_data(l.data().clone()))),\n", "file_path": "src/runtime/src/core/list.rs", "rank": 25, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n macro_rules! 
number_fn {\n\n ($fn:ident) => {\n\n number_fn!(stringify!($fn), $fn)\n\n };\n\n ($name:expr, $fn:ident) => {\n\n result.add_fn($name, |vm, args| match vm.get_args(args) {\n\n [Number(n)] => Ok(Number(n.$fn())),\n\n [other] => external_error!(\n\n \"number.{}: Expected Number as argument, found '{}'\",\n\n $name,\n\n type_as_string(other)\n\n ),\n\n _ => external_error!(\"number.{} expects a Number as argument\", $name),\n\n });\n\n };\n", "file_path": "src/runtime/src/core/number.rs", "rank": 26, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::{Map, Str};\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"new\", {\n\n |vm, args| match vm.get_args(args) {\n\n [Str(text)] => {\n\n let mut poetry = Poetry::default();\n\n poetry.add_links(text);\n\n Ok(Map(KotoPoetry::make_value_map(poetry)))\n\n }\n\n [unexpected] => external_error!(\n\n \"poetry.new: Expected a String as argument, found '{}'\",\n\n type_as_string(&unexpected),\n\n ),\n\n _ => external_error!(\"poetry.new: Expected a String as argument\"),\n\n }\n\n });\n\n\n", "file_path": "examples/poetry/src/koto_bindings.rs", "rank": 27, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"contains\", |vm, args| match vm.get_args(args) {\n\n [Tuple(t), value] => Ok(Bool(t.data().contains(value))),\n\n _ => external_error!(\"tuple.contains: Expected tuple and value as arguments\"),\n\n });\n\n\n\n result.add_fn(\"deep_copy\", |vm, args| match vm.get_args(args) {\n\n [value @ Tuple(_)] => Ok(deep_copy_value(value)),\n\n _ => external_error!(\"tuple.deep_copy: Expected tuple as argument\"),\n\n });\n\n\n\n result.add_fn(\"first\", |vm, args| match vm.get_args(args) {\n\n [Tuple(t)] => match t.data().first() {\n\n Some(value) => Ok(value.clone()),\n\n None => Ok(Value::Empty),\n\n },\n", "file_path": "src/runtime/src/core/tuple.rs", "rank": 28, 
"score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"all\", |vm, args| match vm.get_args(args) {\n\n [iterable, f] if value_is_iterable(iterable) && value_is_callable(f) => {\n\n let f = f.clone();\n\n let iter = make_iterator(iterable).unwrap().map(collect_pair);\n\n let mut vm = vm.spawn_shared_vm();\n\n\n\n for iter_output in iter {\n\n match iter_output {\n\n Ok(Output::Value(value)) => match vm.run_function(f.clone(), &[value]) {\n\n Ok(Bool(result)) => {\n\n if !result {\n\n return Ok(Bool(false));\n\n }\n\n }\n\n Ok(unexpected) => {\n", "file_path": "src/runtime/src/core/iterator.rs", "rank": 29, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::Number;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"cpu_count\", |_vm, _args| Ok(Number(num_cpus::get().into())));\n\n\n\n result.add_fn(\"physical_cpu_count\", |_vm, _args| {\n\n Ok(Number(num_cpus::get_physical().into()))\n\n });\n\n\n\n result\n\n}\n", "file_path": "src/runtime/src/core/os.rs", "rank": 30, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_value(\"args\", List(ValueList::default()));\n\n\n\n result.add_fn(\"current_dir\", |_, _| {\n\n let result = match std::env::current_dir() {\n\n Ok(path) => Str(path.to_string_lossy().to_string().into()),\n\n Err(_) => Empty,\n\n };\n\n Ok(result)\n\n });\n\n\n\n result.add_value(\"script_dir\", Str(\"\".into()));\n\n result.add_value(\"script_path\", Str(\"\".into()));\n\n\n\n result.add_fn(\"type\", |vm, args| match vm.get_args(args) {\n\n [value] => Ok(Str(type_as_string(value).into())),\n\n _ => external_error!(\"koto.type: Expected single argument\"),\n\n });\n\n\n\n result\n\n}\n", "file_path": "src/runtime/src/core/koto.rs", "rank": 31, "score": 110377.19202620181 }, { 
"content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"sum\", |vm, args| match vm.get_args(args) {\n\n [Num4(n)] => Ok(Number(\n\n (n[0] as f64 + n[1] as f64 + n[2] as f64 + n[3] as f64).into(),\n\n )),\n\n [unexpected] => external_error!(\n\n \"num4.sum: Expected Num4, found '{}'\",\n\n type_as_string(unexpected)\n\n ),\n\n _ => external_error!(\"num4.sum: Expected a Num4 as argument\"),\n\n });\n\n\n\n result\n\n}\n", "file_path": "src/runtime/src/core/num4.rs", "rank": 32, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"assert\", |vm, args| {\n\n for value in vm.get_args(args).iter() {\n\n match value {\n\n Bool(b) => {\n\n if !b {\n\n return external_error!(\"Assertion failed\");\n\n }\n\n }\n\n unexpected => {\n\n return external_error!(\n\n \"assert expects booleans as arguments, found '{}'\",\n\n type_as_string(unexpected),\n\n )\n\n }\n\n }\n", "file_path": "src/runtime/src/core/test.rs", "rank": 33, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::*;\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"sum\", |vm, args| match vm.get_args(args) {\n\n [Num2(n)] => Ok(Number((n[0] + n[1]).into())),\n\n [unexpected] => external_error!(\n\n \"num2.sum: Expected Num2, found '{}'\",\n\n type_as_string(unexpected)\n\n ),\n\n _ => external_error!(\"num2.sum: Expected a Num2 as argument\"),\n\n });\n\n\n\n result\n\n}\n", "file_path": "src/runtime/src/core/num2.rs", "rank": 34, "score": 110377.19202620181 }, { "content": "pub fn make_module() -> ValueMap {\n\n use Value::{Empty, Number};\n\n\n\n let mut result = ValueMap::new();\n\n\n\n result.add_fn(\"create\", |vm, args| match vm.get_args(args) {\n\n [f] if value_is_callable(f) => {\n\n let f = f.clone();\n\n let join_handle = thread::spawn({\n\n let mut thread_vm = 
vm.spawn_shared_concurrent_vm();\n\n move || match thread_vm.run_function(f, &[]) {\n\n Ok(result) => Ok(result),\n\n Err(e) => Err(e.with_prefix(\"thread.create\")),\n\n }\n\n });\n\n\n\n Ok(Thread::make_thread_map(join_handle))\n\n }\n\n [unexpected] => external_error!(\n\n \"thread.create: Expected callable value as argument, found '{}'\",\n", "file_path": "src/runtime/src/core/thread.rs", "rank": 35, "score": 110377.19202620181 }, { "content": "enum Arg {\n\n Local(ConstantIndex),\n\n Unpacked(ConstantIndex),\n\n Placeholder,\n\n}\n\n\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 36, "score": 109632.25860814494 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct Loop {\n\n start_ip: usize,\n\n jump_placeholders: Vec<usize>,\n\n}\n\n\n\nimpl Loop {\n\n fn new(start_ip: usize) -> Self {\n\n Self {\n\n start_ip,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 37, "score": 109401.96559199554 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct Frame {\n\n loop_stack: Vec<Loop>,\n\n register_stack: Vec<u8>,\n\n local_registers: Vec<LocalRegister>,\n\n temporary_base: u8,\n\n temporary_count: u8,\n\n last_op: Option<Op>, // used to decide if an additional return instruction is needed\n\n}\n\n\n\nimpl Frame {\n\n fn new(local_count: u8, args: &[Arg], captures: &[ConstantIndex]) -> Self {\n\n let temporary_base = local_count\n\n + captures.len() as u8\n\n + args\n\n .iter()\n\n .filter(|arg| matches!(arg, Arg::Placeholder))\n\n .count() as u8;\n\n\n\n // First, assign registers to the 'top-level' args, including placeholder registers\n\n let mut local_registers = Vec::with_capacity(args.len() + captures.len());\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 38, "score": 109401.96559199554 }, { "content": "fn load_and_run_script(script_path: &str) {\n\n let mut path = PathBuf::new();\n\n path.push(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(\"../../koto/tests\");\n\n 
path.push(script_path);\n\n if !path.exists() {\n\n panic!(format!(\"Path doesn't exist: {:?}\", path));\n\n }\n\n let script =\n\n read_to_string(&path).unwrap_or_else(|_| panic!(\"Unable to load path '{:?}'\", &path));\n\n\n\n run_script(&script, Some(path), false);\n\n}\n\n\n\nmacro_rules! koto_test {\n\n ($name:ident) => {\n\n #[test]\n\n fn $name() {\n\n load_and_run_script(&format!(\"{}.koto\", stringify!($name)));\n\n }\n", "file_path": "src/koto/tests/koto_tests.rs", "rank": 39, "score": 108855.50335160094 }, { "content": "pub fn make_file_map() -> ValueMap {\n\n use Value::{Number, Str};\n\n\n\n fn file_fn(\n\n fn_name: &str,\n\n args: &[Value],\n\n mut file_op: impl FnMut(&mut File) -> RuntimeResult,\n\n ) -> RuntimeResult {\n\n get_external_instance!(args, \"File\", fn_name, File, file_ref, { file_op(file_ref) })\n\n }\n\n\n\n let mut file_map = ValueMap::new();\n\n\n\n file_map.add_instance_fn(\"path\", |vm, args| {\n\n file_fn(\"path\", vm.get_args(args), |file_handle| {\n\n Ok(Str(file_handle.path.to_string_lossy().as_ref().into()))\n\n })\n\n });\n\n\n\n file_map.add_instance_fn(\"write\", |vm, args| {\n", "file_path": "src/runtime/src/core/io.rs", "rank": 40, "score": 108280.45077012078 }, { "content": "fn compile_and_run(koto: &mut Koto, script_path: &Path) -> Result<(), Box<dyn Error>> {\n\n let script = fs::read_to_string(script_path)?;\n\n match koto.compile(&script) {\n\n Ok(_) => match koto.run() {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(PoetryError {\n\n prefix: \"Error while running script\".into(),\n\n error: e.into(),\n\n }\n\n .into()),\n\n },\n\n Err(e) => Err(PoetryError {\n\n prefix: \"Error while compiling script\".into(),\n\n error: e.into(),\n\n }\n\n .into()),\n\n }\n\n}\n", "file_path": "examples/poetry/src/main.rs", "rank": 41, "score": 107885.48143054507 }, { "content": "#[derive(Debug)]\n\nstruct PoetryError {\n\n prefix: String,\n\n error: Box<dyn Error>,\n\n}\n\n\n\nimpl fmt::Display for PoetryError {\n\n fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}: {}\", self.prefix, self.error)\n\n }\n\n}\n\n\n\nimpl Error for PoetryError {}\n\n\n", "file_path": "examples/poetry/src/main.rs", "rank": 42, "score": 107075.74027850872 }, { "content": "fn load_and_run_script(script_path: &str) {\n\n let mut path = PathBuf::new();\n\n path.push(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(\"../../koto/tests/libs\");\n\n path.push(script_path);\n\n if !path.exists() {\n\n panic!(\"Path doesn't exist: {:?}\", path);\n\n }\n\n let script =\n\n read_to_string(&path).unwrap_or_else(|_| panic!(\"Unable to load path '{:?}'\", &path));\n\n\n\n run_script(&script, Some(path), false);\n\n}\n\n\n\nmacro_rules! lib_test {\n\n ($name:ident) => {\n\n #[test]\n\n fn $name() {\n\n load_and_run_script(&format!(\"{}.koto\", stringify!($name)));\n\n }\n", "file_path": "libs/lib_tests/tests/lib_tests.rs", "rank": 43, "score": 106881.13803390523 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum ResultRegister {\n\n // No result needed\n\n None,\n\n // The result can be any temporary register, or an assigned register\n\n Any,\n\n // The result must be placed in the specified register\n\n Fixed(u8),\n\n}\n\n\n\n// While compiling a node, ResultRegister::Any might cause a temporary register to be allocated,\n\n// so the result register should be determined before other temporary registers are allocated.\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 44, "score": 106695.323753176 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum LocalRegister {\n\n // The register is assigned to a specific id.\n\n Assigned(ConstantIndex),\n\n // The register is currently being assigned to,\n\n // it will become assigned at the end of the assignment expression.\n\n // Instructions can be deferred until the register is committed,\n\n // e.g. 
for functions that need to capture themselves after they've been fully assigned\n\n Reserved(ConstantIndex, Vec<u8>),\n\n // The register contains a value not associated with an id, e.g. a wildcard function arg\n\n Allocated,\n\n}\n\n\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 45, "score": 106695.25307156882 }, { "content": "fn parse_arguments() -> Result<KotoArgs, String> {\n\n let mut args = pico_args::Arguments::from_env();\n\n\n\n let help = args.contains([\"-h\", \"--help\"]);\n\n let version = args.contains([\"-v\", \"--version\"]);\n\n let run_tests = args.contains([\"-t\", \"--tests\"]);\n\n let show_bytecode = args.contains([\"-b\", \"--show_bytecode\"]);\n\n let show_annotated = args.contains([\"-i\", \"--show_instructions\"]);\n\n\n\n let script = args\n\n .subcommand()\n\n .map_err(|e| format!(\"Error while parsing arguments: {}\", e))?;\n\n\n\n let script_args = match args.free() {\n\n Ok(extra_args) => extra_args,\n\n Err(e) => {\n\n return Err(match e {\n\n pico_args::Error::UnusedArgsLeft(unused) => {\n\n format!(\"Unsupported argument: {}\", unused.first().unwrap())\n\n }\n", "file_path": "src/cli/src/main.rs", "rank": 46, "score": 105971.49502972707 }, { "content": "fn parse_arguments() -> Result<PoetryArgs, String> {\n\n let mut args = pico_args::Arguments::from_env();\n\n\n\n let help = args.contains([\"-h\", \"--help\"]);\n\n let version = args.contains([\"-v\", \"--version\"]);\n\n let watch = args.contains([\"-w\", \"--watch\"]);\n\n let script = args\n\n .value_from_str([\"-s\", \"--script\"])\n\n .map_err(|_| \"Missing script argument\".to_string())?;\n\n\n\n Ok(PoetryArgs {\n\n help,\n\n version,\n\n script,\n\n watch,\n\n })\n\n}\n\n\n", "file_path": "examples/poetry/src/main.rs", "rank": 47, "score": 105971.49502972707 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args = match parse_arguments() {\n\n Ok(args) => {\n\n if args.help {\n\n println!(\"{}\", help_string());\n\n return Ok(());\n\n }\n\n if 
args.version {\n\n println!(\"{}\", version_string());\n\n return Ok(());\n\n }\n\n args\n\n }\n\n Err(error) => {\n\n println!(\"{}\\n\\n{}\", help_string(), error);\n\n return Err(\"Failed to parse arguments\".to_string().into());\n\n }\n\n };\n\n\n\n let mut koto = Koto::with_settings(KotoSettings {\n", "file_path": "examples/poetry/src/main.rs", "rank": 48, "score": 104527.94896176411 }, { "content": "pub fn is_id_continue(c: char) -> bool {\n\n UnicodeXID::is_xid_continue(c)\n\n}\n\n\n", "file_path": "src/lexer/src/lexer.rs", "rank": 49, "score": 104071.64133948748 }, { "content": "pub fn is_id_start(c: char) -> bool {\n\n UnicodeXID::is_xid_start(c)\n\n}\n\n\n", "file_path": "src/lexer/src/lexer.rs", "rank": 50, "score": 104071.64133948748 }, { "content": "#[derive(Default)]\n\nstruct MatchJumpPlaceholders {\n\n // Jumps to the end of the arm\n\n arm_end: Vec<usize>,\n\n // Jumps to the end of the arm's match patterns,\n\n // used after a successful match to skip over remaining alternatives\n\n match_end: Vec<usize>,\n\n // Jumps to the end of the current arm alternative,\n\n // e.g.\n\n // match x\n\n // 0 or 1 or 2 then y\n\n // ^~~~ a match failure here should attempt matching on the next alternative\n\n alternative_end: Vec<usize>,\n\n}\n\n\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 51, "score": 103724.23846024694 }, { "content": "#[derive(Clone, Debug, Hash, PartialEq)]\n\nenum ConstantInfo {\n\n F64(usize),\n\n I64(usize),\n\n Str(Range<usize>),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Constant<'a> {\n\n F64(f64),\n\n I64(i64),\n\n Str(&'a str),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ConstantPool {\n\n index: Vec<ConstantInfo>,\n\n // Constant strings concatanated into one\n\n strings: String,\n\n floats: Vec<f64>,\n\n ints: Vec<i64>,\n", "file_path": "src/parser/src/constant_pool.rs", "rank": 52, "score": 102769.31270048852 }, { "content": "pub fn value_is_iterable(value: &Value) -> bool {\n\n use 
Value::*;\n\n matches!(\n\n value,\n\n Range(_) | List(_) | Tuple(_) | Map(_) | Str(_) | Iterator(_)\n\n )\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 53, "score": 101836.3888957334 }, { "content": "pub fn value_size(value: &Value) -> usize {\n\n use Value::*;\n\n\n\n match value {\n\n List(l) => l.len(),\n\n Str(s) => s.len(),\n\n Tuple(t) => t.data().len(),\n\n TemporaryTuple(RegisterSlice { count, .. }) => *count as usize,\n\n Map(m) => m.len(),\n\n Num2(_) => 2,\n\n Num4(_) => 4,\n\n Range(IntRange { start, end }) => (end - start) as usize,\n\n _ => 1,\n\n }\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 54, "score": 101836.3888957334 }, { "content": "pub fn value_is_callable(value: &Value) -> bool {\n\n use Value::*;\n\n matches!(value, Function { .. } | ExternalFunction(_))\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 55, "score": 101836.3888957334 }, { "content": "pub fn value_is_immutable(value: &Value) -> bool {\n\n use Value::*;\n\n matches!(\n\n value,\n\n Empty | ExternalDataId | Bool(_) | Number(_) | Num2(_) | Num4(_) | Range(_) | Str(_)\n\n )\n\n}\n\n\n", "file_path": "src/runtime/src/value.rs", "rank": 56, "score": 101836.3888957334 }, { "content": "pub fn koto_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"fib\", |b| {\n\n let mut runner = BenchmarkRunner::new(\"fib_recursive.koto\", &[]);\n\n b.iter(|| {\n\n runner.run();\n\n })\n\n });\n\n c.bench_function(\"num4\", |b| {\n\n let mut runner = BenchmarkRunner::new(\"num4.koto\", &[]);\n\n b.iter(|| {\n\n runner.run();\n\n })\n\n });\n\n c.bench_function(\"enumerate\", |b| {\n\n let mut runner = BenchmarkRunner::new(\"enumerate.koto\", &[]);\n\n b.iter(|| {\n\n runner.run();\n\n })\n\n });\n\n c.bench_function(\"string_formatting\", |b| {\n", "file_path": "src/koto/benches/koto_benchmark.rs", "rank": 57, "score": 101836.3888957334 }, { "content": "type CompileNodeResult = Result<Option<CompileResult>, CompilerError>;\n\n\n\n/// The settings 
used by the [Compiler]\n\n#[derive(Default)]\n\npub struct CompilerSettings {\n\n /// Causes all top level identifiers to be exported to global\n\n pub repl_mode: bool,\n\n}\n\n\n\n/// The compiler used by the Koto language\n\n#[derive(Default)]\n\npub struct Compiler {\n\n bytes: Vec<u8>,\n\n debug_info: DebugInfo,\n\n frame_stack: Vec<Frame>,\n\n span_stack: Vec<Span>,\n\n settings: CompilerSettings,\n\n}\n\n\n\nimpl Compiler {\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 58, "score": 101680.389060581 }, { "content": "pub fn deep_copy_value(value: &Value) -> Value {\n\n use Value::{List, Map, Tuple};\n\n\n\n match value {\n\n List(l) => {\n\n let result = l\n\n .data()\n\n .iter()\n\n .map(|v| deep_copy_value(v))\n\n .collect::<ValueVec>();\n\n List(ValueList::with_data(result))\n\n }\n\n Tuple(t) => {\n\n let result = t\n\n .data()\n\n .iter()\n\n .map(|v| deep_copy_value(v))\n\n .collect::<Vec<_>>();\n\n Tuple(result.into())\n\n }\n", "file_path": "src/runtime/src/value.rs", "rank": 59, "score": 99739.64763965236 }, { "content": "struct MatchArmParameters<'a> {\n\n match_register: u8,\n\n is_last_alternative: bool,\n\n has_last_pattern: bool,\n\n jumps: &'a mut MatchJumpPlaceholders,\n\n}\n", "file_path": "src/bytecode/src/compiler.rs", "rank": 60, "score": 99552.93138039127 }, { "content": "fn consume_and_count(chars: &mut Peekable<Chars>, predicate: impl Fn(char) -> bool) -> usize {\n\n let mut char_bytes = 0;\n\n\n\n while let Some(c) = chars.peek() {\n\n if !predicate(*c) {\n\n break;\n\n }\n\n char_bytes += 1;\n\n chars.next();\n\n }\n\n\n\n char_bytes\n\n}\n\n\n", "file_path": "src/lexer/src/lexer.rs", "rank": 61, "score": 97885.78684443758 }, { "content": "fn toml_to_koto_value(value: &Toml) -> Result<Value, String> {\n\n let result = match value {\n\n Toml::Boolean(b) => Value::Bool(*b),\n\n Toml::Integer(i) => Value::Number(i.into()),\n\n Toml::Float(f) => Value::Number(f.into()),\n\n Toml::String(s) => Value::Str(s.as_str().into()),\n\n 
Toml::Array(a) => {\n\n match a\n\n .iter()\n\n .map(|entry| toml_to_koto_value(entry))\n\n .collect::<Result<ValueVec, String>>()\n\n {\n\n Ok(result) => Value::List(ValueList::with_data(result)),\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n Toml::Table(o) => {\n\n let mut map = ValueMap::with_capacity(o.len());\n\n for (key, value) in o.iter() {\n\n map.add_value(key, toml_to_koto_value(value)?);\n\n }\n\n Value::Map(map)\n\n }\n\n Toml::Datetime(dt) => Value::Str(dt.to_string().into()),\n\n };\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "libs/toml/src/lib.rs", "rank": 62, "score": 94939.79709878718 }, { "content": "pub fn is_external_instance<T>(map: &ValueMap) -> bool\n\nwhere\n\n T: ExternalValue,\n\n{\n\n match map.data().get(&Value::ExternalDataId) {\n\n Some(Value::ExternalValue(maybe_external)) => maybe_external.as_ref().read().is::<T>(),\n\n _ => false,\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! get_external_instance {\n\n ($args: ident,\n\n $external_name: expr,\n\n $fn_name: expr,\n\n $external_type: ident,\n\n $match_name: ident,\n\n $body: block) => {{\n\n match &$args {\n\n [Value::Map(instance), ..] 
=> {\n", "file_path": "src/runtime/src/external.rs", "rank": 63, "score": 94663.18327025612 }, { "content": "pub fn make_iterator(value: &Value) -> Result<ValueIterator, ()> {\n\n use Value::*;\n\n let result = match value {\n\n Range(r) => ValueIterator::with_range(*r),\n\n List(l) => ValueIterator::with_list(l.clone()),\n\n Tuple(t) => ValueIterator::with_tuple(t.clone()),\n\n Map(m) => ValueIterator::with_map(m.clone()),\n\n Str(s) => ValueIterator::with_string(s.clone()),\n\n Iterator(i) => i.clone(),\n\n _ => return Err(()),\n\n };\n\n Ok(result)\n\n}\n", "file_path": "src/runtime/src/value_iterator.rs", "rank": 64, "score": 92807.45860266578 }, { "content": "fn operator_precedence(op: Token) -> Option<(u8, u8)> {\n\n use Token::*;\n\n let priority = match op {\n\n Or => (1, 2),\n\n And => (3, 4),\n\n // Chained comparisons require right-associativity\n\n Equal | NotEqual => (8, 7),\n\n Greater | GreaterOrEqual | Less | LessOrEqual => (10, 9),\n\n Add | Subtract => (11, 12),\n\n Multiply | Divide | Modulo => (13, 14),\n\n _ => return None,\n\n };\n\n Some(priority)\n\n}\n", "file_path": "src/parser/src/parser.rs", "rank": 65, "score": 91030.07952488963 }, { "content": "pub fn multiply_values(value_a: &Value, value_b: &Value) -> Option<Value> {\n\n use Value::*;\n\n\n\n let result = match (value_a, value_b) {\n\n (Number(a), Number(b)) => Number(a * b),\n\n (Number(a), Num2(b)) => Num2(a * b),\n\n (Num2(a), Num2(b)) => Num2(a * b),\n\n (Num2(a), Number(b)) => Num2(a * b),\n\n (Number(a), Num4(b)) => Num4(a * b),\n\n (Num4(a), Num4(b)) => Num4(a * b),\n\n (Num4(a), Number(b)) => Num4(a * b),\n\n _ => {\n\n return None;\n\n }\n\n };\n\n\n\n Some(result)\n\n}\n", "file_path": "src/runtime/src/value.rs", "rank": 66, "score": 88029.38692128484 }, { "content": "pub fn add_values(value_a: &Value, value_b: &Value) -> Option<Value> {\n\n use Value::*;\n\n\n\n let result = match (value_a, value_b) {\n\n (Number(a), Number(b)) => Number(a + b),\n\n (Number(a), Num2(b)) => 
Num2(a + b),\n\n (Num2(a), Num2(b)) => Num2(a + b),\n\n (Num2(a), Number(b)) => Num2(a + b),\n\n (Number(a), Num4(b)) => Num4(a + b),\n\n (Num4(a), Num4(b)) => Num4(a + b),\n\n (Num4(a), Number(b)) => Num4(a + b),\n\n (List(a), List(b)) => {\n\n let mut result = ValueVec::new();\n\n result.extend(a.data().iter().chain(b.data().iter()).cloned());\n\n List(ValueList::with_data(result))\n\n }\n\n (List(a), Tuple(b)) => {\n\n let mut result = ValueVec::new();\n\n result.extend(a.data().iter().chain(b.data().iter()).cloned());\n\n List(ValueList::with_data(result))\n", "file_path": "src/runtime/src/value.rs", "rank": 67, "score": 88029.38692128484 }, { "content": "fn json_value_to_koto_value(value: &serde_json::Value) -> Result<Value, String> {\n\n let result = match value {\n\n JsonValue::Null => Value::Empty,\n\n JsonValue::Bool(b) => Value::Bool(*b),\n\n JsonValue::Number(n) => match n.as_i64() {\n\n Some(n64) => Value::Number(n64.into()),\n\n None => match n.as_f64() {\n\n Some(n64) => Value::Number(n64.into()),\n\n None => return Err(format!(\"Number is out of range: {}\", n)),\n\n },\n\n },\n\n JsonValue::String(s) => Value::Str(s.as_str().into()),\n\n JsonValue::Array(a) => {\n\n match a\n\n .iter()\n\n .map(|entry| json_value_to_koto_value(entry))\n\n .collect::<Result<ValueVec, String>>()\n\n {\n\n Ok(result) => Value::List(ValueList::with_data(result)),\n\n Err(e) => return Err(e),\n", "file_path": "libs/json/src/lib.rs", "rank": 68, "score": 87068.3846490685 }, { "content": "fn run_script(script: &str, path: Option<PathBuf>, should_fail_at_runtime: bool) {\n\n let mut koto = Koto::with_settings(KotoSettings {\n\n run_tests: true,\n\n ..Default::default()\n\n });\n\n koto.set_script_path(path);\n\n\n\n match koto.compile(&script) {\n\n Ok(_) => match koto.run() {\n\n Ok(_) => {\n\n if should_fail_at_runtime {\n\n panic!(\"Expected failure\");\n\n }\n\n }\n\n Err(error) => {\n\n if !should_fail_at_runtime {\n\n panic!(\"{}\", error);\n\n }\n\n }\n\n },\n\n 
Err(error) => {\n\n panic!(\"{}\", error);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/koto/tests/koto_tests.rs", "rank": 69, "score": 83443.90276076575 }, { "content": "}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ParserError {\n\n pub error: ErrorType,\n\n pub span: Span,\n\n}\n\n\n\nimpl ParserError {\n\n pub fn new(error: ErrorType, span: Span) -> Self {\n\n Self { error, span }\n\n }\n\n\n\n pub fn is_indentation_error(&self) -> bool {\n\n matches!(self.error, ErrorType::ExpectedIndentation(_))\n\n }\n\n}\n\n\n\nimpl fmt::Display for ParserError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/parser/src/error.rs", "rank": 70, "score": 82007.50399199351 }, { "content": " ErrorType::ExpectedIndentation(e)\n\n }\n\n}\n\n\n\nimpl From<SyntaxError> for ErrorType {\n\n fn from(e: SyntaxError) -> ErrorType {\n\n ErrorType::SyntaxError(e)\n\n }\n\n}\n\n\n\nimpl fmt::Display for ErrorType {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use ErrorType::*;\n\n\n\n match &self {\n\n InternalError(error) => write!(f, \"Internal error: {}\", error),\n\n ExpectedIndentation(error) => f.write_str(&error.to_string()),\n\n SyntaxError(error) => f.write_str(&error.to_string()),\n\n }\n\n }\n", "file_path": "src/parser/src/error.rs", "rank": 71, "score": 82000.6479049094 }, { "content": " self.error.fmt(f)\n\n }\n\n}\n\n\n\nimpl error::Error for ParserError {}\n\n\n\nimpl fmt::Display for InternalError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use InternalError::*;\n\n\n\n match self {\n\n ArgumentsParseFailure => f.write_str(\"Failed to parse arguments\"),\n\n AstCapacityOverflow => {\n\n f.write_str(\"There are more nodes in the program than the AST can support\")\n\n }\n\n ExpectedIdInImportItem => f.write_str(\"Expected ID in import item\"),\n\n ForParseFailure => f.write_str(\"Failed to parse for loop\"),\n\n FunctionParseFailure => f.write_str(\"Failed to parse function\"),\n\n 
IdParseFailure => f.write_str(\"Failed to parse ID\"),\n\n LookupParseFailure => f.write_str(\"Failed to parse lookup\"),\n", "file_path": "src/parser/src/error.rs", "rank": 72, "score": 81997.78448369994 }, { "content": " UnexpectedToken,\n\n UnexpectedTokenAfterExportId,\n\n UnexpectedTokenInImportExpression,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum ErrorType {\n\n InternalError(InternalError),\n\n ExpectedIndentation(ExpectedIndentation),\n\n SyntaxError(SyntaxError),\n\n}\n\n\n\nimpl From<InternalError> for ErrorType {\n\n fn from(e: InternalError) -> ErrorType {\n\n ErrorType::InternalError(e)\n\n }\n\n}\n\n\n\nimpl From<ExpectedIndentation> for ErrorType {\n\n fn from(e: ExpectedIndentation) -> ErrorType {\n", "file_path": "src/parser/src/error.rs", "rank": 73, "score": 81990.15953950858 }, { "content": "use {\n\n koto_lexer::{Position, Span},\n\n std::{error, fmt, path::PathBuf},\n\n};\n\n\n\n#[derive(Clone, Debug)]\n\npub enum InternalError {\n\n ArgumentsParseFailure,\n\n AstCapacityOverflow,\n\n ExpectedIdInImportItem,\n\n ForParseFailure,\n\n FunctionParseFailure,\n\n IdParseFailure,\n\n LookupParseFailure,\n\n MissingAssignmentTarget,\n\n MissingContinuedExpressionLhs,\n\n MissingScope,\n\n NumberParseFailure,\n\n RangeParseFailure,\n\n UnexpectedIdInExpression,\n", "file_path": "src/parser/src/error.rs", "rank": 74, "score": 81989.29373253518 }, { "content": " MissingAssignmentTarget => f.write_str(\"Missing assignment target\"),\n\n MissingContinuedExpressionLhs => f.write_str(\"Missing LHS for continued expression\"),\n\n MissingScope => f.write_str(\"Scope unavailable during parsing\"),\n\n NumberParseFailure => f.write_str(\"Failed to parse number\"),\n\n RangeParseFailure => f.write_str(\"Failed to parse range\"),\n\n UnexpectedIdInExpression => {\n\n f.write_str(\"Unexpected ID encountered while parsing expression\")\n\n }\n\n UnexpectedToken => f.write_str(\"Unexpected token\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for 
ExpectedIndentation {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use ExpectedIndentation::*;\n\n\n\n match self {\n\n ExpectedCatchBody => f.write_str(\"Expected indented block for catch expression\"),\n\n ExpectedElseBlock => f.write_str(\"Expected indented block for 'else'.\"),\n", "file_path": "src/parser/src/error.rs", "rank": 75, "score": 81988.50275430916 }, { "content": " ExpectedElseIfBlock => f.write_str(\"Expected indented block for 'else if'.\"),\n\n ExpectedForBody => f.write_str(\"Expected indented block in for loop\"),\n\n ExpectedFinallyBody => f.write_str(\"Expected indented block for finally expression\"),\n\n ExpectedFunctionBody => f.write_str(\"Expected function body\"),\n\n ExpectedLoopBody => f.write_str(\"Expected indented block in loop\"),\n\n ExpectedMatchArm => f.write_str(\"Expected indented arm for match expression\"),\n\n ExpectedSwitchArm => f.write_str(\"Expected indented arm for switch expression\"),\n\n ExpectedRhsExpression => f.write_str(\"Expected expression\"),\n\n ExpectedThenKeywordOrBlock => f.write_str(\n\n \"Error parsing if expression, expected 'then' keyword or indented block.\",\n\n ),\n\n ExpectedTryBody => f.write_str(\"Expected indented block for try expression\"),\n\n ExpectedUntilBody => f.write_str(\"Expected indented block in until loop\"),\n\n ExpectedWhileBody => f.write_str(\"Expected indented block in while loop\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for SyntaxError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/parser/src/error.rs", "rank": 76, "score": 81988.42583398042 }, { "content": " UnexpectedToken,\n\n}\n\n\n\n/// Errors that arise from expecting an indented block\n\n///\n\n/// Having these errors separated out is useful for the interactive input,\n\n/// where an indented continuation can be started in response to an indentation error.\n\n#[derive(Clone, Debug)]\n\npub enum ExpectedIndentation {\n\n ExpectedCatchBody,\n\n 
ExpectedElseBlock,\n\n ExpectedElseIfBlock,\n\n ExpectedFinallyBody,\n\n ExpectedForBody,\n\n ExpectedFunctionBody,\n\n ExpectedLoopBody,\n\n ExpectedMatchArm,\n\n ExpectedRhsExpression,\n\n ExpectedSwitchArm,\n\n ExpectedThenKeywordOrBlock,\n", "file_path": "src/parser/src/error.rs", "rank": 77, "score": 81986.85579613439 }, { "content": " SwitchElseNotInLastArm => {\n\n f.write_str(\"else can only be used in the last arm in a switch expression\")\n\n }\n\n SelfArgNotInFirstPosition => f.write_str(\"self is only allowed as the first argument\"),\n\n TooManyNum2Terms => f.write_str(\"num2 only supports up to 2 terms\"),\n\n TooManyNum4Terms => f.write_str(\"num4 only supports up to 4 terms\"),\n\n UnexpectedElseIndentation => f.write_str(\"Unexpected indentation for else block\"),\n\n UnexpectedElseIfIndentation => f.write_str(\"Unexpected indentation for else if block\"),\n\n UnexpectedEscapeInString => f.write_str(\"Unexpected escape pattern in string\"),\n\n UnexpectedMatchElse => f.write_str(\"Unexpected else in match arm\"),\n\n UnexpectedMatchIf => f.write_str(\"Unexpected if condition in match arm\"),\n\n UnexpectedSwitchElse => f.write_str(\"Unexpected else in switch arm\"),\n\n UnexpectedToken => f.write_str(\"Unexpected token\"),\n\n UnexpectedTokenAfterExportId => f.write_str(\"Unexpected token after export ID\"),\n\n UnexpectedTokenInImportExpression => {\n\n f.write_str(\"Unexpected token in import expression\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/src/error.rs", "rank": 78, "score": 81986.53266599608 }, { "content": " ExpectedSwitchArmExpression => f.write_str(\"Expected expression in switch arm\"),\n\n ExpectedSwitchArmExpressionAfterThen => {\n\n f.write_str(\"Expected expression after then in switch arm\")\n\n }\n\n ExpectedThenExpression => f.write_str(\"Expected 'then' expression.\"),\n\n ExpectedUntilCondition => f.write_str(\"Expected condition in until loop\"),\n\n ExpectedWhileCondition => f.write_str(\"Expected condition 
in while loop\"),\n\n IfBlockNotAllowedInThisContext => {\n\n f.write_str(\"Non-inline if expression isn't allowed in this context.\")\n\n }\n\n ImportFromExpressionHasTooManyItems => {\n\n f.write_str(\"Too many items listed after 'from' in import expression\")\n\n }\n\n LexerError => f.write_str(\"Found an unexpected token while lexing input\"),\n\n MatchEllipsisOutsideOfNestedPatterns => {\n\n f.write_str(\"Ellipsis found outside of nested match patterns\")\n\n }\n\n MatchElseNotInLastArm => {\n\n f.write_str(\"else can only be used in the last arm in a match expression\")\n\n }\n", "file_path": "src/parser/src/error.rs", "rank": 79, "score": 81983.54837574865 }, { "content": " use SyntaxError::*;\n\n\n\n match self {\n\n ExpectedArgsEnd => f.write_str(\"Expected end of arguments ')'\"),\n\n ExpectedAssignmentTarget => f.write_str(\"Expected target for assignment\"),\n\n ExpectedCatchArgument => f.write_str(\"Expected argument for catch expression\"),\n\n ExpectedCatch => f.write_str(\"Expected catch expression after try\"),\n\n ExpectedCloseParen => f.write_str(\"Expected closing parenthesis\"),\n\n ExpectedElseExpression => f.write_str(\"Expected 'else' expression.\"),\n\n ExpectedElseIfCondition => f.write_str(\"Expected condition for 'else if'.\"),\n\n ExpectedEndOfLine => f.write_str(\"Expected end of line\"),\n\n ExpectedExportExpression => f.write_str(\"Expected ID to export\"),\n\n ExpectedExpression => f.write_str(\"Expected expression\"),\n\n ExpectedExpressionInMainBlock => f.write_str(\"Expected expression\"),\n\n ExpectedForArgs => f.write_str(\"Expected arguments in for loop\"),\n\n ExpectedForCondition => f.write_str(\"Expected condition after 'if' in for loop\"),\n\n ExpectedForInKeyword => f.write_str(\"Expected in keyword in for loop\"),\n\n ExpectedForRanges => f.write_str(\"Expected ranges in for loop\"),\n\n ExpectedFunctionArgsEnd => f.write_str(\"Expected end of function arguments '|'\"),\n\n ExpectedIdInImportExpression => 
f.write_str(\"Expected ID in import expression\"),\n", "file_path": "src/parser/src/error.rs", "rank": 80, "score": 81983.047252559 }, { "content": " ExpectedTryBody,\n\n ExpectedUntilBody,\n\n ExpectedWhileBody,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum SyntaxError {\n\n ExpectedArgsEnd,\n\n ExpectedAssignmentTarget,\n\n ExpectedCatchArgument,\n\n ExpectedCatch,\n\n ExpectedCloseParen,\n\n ExpectedElseExpression,\n\n ExpectedElseIfCondition,\n\n ExpectedEndOfLine,\n\n ExpectedExportExpression,\n\n ExpectedExpression,\n\n ExpectedExpressionInMainBlock,\n\n ExpectedForArgs,\n\n ExpectedForCondition,\n", "file_path": "src/parser/src/error.rs", "rank": 81, "score": 81982.78564291864 }, { "content": " ExpectedSwitchArmExpression,\n\n ExpectedSwitchArmExpressionAfterThen,\n\n ExpectedThenExpression,\n\n ExpectedUntilCondition,\n\n ExpectedWhileCondition,\n\n IfBlockNotAllowedInThisContext,\n\n ImportFromExpressionHasTooManyItems,\n\n LexerError,\n\n MatchEllipsisOutsideOfNestedPatterns,\n\n MatchElseNotInLastArm,\n\n SelfArgNotInFirstPosition,\n\n SwitchElseNotInLastArm,\n\n TooManyNum2Terms,\n\n TooManyNum4Terms,\n\n UnexpectedElseIndentation,\n\n UnexpectedElseIfIndentation,\n\n UnexpectedEscapeInString,\n\n UnexpectedMatchElse,\n\n UnexpectedMatchIf,\n\n UnexpectedSwitchElse,\n", "file_path": "src/parser/src/error.rs", "rank": 82, "score": 81981.76268378328 }, { "content": " (excerpt, padding)\n\n } else {\n\n let mut excerpt = String::new();\n\n\n\n for (excerpt_line, line_number) in excerpt_lines.iter().zip(line_numbers.iter()) {\n\n excerpt += &format!(\n\n \" {:>width$} | {}\\n\",\n\n line_number,\n\n excerpt_line,\n\n width = number_width\n\n );\n\n }\n\n\n\n (excerpt, padding)\n\n }\n\n };\n\n\n\n let position_info = if let Some(path) = source_path {\n\n let display_path = if let Ok(current_dir) = std::env::current_dir() {\n\n if let Ok(stripped) = path.strip_prefix(current_dir) {\n", "file_path": "src/parser/src/error.rs", "rank": 83, "score": 
81979.66824352661 }, { "content": " ExpectedIfCondition => f.write_str(\"Expected condition in if expression\"),\n\n ExpectedImportKeywordAfterFrom => f.write_str(\"Expected 'import' after 'from' ID\"),\n\n ExpectedImportModuleId => f.write_str(\"Expected module ID in import expression\"),\n\n ExpectedIndentedLookupContinuation => {\n\n f.write_str(\"Expected indented lookup continuation\")\n\n }\n\n ExpectedIndexEnd => f.write_str(\"Unexpected token while indexing a List, expected ']'\"),\n\n ExpectedIndexExpression => f.write_str(\"Expected index expression\"),\n\n ExpectedListEnd => f.write_str(\"Unexpected token while in List, expected ']'\"),\n\n ExpectedMapEnd => f.write_str(\"Unexpected token in Map, expected '}'\"),\n\n ExpectedMapKey => f.write_str(\"Expected key after '.' in Map access\"),\n\n ExpectedMapValue => f.write_str(\"Expected value after ':' in Map\"),\n\n ExpectedMatchArmExpression => f.write_str(\"Expected expression in match arm\"),\n\n ExpectedMatchArmExpressionAfterThen => {\n\n f.write_str(\"Expected expression after then in match arm\")\n\n }\n\n ExpectedMatchCondition => f.write_str(\"Expected condition after if in match arm\"),\n\n ExpectedMatchExpression => f.write_str(\"Expected expression after match\"),\n\n ExpectedMatchPattern => f.write_str(\"Expected pattern for match arm\"),\n\n ExpectedNegatableExpression => f.write_str(\"Expected negatable expression\"),\n", "file_path": "src/parser/src/error.rs", "rank": 84, "score": 81979.3421310749 }, { "content": " let padding = \" \".repeat(number_width + 2);\n\n\n\n if start_pos.line == end_pos.line {\n\n let mut excerpt = format!(\n\n \" {:>width$} | {}\\n\",\n\n line_numbers.first().unwrap(),\n\n excerpt_lines.first().unwrap(),\n\n width = number_width\n\n );\n\n\n\n excerpt += &format!(\n\n \"{}|{}\",\n\n padding,\n\n format!(\n\n \"{}{}\",\n\n \" \".repeat(start_pos.column as usize),\n\n \"^\".repeat((end_pos.column - start_pos.column) as usize)\n\n ),\n\n );\n\n\n", "file_path": 
"src/parser/src/error.rs", "rank": 85, "score": 81973.53392687828 }, { "content": " ExpectedForInKeyword,\n\n ExpectedForRanges,\n\n ExpectedFunctionArgsEnd,\n\n ExpectedIdInImportExpression,\n\n ExpectedIfCondition,\n\n ExpectedImportKeywordAfterFrom,\n\n ExpectedImportModuleId,\n\n ExpectedIndentedLookupContinuation,\n\n ExpectedIndexEnd,\n\n ExpectedIndexExpression,\n\n ExpectedListEnd,\n\n ExpectedMapEnd,\n\n ExpectedMapKey,\n\n ExpectedMapValue,\n\n ExpectedMatchArmExpression,\n\n ExpectedMatchArmExpressionAfterThen,\n\n ExpectedMatchCondition,\n\n ExpectedMatchExpression,\n\n ExpectedMatchPattern,\n\n ExpectedNegatableExpression,\n", "file_path": "src/parser/src/error.rs", "rank": 86, "score": 81973.53392687828 }, { "content": " stripped.display()\n\n } else {\n\n path.display()\n\n }\n\n } else {\n\n path.display()\n\n };\n\n\n\n format!(\"{} - {}:{}\", display_path, start_pos.line, start_pos.column)\n\n } else {\n\n format!(\"{}:{}\", start_pos.line, start_pos.column)\n\n };\n\n\n\n format!(\n\n \"{message}\\n --- {position_info}\\n{padding}|\\n{excerpt}\",\n\n message = message.unwrap_or(\"\"),\n\n position_info = position_info,\n\n padding = padding,\n\n excerpt = excerpt,\n\n )\n\n}\n", "file_path": "src/parser/src/error.rs", "rank": 87, "score": 81973.53392687828 }, { "content": "fn run_script(script: &str, path: Option<PathBuf>, should_fail_at_runtime: bool) {\n\n let mut koto = Koto::with_settings(KotoSettings {\n\n run_tests: true,\n\n ..Default::default()\n\n });\n\n koto.set_script_path(path);\n\n\n\n let mut prelude = koto.prelude();\n\n prelude.add_map(\"json\", koto_json::make_module());\n\n prelude.add_map(\"random\", koto_random::make_module());\n\n prelude.add_map(\"tempfile\", koto_tempfile::make_module());\n\n prelude.add_map(\"toml\", koto_toml::make_module());\n\n\n\n match koto.compile(&script) {\n\n Ok(_) => match koto.run() {\n\n Ok(_) => {\n\n if should_fail_at_runtime {\n\n panic!(\"Expected failure\");\n\n }\n\n }\n", "file_path": 
"libs/lib_tests/tests/lib_tests.rs", "rank": 88, "score": 81957.69199175885 }, { "content": "fn main() {\n\n let args = match parse_arguments() {\n\n Ok(args) => args,\n\n Err(error) => {\n\n println!(\"{}\\n\\n{}\", help_string(), error);\n\n return;\n\n }\n\n };\n\n\n\n if args.help {\n\n println!(\"{}\", help_string());\n\n return;\n\n }\n\n\n\n if args.version {\n\n println!(\"{}\", version_string());\n\n return;\n\n }\n\n\n\n let settings = KotoSettings {\n", "file_path": "src/cli/src/main.rs", "rank": 89, "score": 68403.36537206045 }, { "content": "#[derive(Debug)]\n\nstruct Thread {\n\n join_handle: Option<JoinHandle<Result<Value, RuntimeError>>>,\n\n}\n\n\n\nimpl Thread {\n\n fn make_thread_map(join_handle: JoinHandle<Result<Value, RuntimeError>>) -> Value {\n\n let mut result = ValueMap::new();\n\n\n\n result.add_instance_fn(\"join\", |vm, args| {\n\n let args = vm.get_args(args);\n\n get_external_instance!(args, \"Thread\", \"join\", Thread, thread, {\n\n let result = thread.join_handle.take().unwrap().join();\n\n match result {\n\n Ok(Ok(result)) => Ok(result),\n\n Ok(Err(koto_error)) => Err(koto_error),\n\n Err(_) => external_error!(\"thread.join: thread panicked\"),\n\n }\n\n })\n\n });\n\n\n", "file_path": "src/runtime/src/core/thread.rs", "rank": 90, "score": 68169.60376773927 }, { "content": "/// Context shared by all VMs across modules\n\nstruct SharedContext {\n\n pub prelude: ValueMap,\n\n core_lib: CoreLib,\n\n}\n\n\n\nimpl Default for SharedContext {\n\n fn default() -> Self {\n\n let core_lib = CoreLib::default();\n\n\n\n let mut prelude = ValueMap::default();\n\n prelude.add_map(\"io\", core_lib.io.clone());\n\n prelude.add_map(\"iterator\", core_lib.iterator.clone());\n\n prelude.add_map(\"koto\", core_lib.koto.clone());\n\n prelude.add_map(\"list\", core_lib.list.clone());\n\n prelude.add_map(\"map\", core_lib.map.clone());\n\n prelude.add_map(\"os\", core_lib.os.clone());\n\n prelude.add_map(\"number\", core_lib.number.clone());\n\n 
prelude.add_map(\"range\", core_lib.range.clone());\n\n prelude.add_map(\"string\", core_lib.string.clone());\n\n prelude.add_map(\"test\", core_lib.test.clone());\n", "file_path": "src/runtime/src/vm.rs", "rank": 91, "score": 68164.34083850126 }, { "content": "#[derive(Default)]\n\nstruct KotoArgs {\n\n help: bool,\n\n version: bool,\n\n run_tests: bool,\n\n show_bytecode: bool,\n\n show_annotated: bool,\n\n script: Option<String>,\n\n script_args: Vec<String>,\n\n}\n\n\n", "file_path": "src/cli/src/main.rs", "rank": 92, "score": 68164.34083850126 }, { "content": "struct PoetryArgs {\n\n help: bool,\n\n version: bool,\n\n script: String,\n\n watch: bool,\n\n}\n\n\n", "file_path": "examples/poetry/src/main.rs", "rank": 93, "score": 68164.34083850126 }, { "content": "struct BenchmarkRunner {\n\n runtime: Koto,\n\n}\n\n\n\nimpl BenchmarkRunner {\n\n fn new(script_path: &str, args: &[String]) -> Self {\n\n let mut path = current_dir().unwrap().canonicalize().unwrap();\n\n path.push(\"..\");\n\n path.push(\"..\");\n\n path.push(\"koto\");\n\n path.push(\"benches\");\n\n path.push(script_path);\n\n let script = read_to_string(path).expect(\"Unable to load path\");\n\n\n\n let mut runtime = Koto::new();\n\n match runtime.compile(&script) {\n\n Ok(_) => {\n\n runtime.settings.run_tests = true;\n\n\n\n if let Err(error) = runtime.run_with_args(&args) {\n", "file_path": "src/koto/benches/koto_benchmark.rs", "rank": 94, "score": 66786.28138340874 }, { "content": "fn consume_and_count_utf8(\n\n chars: &mut Peekable<Chars>,\n\n predicate: impl Fn(char) -> bool,\n\n) -> (usize, usize) {\n\n let mut char_bytes = 0;\n\n let mut char_count = 0;\n\n\n\n while let Some(c) = chars.peek() {\n\n if !predicate(*c) {\n\n break;\n\n }\n\n char_bytes += c.len_utf8();\n\n char_count += c.width().unwrap_or(0);\n\n chars.next();\n\n }\n\n\n\n (char_bytes, char_count)\n\n}\n\n\n", "file_path": "src/lexer/src/lexer.rs", "rank": 95, "score": 65559.01313388272 }, { "content": 
"#[derive(Clone)]\n\nstruct PeekedToken<'a> {\n\n token: Option<Token>,\n\n slice: &'a str,\n\n span: Span,\n\n indent: usize,\n\n source_position: usize,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct KotoLexer<'a> {\n\n lexer: TokenLexer<'a>,\n\n peeked_tokens: Vec<PeekedToken<'a>>,\n\n current_peek_index: usize,\n\n}\n\n\n\nimpl<'a> KotoLexer<'a> {\n\n pub fn new(source: &'a str) -> Self {\n\n Self {\n\n lexer: TokenLexer::new(source),\n\n peeked_tokens: Vec::new(),\n", "file_path": "src/lexer/src/lexer.rs", "rank": 96, "score": 63993.03375864558 }, { "content": "#[derive(Clone)]\n\nstruct TokenLexer<'a> {\n\n source: &'a str,\n\n previous: usize,\n\n current: usize,\n\n indent: usize,\n\n position: Position,\n\n span: Span,\n\n}\n\n\n\nimpl<'a> TokenLexer<'a> {\n\n pub fn new(source: &'a str) -> Self {\n\n Self {\n\n source,\n\n previous: 0,\n\n current: 0,\n\n indent: 0,\n\n position: Position::default(),\n\n span: Span::default(),\n\n }\n\n }\n", "file_path": "src/lexer/src/lexer.rs", "rank": 97, "score": 63993.03375864558 }, { "content": "#[test]\n\nfn one_plus_two() {\n\n let mut koto = Koto::default();\n\n match koto.compile(\"1 + 2\") {\n\n Ok(_) => match koto.run() {\n\n Ok(result) => match result {\n\n Value::Number(n) => assert_eq!(n, 3.0),\n\n other => panic!(\"Unexpected result: {}\", other),\n\n },\n\n Err(runtime_error) => {\n\n panic!(\"Runtime error: {}\", runtime_error);\n\n }\n\n },\n\n Err(compiler_error) => {\n\n panic!(\"Compiler error: {}\", compiler_error);\n\n }\n\n }\n\n}\n", "file_path": "src/koto/tests/one_plus_two.rs", "rank": 98, "score": 63090.45796225083 }, { "content": "pub trait ValueMapKey {\n\n fn to_value_ref(&self) -> ValueRef;\n\n}\n\n\n\nimpl<'a> Hash for dyn ValueMapKey + 'a {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.to_value_ref().hash(state);\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq for dyn ValueMapKey + 'a {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.to_value_ref() == other.to_value_ref()\n\n 
}\n\n}\n\n\n\nimpl<'a> Eq for dyn ValueMapKey + 'a {}\n\n\n\nimpl ValueMapKey for Value {\n\n fn to_value_ref(&self) -> ValueRef {\n", "file_path": "src/runtime/src/value_map.rs", "rank": 99, "score": 59649.811241532385 } ]
Rust
quic/s2n-quic-transport/src/transmission/application.rs
nsdyoshi/s2n-quic
0635e62cdc58fb968de0a0d576822e09c96cba99
use crate::{ ack::AckManager, connection, contexts::WriteContext, endpoint, path, path::mtu, recovery, space::{datagram, HandshakeStatus}, stream::{AbstractStreamManager, StreamTrait as Stream}, sync::{flag, flag::Ping}, transmission::{self, Mode}, }; use core::ops::RangeInclusive; use s2n_quic_core::packet::number::PacketNumberSpace; pub enum Payload<'a, Config: endpoint::Config> { Normal(Normal<'a, Config::Stream, Config>), MtuProbe(MtuProbe<'a>), PathValidationOnly(PathValidationOnly<'a, Config>), } impl<'a, Config: endpoint::Config> Payload<'a, Config> { #[allow(clippy::too_many_arguments)] pub fn new( path_id: path::Id, path_manager: &'a mut path::Manager<Config>, local_id_registry: &'a mut connection::LocalIdRegistry, transmission_mode: transmission::Mode, ack_manager: &'a mut AckManager, handshake_status: &'a mut HandshakeStatus, ping: &'a mut flag::Ping, stream_manager: &'a mut AbstractStreamManager<Config::Stream>, recovery_manager: &'a mut recovery::Manager<Config>, datagram_manager: &'a mut datagram::Manager<Config>, ) -> Self { if transmission_mode != Mode::PathValidationOnly { debug_assert_eq!(path_id, path_manager.active_path_id()); } match transmission_mode { Mode::LossRecoveryProbing | Mode::Normal => { transmission::application::Payload::Normal(Normal { ack_manager, handshake_status, ping, stream_manager, local_id_registry, path_manager, recovery_manager, datagram_manager, }) } Mode::MtuProbing => transmission::application::Payload::MtuProbe(MtuProbe { mtu_controller: &mut path_manager[path_id].mtu_controller, }), Mode::PathValidationOnly => { transmission::application::Payload::PathValidationOnly(PathValidationOnly { path: &mut path_manager[path_id], }) } } } } impl<'a, Config: endpoint::Config> super::Payload for Payload<'a, Config> { fn size_hint(&self, range: RangeInclusive<usize>) -> usize { (*range.start()).max(1) } fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { match self { Payload::Normal(inner) => inner.on_transmit(context), 
Payload::MtuProbe(inner) => inner.on_transmit(context), Payload::PathValidationOnly(inner) => inner.on_transmit(context), } } fn packet_number_space(&self) -> PacketNumberSpace { PacketNumberSpace::ApplicationData } } impl<'a, Config: endpoint::Config> transmission::interest::Provider for Payload<'a, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { match self { Payload::Normal(inner) => inner.transmission_interest(query), Payload::MtuProbe(inner) => inner.transmission_interest(query), Payload::PathValidationOnly(inner) => inner.transmission_interest(query), } } } pub struct Normal<'a, S: Stream, Config: endpoint::Config> { ack_manager: &'a mut AckManager, handshake_status: &'a mut HandshakeStatus, ping: &'a mut Ping, stream_manager: &'a mut AbstractStreamManager<S>, local_id_registry: &'a mut connection::LocalIdRegistry, path_manager: &'a mut path::Manager<Config>, recovery_manager: &'a mut recovery::Manager<Config>, datagram_manager: &'a mut datagram::Manager<Config>, } impl<'a, S: Stream, Config: endpoint::Config> Normal<'a, S, Config> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { let did_send_ack = self.ack_manager.on_transmit(context); if context.transmission_constraint().can_transmit() || context.transmission_constraint().can_retransmit() { self.handshake_status.on_transmit(context); self.path_manager.active_path_mut().on_transmit(context); self.local_id_registry.on_transmit(context); self.path_manager.on_transmit(context); self.datagram_manager .on_transmit(context, self.stream_manager); let _ = self.stream_manager.on_transmit(context); self.recovery_manager.on_transmit(context); let _ = self.ping.on_transmit(context); } if did_send_ack { self.ack_manager.on_transmit_complete(context); } } } impl<'a, S: Stream, Config: endpoint::Config> transmission::interest::Provider for Normal<'a, S, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, 
query: &mut Q, ) -> transmission::interest::Result { self.ack_manager.transmission_interest(query)?; self.handshake_status.transmission_interest(query)?; self.stream_manager.transmission_interest(query)?; self.datagram_manager.transmission_interest(query)?; self.local_id_registry.transmission_interest(query)?; self.path_manager.transmission_interest(query)?; self.recovery_manager.transmission_interest(query)?; self.path_manager .active_path() .transmission_interest(query)?; self.ping.transmission_interest(query)?; Ok(()) } } pub struct MtuProbe<'a> { mtu_controller: &'a mut mtu::Controller, } impl<'a> MtuProbe<'a> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { if context.transmission_constraint().can_transmit() { self.mtu_controller.on_transmit(context) } } } impl<'a> transmission::interest::Provider for MtuProbe<'a> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { self.mtu_controller.transmission_interest(query) } } pub struct PathValidationOnly<'a, Config: endpoint::Config> { path: &'a mut path::Path<Config>, } impl<'a, Config: endpoint::Config> PathValidationOnly<'a, Config> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { if context.transmission_constraint().can_transmit() { self.path.on_transmit(context) } } } impl<'a, Config: endpoint::Config> transmission::interest::Provider for PathValidationOnly<'a, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { self.path.transmission_interest(query) } }
use crate::{ ack::AckManager, connection, contexts::WriteContext, endpoint, path, path::mtu, recovery, space::{datagram, HandshakeStatus}, stream::{AbstractStreamManager, StreamTrait as Stream}, sync::{flag, flag::Ping}, transmission::{self, Mode}, }; use core::ops::RangeInclusive; use s2n_quic_core::packet::number::PacketNumberSpace; pub enum Payload<'a, Config: endpoint::Config> { Normal(Normal<'a, Config::Stream, Config>), MtuProbe(MtuProbe<'a>), PathValidationOnly(PathValidationOnly<'a, Config>), } impl<'a, Config: endpoint::Config> Payload<'a, Config> { #[allow(clippy::too_many_arguments)] pub fn new( path_id: path::Id, path_manager: &'a mut path::Manager<Config>, local_id_registry: &'a mut connection::LocalIdRegistry, transmission_mode: transmission::Mode, ack_manager: &'a mut AckManager, handshake_status: &'a mut HandshakeStatus, ping: &'a mut flag::Ping, stream_manager: &'a mut AbstractStreamManager<Config::Stream>, recovery_manager: &'a mut recovery::Manager<Config>, datagram_manager: &'a mut datagram::Manager<Config>, ) -> Self { if transmission_mode != Mode::PathValidationOnly { debug_assert_eq!(path_id, path_manager.active_path_id()); } match transmission_mode { Mode::LossRecoveryProbing | Mode::Normal => { transmission::application::Payload::Normal(Normal { ack_manager, handshake_status, ping, stream_manager, local_id_registry, path_manager, recovery_manager, datagram_manager, }) } Mode::MtuProbing => transmission::application::Payload::MtuProbe(MtuProbe { mtu_controller: &mut path_manager[path_id].mtu_controller, }), Mode::PathValidationOnly => { transmission::application::Payload::PathValidationOnly(PathValidationOnly { path: &mut path_manager[path_id], }) } } } } impl<'a, Config: endpoint::Config> super::Payload for Payload<'a, Config> { fn size_hint(&self, range: RangeInclusive<usize>) -> usize { (*range.start()).max(1) } fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { match self { Payload::Normal(inner) => inner.on_transmit(context), 
Payload::MtuProbe(inner) => inner.on_transmit(context), Payload::PathValidationOnly(inner) => inner.on_transmit(context), } } fn packet_number_space(&self) -> PacketNumberSpace { PacketNumberSpace::ApplicationData } } impl<'a, Config: endpoint::Config> transmission::interest::Provider for Payload<'a, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { match self { Payload::Normal(inner) => inner.transmission_interest(query), Payload::MtuProbe(inner) => inner.transmission_interest(query), Payload::PathValidationOnly(inner) => inner.transmission_interest(query), } } } pub struct Normal<'a, S: Stream, Config: endpoint::Config> { ack_manager: &'a mut AckManager, handshake_status: &'a mut HandshakeStatus, ping: &'a mut Ping, stream_manager: &'a mut AbstractStreamManager<S>, local_id_registry: &'a mut connection::LocalIdRegistry, path_manager: &'a mut path::Manager<Config>, recovery_manager: &'a mut recovery::Manager<Config>, datagram_manager: &'a mut datagram::Manager<Config>, } impl<'a, S: Stream, Config: endpoint::Config> Normal<'a, S, Config> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { let did_send_ack = self.ack_manager.on_transmit(context); if context.transmission_constraint().can_transmit() || context.transmission_constraint().can_retransmit() { self.handshake_status.on_transmit(context); self.path_manager.active_path_mut().on_transmit(context); self.local_id_registry.on_transmit(contex
} impl<'a, S: Stream, Config: endpoint::Config> transmission::interest::Provider for Normal<'a, S, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { self.ack_manager.transmission_interest(query)?; self.handshake_status.transmission_interest(query)?; self.stream_manager.transmission_interest(query)?; self.datagram_manager.transmission_interest(query)?; self.local_id_registry.transmission_interest(query)?; self.path_manager.transmission_interest(query)?; self.recovery_manager.transmission_interest(query)?; self.path_manager .active_path() .transmission_interest(query)?; self.ping.transmission_interest(query)?; Ok(()) } } pub struct MtuProbe<'a> { mtu_controller: &'a mut mtu::Controller, } impl<'a> MtuProbe<'a> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { if context.transmission_constraint().can_transmit() { self.mtu_controller.on_transmit(context) } } } impl<'a> transmission::interest::Provider for MtuProbe<'a> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { self.mtu_controller.transmission_interest(query) } } pub struct PathValidationOnly<'a, Config: endpoint::Config> { path: &'a mut path::Path<Config>, } impl<'a, Config: endpoint::Config> PathValidationOnly<'a, Config> { fn on_transmit<W: WriteContext>(&mut self, context: &mut W) { if context.transmission_constraint().can_transmit() { self.path.on_transmit(context) } } } impl<'a, Config: endpoint::Config> transmission::interest::Provider for PathValidationOnly<'a, Config> { fn transmission_interest<Q: transmission::interest::Query>( &self, query: &mut Q, ) -> transmission::interest::Result { self.path.transmission_interest(query) } }
t); self.path_manager.on_transmit(context); self.datagram_manager .on_transmit(context, self.stream_manager); let _ = self.stream_manager.on_transmit(context); self.recovery_manager.on_transmit(context); let _ = self.ping.on_transmit(context); } if did_send_ack { self.ack_manager.on_transmit_complete(context); } }
function_block-function_prefixed
[ { "content": "pub trait Context<Config: endpoint::Config> {\n\n const ENDPOINT_TYPE: endpoint::Type;\n\n\n\n fn is_handshake_confirmed(&self) -> bool;\n\n\n\n fn path(&self) -> &Path<Config>;\n\n\n\n fn path_mut(&mut self) -> &mut Path<Config>;\n\n\n\n fn path_by_id(&self, path_id: path::Id) -> &path::Path<Config>;\n\n\n\n fn path_mut_by_id(&mut self, path_id: path::Id) -> &mut path::Path<Config>;\n\n\n\n fn path_id(&self) -> path::Id;\n\n\n\n fn validate_packet_ack(\n\n &mut self,\n\n timestamp: Timestamp,\n\n packet_number_range: &PacketNumberRange,\n\n ) -> Result<(), transport::Error>;\n", "file_path": "quic/s2n-quic-transport/src/recovery/manager.rs", "rank": 0, "score": 388300.4351551382 }, { "content": "struct EventContext<Config: endpoint::Config> {\n\n /// The [`Connection`]s internal identifier\n\n internal_connection_id: InternalConnectionId,\n\n\n\n /// The QUIC protocol version which is used for this particular connection\n\n quic_version: u32,\n\n\n\n /// Holds the event context associated with the connection\n\n context: <Config::EventSubscriber as event::Subscriber>::ConnectionContext,\n\n}\n\n\n\nimpl<Config: endpoint::Config> fmt::Debug for EventContext<Config> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_struct(\"EventContext\")\n\n .field(\"internal_connection_id\", &self.internal_connection_id)\n\n .field(\"quic_version\", &self.quic_version)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/connection_impl.rs", "rank": 1, "score": 374721.52421379427 }, { "content": "struct RecoveryContext<'a, Config: endpoint::Config> {\n\n ack_manager: &'a mut AckManager,\n\n crypto_stream: &'a mut CryptoStream,\n\n tx_packet_numbers: &'a mut TxPacketNumbers,\n\n handshake_status: &'a HandshakeStatus,\n\n config: PhantomData<Config>,\n\n path_id: path::Id,\n\n path_manager: &'a mut path::Manager<Config>,\n\n}\n\n\n\nimpl<'a, Config: endpoint::Config> recovery::Context<Config> for 
RecoveryContext<'a, Config> {\n\n const ENDPOINT_TYPE: endpoint::Type = Config::ENDPOINT_TYPE;\n\n\n\n fn is_handshake_confirmed(&self) -> bool {\n\n self.handshake_status.is_confirmed()\n\n }\n\n\n\n fn path(&self) -> &Path<Config> {\n\n &self.path_manager[self.path_id]\n\n }\n", "file_path": "quic/s2n-quic-transport/src/space/handshake.rs", "rank": 2, "score": 357545.70616282657 }, { "content": "struct RecoveryContext<'a, Config: endpoint::Config> {\n\n ack_manager: &'a mut AckManager,\n\n crypto_stream: &'a mut CryptoStream,\n\n tx_packet_numbers: &'a mut TxPacketNumbers,\n\n handshake_status: &'a HandshakeStatus,\n\n config: PhantomData<Config>,\n\n path_id: path::Id,\n\n path_manager: &'a mut path::Manager<Config>,\n\n}\n\n\n\nimpl<'a, Config: endpoint::Config> recovery::Context<Config> for RecoveryContext<'a, Config> {\n\n const ENDPOINT_TYPE: endpoint::Type = Config::ENDPOINT_TYPE;\n\n\n\n fn is_handshake_confirmed(&self) -> bool {\n\n self.handshake_status.is_confirmed()\n\n }\n\n\n\n fn path(&self) -> &Path<Config> {\n\n &self.path_manager[self.path_id]\n\n }\n", "file_path": "quic/s2n-quic-transport/src/space/initial.rs", "rank": 3, "score": 357545.70616282657 }, { "content": "struct RecoveryContext<'a, Config: endpoint::Config> {\n\n ack_manager: &'a mut AckManager,\n\n handshake_status: &'a mut HandshakeStatus,\n\n ping: &'a mut flag::Ping,\n\n stream_manager: &'a mut Config::StreamManager,\n\n local_id_registry: &'a mut connection::LocalIdRegistry,\n\n path_id: path::Id,\n\n path_manager: &'a mut path::Manager<Config>,\n\n tx_packet_numbers: &'a mut TxPacketNumbers,\n\n}\n\n\n\nimpl<'a, Config: endpoint::Config> recovery::Context<Config> for RecoveryContext<'a, Config> {\n\n const ENDPOINT_TYPE: endpoint::Type = Config::ENDPOINT_TYPE;\n\n\n\n fn is_handshake_confirmed(&self) -> bool {\n\n self.handshake_status.is_confirmed()\n\n }\n\n\n\n fn path(&self) -> &Path<Config> {\n\n &self.path_manager[self.path_id]\n", "file_path": 
"quic/s2n-quic-transport/src/space/application.rs", "rank": 4, "score": 357545.70616282657 }, { "content": "pub fn scenario(config: Config) -> Scenario {\n\n let Config {\n\n connections,\n\n streams,\n\n size: sizes,\n\n time,\n\n } = config;\n\n\n\n Scenario::build(|scenario| {\n\n let server = scenario.create_server();\n\n\n\n scenario.create_client(|client| {\n\n for size in sizes.iter().copied() {\n\n let ping = format!(\"ping {size}\");\n\n let pong = format!(\"pong {size}\");\n\n client.scope(|client| {\n\n for _ in 0..connections {\n\n client.spawn(|client| {\n\n client.connect_to(&server, |conn| {\n\n conn.scope(|conn| {\n", "file_path": "netbench/netbench-scenarios/src/ping.rs", "rank": 5, "score": 315311.4508261703 }, { "content": "pub fn scenario(config: Config) -> Scenario {\n\n let Config { connections } = config;\n\n\n\n Scenario::build(|scenario| {\n\n let server = scenario.create_server();\n\n\n\n scenario.create_client(|client| {\n\n for _ in 0..connections {\n\n client.connect_to(&server, |conn| {\n\n conn.open_bidirectional_stream(\n\n |local| {\n\n local.send(1.bytes());\n\n local.receive(1.bytes());\n\n },\n\n |remote| {\n\n remote.receive(1.bytes());\n\n remote.send(1.bytes());\n\n },\n\n );\n\n });\n\n }\n\n });\n\n })\n\n}\n", "file_path": "netbench/netbench-scenarios/src/connect.rs", "rank": 6, "score": 314815.83268594276 }, { "content": "/// Sets up a test environment for Stream testing with custom parameters\n\npub fn setup_stream_test_env_with_config(config: TestEnvironmentConfig) -> TestEnvironment {\n\n let rx_connection_flow_controller = IncomingConnectionFlowController::new(\n\n VarInt::new(config.initial_connection_receive_window_size).unwrap(),\n\n config.desired_connection_flow_control_window,\n\n );\n\n\n\n let tx_connection_flow_controller = OutgoingConnectionFlowController::new(\n\n VarInt::new(config.initial_connection_send_window_size).unwrap(),\n\n );\n\n\n\n let stream = StreamImpl::new(StreamConfig {\n\n 
incoming_connection_flow_controller: rx_connection_flow_controller.clone(),\n\n outgoing_connection_flow_controller: tx_connection_flow_controller.clone(),\n\n local_endpoint_type: config.local_endpoint_type,\n\n stream_id: config.stream_id,\n\n initial_receive_window: VarInt::new(config.initial_receive_window).unwrap(),\n\n desired_flow_control_window: config.desired_flow_control_window,\n\n initial_send_window: VarInt::new(config.initial_send_window).unwrap(),\n\n max_send_buffer_size: config.max_send_buffer_size as u32,\n\n });\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 7, "score": 291545.1669492998 }, { "content": "pub trait Connect<Endpoint> {\n\n fn connect_to(&self, handle: &Connection<Endpoint>) -> op::Client;\n\n}\n\n\n\nimpl Connect<Client> for Connection<Server> {\n\n fn connect_to(&self, handle: &Connection<Client>) -> op::Client {\n\n let server_id = self.endpoint_id;\n\n let server = &mut self.state.servers.borrow_mut()[server_id as usize];\n\n\n\n fn push(\n\n connections: &mut Vec<Arc<crate::scenario::Connection>>,\n\n ops: &Vec<op::Connection>,\n\n peer_streams: &Vec<Vec<op::Connection>>,\n\n ) -> u64 {\n\n // try to dedupe the connection operations if one exists\n\n for (id, prev) in connections.iter().enumerate() {\n\n if &prev.ops == ops && &prev.peer_streams == peer_streams {\n\n return id as u64;\n\n }\n\n }\n", "file_path": "netbench/netbench/src/scenario/builder/connection.rs", "rank": 8, "score": 275359.90012737614 }, { "content": "#[inline]\n\nfn poll<F: FnMut(&mut ring::Rx, &mut Context) -> Option<usize>>(\n\n fd: &Fd,\n\n rx: &mut ring::Rx,\n\n cx: &mut Context,\n\n mut on_ready: F,\n\n) -> Poll<Result<(), ()>> {\n\n // limit the number of loops to prevent endless spinning on registering wakers\n\n for iteration in 0..10 {\n\n trace!(\"iteration {}\", iteration);\n\n\n\n // query socket readiness through tokio's polling facilities\n\n match fd.poll_read_ready(cx) {\n\n Poll::Ready(Ok(mut guard)) => {\n\n // 
try to acquire entries for the queue\n\n let count = rx.acquire(1) as usize;\n\n\n\n trace!(\"acquired {count} items from RX ring\");\n\n\n\n // if we didn't get anything, we need to clear readiness and try again\n\n if count == 0 {\n", "file_path": "tools/xdp/s2n-quic-xdp/src/task/rx/tokio_impl.rs", "rank": 9, "score": 274705.36177235073 }, { "content": "struct SessionInfo<Config: endpoint::Config> {\n\n session: <Config::TLSEndpoint as tls::Endpoint>::Session,\n\n initial_cid: InitialId,\n\n}\n\n\n\npub struct PacketSpaceManager<Config: endpoint::Config> {\n\n session_info: Option<SessionInfo<Config>>,\n\n retry_cid: Option<Box<PeerId>>,\n\n initial: Option<Box<InitialSpace<Config>>>,\n\n handshake: Option<Box<HandshakeSpace<Config>>>,\n\n application: Option<Box<ApplicationSpace<Config>>>,\n\n zero_rtt_crypto:\n\n Option<Box<<<Config::TLSEndpoint as tls::Endpoint>::Session as CryptoSuite>::ZeroRttKey>>,\n\n handshake_status: HandshakeStatus,\n\n /// Server Name Indication\n\n pub server_name: Option<ServerName>,\n\n //= https://www.rfc-editor.org/rfc/rfc9000#section-7\n\n //# Endpoints MUST explicitly negotiate an application protocol.\n\n\n\n //= https://www.rfc-editor.org/rfc/rfc9001#section-8.1\n", "file_path": "quic/s2n-quic-transport/src/space/mod.rs", "rank": 10, "score": 270364.58234309015 }, { "content": "fn helper_generate_path_manager(max_ack_delay: Duration) -> path::Manager<Config> {\n\n helper_generate_path_manager_with_first_addr(max_ack_delay, Default::default())\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/recovery/manager/tests.rs", "rank": 11, "score": 262874.1831102262 }, { "content": "pub fn scenario(config: Config) -> Scenario {\n\n let Config {\n\n request_size,\n\n response_size,\n\n count,\n\n connections,\n\n parallel,\n\n client_send_rate,\n\n client_receive_rate,\n\n server_send_rate,\n\n server_receive_rate,\n\n response_delay,\n\n response_unblock,\n\n } = config;\n\n let response_unblock = 
response_unblock.min(response_size);\n\n\n\n type Checkpoint = Option<\n\n builder::checkpoint::Checkpoint<builder::Client, builder::Local, builder::checkpoint::Park>,\n\n >;\n\n\n", "file_path": "netbench/netbench-scenarios/src/request_response.rs", "rank": 12, "score": 262190.3229126263 }, { "content": "pub fn gen_pattern_test_chunks(mut offset: VarInt, lens: &[usize]) -> Vec<bytes::Bytes> {\n\n lens.iter()\n\n .map(|size| {\n\n let data = bytes::Bytes::from(gen_pattern_test_data(offset, *size));\n\n offset += *size;\n\n data\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n\n/// Defines whether a wakeup is expected.\n\n/// `None` means there are no expectations. `Some(true)` expects a wakeup,\n\n/// `Some(false)` does not.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct ExpectWakeup(pub Option<bool>);\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 13, "score": 261874.27872017326 }, { "content": "pub trait PacketSpace<Config: endpoint::Config> {\n\n const INVALID_FRAME_ERROR: &'static str;\n\n\n\n fn handle_crypto_frame<Pub: event::ConnectionPublisher>(\n\n &mut self,\n\n frame: CryptoRef,\n\n datagram: &DatagramInfo,\n\n path: &mut Path<Config>,\n\n publisher: &mut Pub,\n\n ) -> Result<(), transport::Error>;\n\n\n\n #[allow(clippy::too_many_arguments)]\n\n fn handle_ack_frame<A: AckRanges, Pub: event::ConnectionPublisher>(\n\n &mut self,\n\n frame: Ack<A>,\n\n timestamp: Timestamp,\n\n path_id: path::Id,\n\n path_manager: &mut path::Manager<Config>,\n\n packet_number: PacketNumber,\n\n handshake_status: &mut HandshakeStatus,\n", "file_path": "quic/s2n-quic-transport/src/space/mod.rs", "rank": 14, "score": 259489.61169191787 }, { "content": "#[test]\n\npub fn consume_new_id_should_return_id() {\n\n let id_1 = id(b\"id01\");\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let mut reg = mapper\n\n 
.create_server_peer_id_registry(InternalConnectionIdGenerator::new().generate_id(), id_1);\n\n\n\n let id_2 = id(b\"id02\");\n\n assert!(reg.on_new_connection_id(&id_2, 1, 0, &TEST_TOKEN_2).is_ok());\n\n reg.registered_ids[1].status = New;\n\n\n\n assert!(reg\n\n .state\n\n .lock()\n\n .unwrap()\n\n .stateless_reset_map\n\n .remove(&TEST_TOKEN_2)\n\n .is_none());\n\n assert_eq!(Some(id_2), reg.consume_new_id_inner());\n\n reg.registered_ids[1].status = InUse;\n\n // this is an indirect way to test that we inserted a reset token when we consumed id_2\n\n assert!(reg\n\n .state\n\n .lock()\n\n .unwrap()\n\n .stateless_reset_map\n\n .remove(&TEST_TOKEN_2)\n\n .is_some());\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 15, "score": 248753.60718024225 }, { "content": "/// Asserts that a given number of wakeups had been enqueued\n\nfn assert_wakeups(wakeup_queue: &mut WakeupQueue<InternalConnectionId>, expected_wakeups: usize) {\n\n let mut dequeued_wakeups = VecDeque::new();\n\n let (waker, _counter) = new_count_waker();\n\n wakeup_queue.poll_pending_wakeups(&mut dequeued_wakeups, &Context::from_waker(&waker));\n\n\n\n assert_eq!(expected_wakeups, dequeued_wakeups.len());\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/manager/tests.rs", "rank": 16, "score": 247973.0159134426 }, { "content": "pub fn initialize_logger(endpoint: &str) {\n\n use std::sync::Once;\n\n\n\n static TRACING: Once = Once::new();\n\n\n\n // make sure this only gets initialized once (per process)\n\n TRACING.call_once(|| {\n\n // always write to the same file, and don't rotate it. 
This would be a\n\n // bad idea for a long running process, but is useful to make sure that\n\n // all the logs of our program end up in the same file.\n\n let file_appender = tracing_appender::rolling::never(\"logs\", format!(\"{endpoint}.txt\"));\n\n\n\n tracing_subscriber::fmt()\n\n .with_max_level(Level::DEBUG)\n\n // don't color the output, otherwise the text logs will have odd\n\n // characters\n\n .with_ansi(false)\n\n .with_writer(file_appender)\n\n .init();\n\n });\n", "file_path": "examples/rustls-mtls/src/lib.rs", "rank": 17, "score": 245507.03603732045 }, { "content": "#[test]\n\npub fn consume_new_id_should_error_if_no_ids_are_available() {\n\n let id_1 = id(b\"id01\");\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let mut reg = mapper\n\n .create_server_peer_id_registry(InternalConnectionIdGenerator::new().generate_id(), id_1);\n\n\n\n assert_eq!(None, reg.consume_new_id_inner());\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 18, "score": 245230.59997219063 }, { "content": "#[allow(dead_code)] // don't warn on unused providers for now\n\nstruct EndpointConfig<\n\n CongestionController,\n\n ConnectionCloseFormatter,\n\n ConnectionID,\n\n PacketInterceptor,\n\n PathHandle,\n\n StatelessResetToken,\n\n Random,\n\n Event,\n\n Limits,\n\n Sync,\n\n Tls,\n\n Datagram,\n\n> {\n\n congestion_controller: CongestionController,\n\n connection_close_formatter: ConnectionCloseFormatter,\n\n connection_id: ConnectionID,\n\n packet_interceptor: PacketInterceptor,\n\n stateless_reset_token: StatelessResetToken,\n\n random: Random,\n", "file_path": "quic/s2n-quic/src/client/providers.rs", "rank": 19, "score": 244862.03632415563 }, { "content": "#[allow(dead_code)] // don't warn on unused providers for now\n\nstruct EndpointConfig<\n\n CongestionController,\n\n ConnectionCloseFormatter,\n\n 
ConnectionID,\n\n PacketInterceptor,\n\n PathHandle,\n\n PathMigration,\n\n StatelessResetToken,\n\n Random,\n\n EndpointLimits,\n\n Event,\n\n Limits,\n\n Sync,\n\n Tls,\n\n AddressToken,\n\n Datagram,\n\n> {\n\n congestion_controller: CongestionController,\n\n connection_close_formatter: ConnectionCloseFormatter,\n\n connection_id: ConnectionID,\n", "file_path": "quic/s2n-quic/src/server/providers.rs", "rank": 20, "score": 244862.03632415563 }, { "content": "pub fn helper_path(peer_id: connection::PeerId) -> ServerPath {\n\n let local_conn_id = connection::LocalId::TEST_ID;\n\n ServerPath::new(\n\n Default::default(),\n\n peer_id,\n\n local_conn_id,\n\n RttEstimator::new(Duration::from_millis(30)),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n )\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 21, "score": 244029.60550510878 }, { "content": "/// Copies vectored slices from one slice into another\n\n///\n\n/// The number of copied items is limited by the minimum of the lengths of each of the slices.\n\n///\n\n/// Returns the number of bytes that were copied\n\npub fn vectored_copy<A, B, T>(from: &[A], to: &mut [B]) -> usize\n\nwhere\n\n A: Deref<Target = [T]>,\n\n B: Deref<Target = [T]> + DerefMut,\n\n T: Copy,\n\n{\n\n let mut count = 0;\n\n\n\n let mut from_index = 0;\n\n let mut from_offset = 0;\n\n\n\n let mut to_index = 0;\n\n let mut to_offset = 0;\n\n\n\n // The compiler isn't smart enough to remove all of the bounds checks so we resort to\n\n // `get_unchecked`.\n\n //\n\n // https://godbolt.org/z/45cG1v\n\n\n\n // iterate until we reach one of the ends\n", "file_path": "quic/s2n-quic-core/src/slice.rs", "rank": 22, "score": 243381.40326130902 }, { "content": "/// Creates an application space packet number with the given value\n\npub fn pn(nr: usize) -> PacketNumber {\n\n PacketNumberSpace::ApplicationData.new_packet_number(VarInt::new(nr as u64).unwrap())\n\n}\n\n\n", "file_path": 
"quic/s2n-quic-transport/src/stream/testing.rs", "rank": 23, "score": 243258.121795925 }, { "content": "struct Transmission<Path: path::Handle> {\n\n path: Path,\n\n // The MINIMUM_MTU size allows for at least 170 supported versions\n\n packet: [u8; MINIMUM_MTU as usize],\n\n packet_len: usize,\n\n}\n\n\n\nimpl<Path: path::Handle> core::fmt::Debug for Transmission<Path> {\n\n fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {\n\n f.debug_struct(\"Transmission\")\n\n .field(\"remote_address\", &self.path.remote_address())\n\n .field(\"local_address\", &self.path.local_address())\n\n .field(\"packet_len\", &self.packet_len)\n\n .field(\"packet\", &&self.packet[0..self.packet_len])\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<Path: path::Handle> Transmission<Path> {\n\n pub fn new(path: Path, initial_packet: &packet::initial::ProtectedInitial) -> Self {\n", "file_path": "quic/s2n-quic-transport/src/endpoint/version.rs", "rank": 24, "score": 239452.8771460866 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"buffer\");\n\n\n\n for size in [1, 100, 1000, 1450] {\n\n let input = vec![42u8; size];\n\n\n\n group.throughput(Throughput::Bytes(input.len() as _));\n\n group.bench_with_input(BenchmarkId::new(\"write_at\", size), &input, |b, input| {\n\n let mut buffer = ReceiveBuffer::new();\n\n let mut offset = VarInt::from_u8(0);\n\n let len = VarInt::new(input.len() as _).unwrap();\n\n b.iter(move || {\n\n buffer.write_at(offset, input).unwrap();\n\n offset += len;\n\n });\n\n });\n\n }\n\n}\n", "file_path": "quic/s2n-quic-bench/src/buffer.rs", "rank": 25, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"inet\");\n\n for block in [1500, 9000, 1 << 16] {\n\n let data = vec![123u8; block];\n\n group.throughput(Throughput::Bytes(block as u64));\n\n group.bench_with_input(\n\n BenchmarkId::new(\"s2n/checksum\", block),\n\n &data,\n\n |b, block| 
{\n\n let cs = Checksum::default();\n\n let input = black_box(&block[..]);\n\n b.iter(|| {\n\n let mut checksum = cs;\n\n checksum.write(input);\n\n black_box(checksum.finish())\n\n })\n\n },\n\n );\n\n group.bench_with_input(\n\n BenchmarkId::new(\"fuchsia/checksum\", block),\n\n &data,\n\n |b, block| {\n\n let input = black_box(&block[..]);\n\n b.iter(|| black_box(internet_checksum::checksum(input)))\n\n },\n\n );\n\n }\n\n group.finish();\n\n}\n", "file_path": "quic/s2n-quic-bench/src/inet.rs", "rank": 26, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"xdp/encoder\");\n\n let overhead = 100;\n\n\n\n let paths = [\n\n (\n\n \"ipv4\",\n\n true,\n\n path::Tuple {\n\n remote_address: path::RemoteAddress {\n\n mac: Default::default(),\n\n ip: IpV4Address::default().into(),\n\n port: 0,\n\n },\n\n local_address: path::LocalAddress {\n\n mac: Default::default(),\n\n ip: IpV4Address::default().into(),\n\n port: 0,\n\n },\n\n },\n", "file_path": "quic/s2n-quic-bench/src/xdp.rs", "rank": 27, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n codec(c);\n\n}\n\n\n", "file_path": "quic/s2n-quic-bench/src/frame.rs", "rank": 28, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n buffer::benchmarks(c);\n\n crypto::benchmarks(c);\n\n frame::benchmarks(c);\n\n inet::benchmarks(c);\n\n packet::benchmarks(c);\n\n sync::benchmarks(c);\n\n varint::benchmarks(c);\n\n xdp::benchmarks(c);\n\n}\n", "file_path": "quic/s2n-quic-bench/src/lib.rs", "rank": 29, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n round_trip(c);\n\n}\n\n\n", "file_path": "quic/s2n-quic-bench/src/varint.rs", "rank": 30, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n aes::benchmarks(c);\n\n aesgcm::benchmarks(c);\n\n ghash::benchmarks(c);\n\n}\n", "file_path": 
"quic/s2n-quic-bench/src/crypto.rs", "rank": 31, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n spsc_benches(c);\n\n}\n\n\n", "file_path": "quic/s2n-quic-bench/src/sync.rs", "rank": 32, "score": 238329.77346695442 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n codec(c);\n\n}\n\n\n", "file_path": "quic/s2n-quic-bench/src/packet.rs", "rank": 33, "score": 238329.77346695442 }, { "content": "#[inline(always)]\n\npub fn for_each_block<F: FnMut(&mut [u8; BLOCK_LEN])>(input: &mut [u8], mut f: F) {\n\n for chunk in input.chunks_exact_mut(BLOCK_LEN) {\n\n let block: &mut [u8; BLOCK_LEN] = chunk.try_into().unwrap();\n\n f(block)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod rust_crypto;\n", "file_path": "quic/s2n-quic-crypto/src/aes/testing.rs", "rank": 34, "score": 237061.1439135072 }, { "content": "#[test]\n\nfn new() {\n\n let bbr = BbrCongestionController::new(MINIMUM_MTU);\n\n\n\n assert_eq!(Bandwidth::ZERO, bbr.data_rate_model.max_bw());\n\n assert_eq!(None, bbr.data_volume_model.min_rtt());\n\n assert_eq!(0, bbr.prior_cwnd);\n\n assert!(!bbr.idle_restart);\n\n assert_eq!(0, bbr.data_volume_model.extra_acked());\n\n\n\n // BBRResetCongestionSignals()\n\n bbr::congestion::testing::assert_reset(bbr.congestion_state);\n\n\n\n // BBRResetLowerBounds()\n\n assert_eq!(u64::MAX, bbr.data_volume_model.inflight_lo());\n\n assert_eq!(Bandwidth::INFINITY, bbr.data_rate_model.bw_lo());\n\n\n\n // BBRInitRoundCounting()\n\n assert!(!bbr.round_counter.round_start());\n\n assert_eq!(0, bbr.round_counter.round_count());\n\n\n", "file_path": "quic/s2n-quic-core/src/recovery/bbr/tests.rs", "rank": 35, "score": 236570.1030019108 }, { "content": "type Path = super::Path<Config>;\n\n\n\n//= https://www.rfc-editor.org/rfc/rfc9002#section-6.2.2\n\n//= type=test\n\n//# When no previous RTT is available, the initial RTT\n\n//# SHOULD be set to 333 milliseconds. 
This results in handshakes\n\n//# starting with a PTO of 1 second, as recommended for TCP's initial\n\n//# RTO; see Section 2 of [RFC6298].\n", "file_path": "quic/s2n-quic-transport/src/recovery/manager/tests.rs", "rank": 36, "score": 236566.47901317777 }, { "content": "// Abandon timer should use max PTO of active and new path(new path uses kInitialRtt)\n\n// Setup 1:\n\n// - create manager with path\n\n// - create datagram for packet on second path\n\n// - call handle_connection_migration with packet for second path\n\n//\n\n// Trigger 1:\n\n// - modify rtt for fist path to detect difference in PTO\n\n//\n\n// Expectation 1:\n\n// - verify PTO of second path > PTO of first path\n\n//\n\n// Setup 2:\n\n// - call on_transmit for second path to send challenge and arm abandon timer\n\n//\n\n// Trigger 2:\n\n// - call second_path.on_timeout with abandon_time - 10ms\n\n//\n\n// Expectation 2:\n\n// - verify challenge is NOT abandoned\n\n//\n\n// Trigger 3:\n\n// - call second_path.on_timeout with abandon_time + 10ms\n\n//\n\n// Expectation 3:\n\n// - verify challenge is abandoned\n\nfn connection_migration_new_path_abandon_timer() {\n\n // Setup 1:\n\n let mut publisher = Publisher::snapshot();\n\n let new_addr: SocketAddr = \"127.0.0.1:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n\n let first_path = ServerPath::new(\n\n new_addr,\n\n connection::PeerId::try_from_bytes(&[1]).unwrap(),\n\n connection::LocalId::TEST_ID,\n\n RttEstimator::new(Duration::from_millis(30)),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n );\n\n let mut manager = manager_server(first_path);\n\n\n\n let new_addr: SocketAddr = \"127.0.0.2:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 37, "score": 236041.08110893468 }, { "content": "pub fn 
benchmarks(c: &mut Criterion) {\n\n let impls = [impls!(aes128), impls!(aes256)];\n\n\n\n // create some shared values\n\n let nonce = black_box([123u8; NONCE_LEN]);\n\n let aad = black_box([123u8; 20]);\n\n\n\n for (group, impls) in impls.iter() {\n\n let mut encrypt = c.benchmark_group(format!(\"crypto/aesgcm/{group}/encrypt\"));\n\n for imp in impls.iter() {\n\n for block in testing::BLOCK_SIZES.iter() {\n\n encrypt.throughput(Throughput::Bytes(block.len() as _));\n\n encrypt.bench_with_input(\n\n BenchmarkId::new(imp.name, block),\n\n &block,\n\n move |b, block| {\n\n let key = &imp.key;\n\n\n\n let mut input = block.to_vec();\n\n let payload_len = input.len();\n", "file_path": "quic/s2n-quic-bench/src/crypto/aesgcm.rs", "rank": 38, "score": 234890.25928629417 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n aes128(c);\n\n aes256(c);\n\n}\n", "file_path": "quic/s2n-quic-bench/src/crypto/aes.rs", "rank": 39, "score": 234890.25928629417 }, { "content": "pub fn benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"crypto/ghash\");\n\n for imp in testing::ghash::implementations() {\n\n for block in testing::BLOCK_SIZES.iter() {\n\n group.throughput(Throughput::Bytes(block.len() as _));\n\n group.bench_with_input(BenchmarkId::new(imp.name(), block), &block, |b, block| {\n\n let key = imp.new([1; 16]);\n\n let input = &block[..];\n\n b.iter(|| black_box(key.hash(input)))\n\n });\n\n }\n\n }\n\n group.finish();\n\n}\n", "file_path": "quic/s2n-quic-bench/src/crypto/ghash.rs", "rank": 40, "score": 234890.25928629417 }, { "content": "// Abandon timer should use max PTO of active and new path(new path uses kInitialRtt)\n\n// Setup 1:\n\n// - create manager with path\n\n// - create datagram for packet on second path\n\n//\n\n// Trigger 1:\n\n// - call handle_connection_migration with packet for second path\n\n//\n\n// Expectation 1:\n\n// - assert that new path uses max_ack_delay from the active path\n\nfn 
connection_migration_use_max_ack_delay_from_active_path() {\n\n // Setup 1:\n\n let mut publisher = Publisher::snapshot();\n\n let new_addr: SocketAddr = \"127.0.0.1:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n\n let first_path = ServerPath::new(\n\n new_addr,\n\n connection::PeerId::try_from_bytes(&[1]).unwrap(),\n\n connection::LocalId::TEST_ID,\n\n RttEstimator::new(Duration::from_millis(30)),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n );\n\n let mut manager = manager_server(first_path);\n\n\n\n let new_addr: SocketAddr = \"127.0.0.2:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 41, "score": 228757.654235005 }, { "content": "// creates a test path_manager. also check out `helper_manager_with_paths`\n\n// which calls this helper with preset options\n\npub fn helper_manager_with_paths_base(\n\n register_second_conn_id: bool,\n\n validate_path_zero: bool,\n\n publisher: &mut Publisher,\n\n) -> Helper {\n\n let zero_conn_id = connection::PeerId::try_from_bytes(&[0]).unwrap();\n\n let first_conn_id = connection::PeerId::try_from_bytes(&[1]).unwrap();\n\n let second_conn_id = connection::PeerId::try_from_bytes(&[2]).unwrap();\n\n let zero_path_id = path_id(0);\n\n let first_path_id = path_id(1);\n\n let second_path_id = path_id(2);\n\n let mut zero_path = helper_path(zero_conn_id);\n\n if validate_path_zero {\n\n // simulate receiving a handshake packet to force path validation\n\n zero_path.on_handshake_packet();\n\n }\n\n assert!(!zero_path.is_challenge_pending());\n\n\n\n let now = NoopClock {}.get_time();\n\n let challenge_expiration = Duration::from_millis(10_000);\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 42, "score": 228755.28784703492 }, { "content": "#[inline]\n\npub fn 
remote_port_throttled_index(port: u16) -> Option<usize> {\n\n for (idx, throttled_port) in THROTTLED_PORTS.iter().enumerate() {\n\n if *throttled_port == port {\n\n return Some(idx);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "quic/s2n-quic-core/src/path/mod.rs", "rank": 43, "score": 228111.60665091366 }, { "content": "/// Creates a `STREAM_DATA` frame\n\npub fn stream_data<Data>(\n\n stream_id: StreamId,\n\n offset: VarInt,\n\n data: Data,\n\n is_fin: bool,\n\n) -> StreamFrame<Data> {\n\n StreamFrame {\n\n offset,\n\n data,\n\n stream_id: stream_id.into(),\n\n is_last_frame: false,\n\n is_fin,\n\n }\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 44, "score": 227733.13029046688 }, { "content": "pub fn client(\n\n handle: &Handle,\n\n events: events::Events,\n\n servers: &[SocketAddr],\n\n count: usize,\n\n delay: CliRange<humantime::Duration>,\n\n streams: CliRange<u32>,\n\n stream_data: CliRange<u64>,\n\n) -> Result {\n\n let client = Client::builder()\n\n .with_io(handle.builder().build().unwrap())?\n\n .with_tls(certificates::CERT_PEM)?\n\n .with_event((events, Tracing::default()))?\n\n .start()?;\n\n\n\n for _ in 0..count {\n\n let delay = delay.gen_duration();\n\n\n\n // pick a random server to connect to\n\n let server_addr = *rand::one_of(servers);\n", "file_path": "quic/s2n-quic-sim/src/run/endpoint.rs", "rank": 45, "score": 227485.45782553084 }, { "content": "#[test]\n\nfn stop_using_a_retired_connection_id() {\n\n let mut publisher = Publisher::snapshot();\n\n let id_1 = connection::PeerId::try_from_bytes(b\"id01\").unwrap();\n\n let first_path = ServerPath::new(\n\n Default::default(),\n\n id_1,\n\n connection::LocalId::TEST_ID,\n\n RttEstimator::default(),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n );\n\n let mut manager = manager_server(first_path);\n\n\n\n let id_2 = connection::PeerId::try_from_bytes(b\"id02\").unwrap();\n\n assert!(manager\n\n .on_new_connection_id(&id_2, 1, 1, &TEST_TOKEN_1, 
&mut publisher)\n\n .is_ok());\n\n\n\n assert_eq!(id_2, manager.paths[0].peer_connection_id);\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 46, "score": 225071.76151601603 }, { "content": "#[cfg(feature = \"alloc\")]\n\npub trait Context<Crypto: crate::crypto::CryptoSuite> {\n\n fn on_handshake_keys(\n\n &mut self,\n\n key: Crypto::HandshakeKey,\n\n header_key: Crypto::HandshakeHeaderKey,\n\n ) -> Result<(), crate::transport::Error>;\n\n\n\n fn on_zero_rtt_keys(\n\n &mut self,\n\n key: Crypto::ZeroRttKey,\n\n header_key: Crypto::ZeroRttHeaderKey,\n\n application_parameters: ApplicationParameters,\n\n ) -> Result<(), crate::transport::Error>;\n\n\n\n fn on_one_rtt_keys(\n\n &mut self,\n\n key: Crypto::OneRttKey,\n\n header_key: Crypto::OneRttHeaderKey,\n\n application_parameters: ApplicationParameters,\n\n ) -> Result<(), crate::transport::Error>;\n", "file_path": "quic/s2n-quic-core/src/crypto/tls.rs", "rank": 47, "score": 222717.04698943166 }, { "content": "/// Computes the label given the key len\n\npub fn compute_label<T: Extend<u8>>(len: usize, label: &[u8], out: &mut T) {\n\n const TLS_LABEL: &[u8] = b\"tls13 \";\n\n let label_len = TLS_LABEL.len() + label.len();\n\n debug_assert!(label_len <= core::u8::MAX as usize, \"label is too long\");\n\n\n\n out.extend((len as u16).to_be_bytes().iter().cloned());\n\n out.extend(Some(label_len as u8));\n\n out.extend(TLS_LABEL.iter().cloned());\n\n out.extend(label.iter().cloned());\n\n out.extend(Some(0));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn initial_test() {\n\n assert_eq!(compute_vec_label(32, b\"client in\"), CLIENT_IN);\n\n assert_eq!(compute_vec_label(32, b\"server in\"), SERVER_IN);\n", "file_path": "quic/s2n-quic-core/src/crypto/label.rs", "rank": 48, "score": 222653.46246289264 }, { "content": "pub fn parse_duration(s: &str) -> crate::Result<Duration> {\n\n Ok(humantime::parse_duration(s)?)\n\n}\n\n\n", "file_path": 
"netbench/netbench/src/units/duration.rs", "rank": 49, "score": 220158.89004080597 }, { "content": "/// Sets up a test environment for Stream testing with default parameters\n\npub fn setup_stream_test_env() -> TestEnvironment {\n\n let config = TestEnvironmentConfig::default();\n\n setup_stream_test_env_with_config(config)\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 50, "score": 218770.65369623213 }, { "content": "pub fn ensure_decoding_mut_matches<'a, T: DecoderValueMut<'a> + PartialEq + core::fmt::Debug>(\n\n expected_value: &T,\n\n expected_bytes: &'a mut [u8],\n\n) -> Result<(), Error> {\n\n let (actual_value, remaining) = decode_mut(expected_bytes)?;\n\n ensure!(\n\n expected_value == &actual_value,\n\n \"mut decodings do not match\",\n\n );\n\n remaining.ensure_empty()?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_u8_round_trip_value() {\n\n for i in 0..core::u8::MAX {\n\n ensure_codec_round_trip_value!(u8, i).unwrap();\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_u8_round_trip_bytes() {\n\n let bytes = (0..core::u8::MAX).collect::<Vec<_>>();\n\n ensure_codec_round_trip_bytes!(u8, &bytes).unwrap();\n\n }\n\n}\n", "file_path": "common/s2n-codec/src/testing/mod.rs", "rank": 51, "score": 218704.54008926146 }, { "content": "pub fn try_run(args: &crate::Args) -> Result<Option<()>> {\n\n let mut command = if let Ok(bpftrace) = find_bpftrace() {\n\n eprintln!(\"collecting stats with bpftrace\");\n\n Command::new(bpftrace)\n\n } else {\n\n return Ok(None);\n\n };\n\n\n\n let driver = &args.driver;\n\n let interval = args.interval;\n\n let scenario = args.scenario()?;\n\n let scenario_path = &args.scenario;\n\n\n\n let program = {\n\n let template = handlebars::Handlebars::new();\n\n template.render_template(\n\n PROGRAM,\n\n &json!({\n\n \"bin\": &driver,\n\n \"interval_ms\": interval.as_millis() as u64,\n", "file_path": "netbench/netbench-collector/src/bpftrace.rs", "rank": 52, "score": 
217019.09966720018 }, { "content": "/// Creates Stream Interests from an array of strings\n\n///\n\n/// The following interests are supported:\n\n/// - ack => delivery_notifications\n\n/// - tx => transmission::Interest::NewData\n\n/// - fin => finalization\n\n/// - cf => connection_flow_control_credits\n\npub fn stream_interests(interests: &[&str]) -> StreamInterests {\n\n let mut result = StreamInterests {\n\n retained: true,\n\n ..Default::default()\n\n };\n\n for interest in interests {\n\n match *interest {\n\n \"ack\" => result.delivery_notifications = true,\n\n \"tx\" => result.transmission = transmission::Interest::NewData,\n\n \"lost\" => result.transmission = transmission::Interest::LostData,\n\n \"fin\" => result.retained = false,\n\n \"cf\" => result.connection_flow_control_credits = true,\n\n \"sf\" => result.stream_flow_control_credits = true,\n\n other => unreachable!(\"Unsupported interest {}\", other),\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 53, "score": 215965.1117616878 }, { "content": "pub fn start_server(mut server: Server) -> Result<SocketAddr> {\n\n let server_addr = server.local_addr()?;\n\n\n\n // accept connections and echo back\n\n spawn(async move {\n\n while let Some(mut connection) = server.accept().await {\n\n spawn(async move {\n\n while let Ok(Some(mut stream)) = connection.accept_bidirectional_stream().await {\n\n spawn(async move {\n\n while let Ok(Some(chunk)) = stream.receive().await {\n\n let _ = stream.send(chunk).await;\n\n }\n\n });\n\n }\n\n });\n\n }\n\n });\n\n\n\n Ok(server_addr)\n\n}\n\n\n", "file_path": "quic/s2n-quic/src/tests/setup.rs", "rank": 54, "score": 210994.89490331674 }, { "content": "/// Generates test data using a pattern which is identifieable. For a given\n\n/// offset in the Stream the utilized data will always be the same. 
This allows\n\n/// us to do some simple validation checking whether a receiver received the\n\n/// expected data without exactly knowing the actual sent data.\n\npub fn gen_pattern_test_data(offset: VarInt, len: usize) -> Vec<u8> {\n\n let mut data = Vec::new();\n\n data.reserve(len);\n\n\n\n fn data_for_offset(offset: u64) -> u8 {\n\n (offset % 256) as u8\n\n }\n\n\n\n for i in 0..len {\n\n let current_offset: u64 = Into::<u64>::into(offset) + i as u64;\n\n data.push(data_for_offset(current_offset));\n\n }\n\n\n\n data\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/testing.rs", "rank": 55, "score": 210978.40366686578 }, { "content": "#[inline]\n\npub fn encode_packet<M: Message<Handle = path::Tuple>>(\n\n buffer: &mut EncoderBuffer,\n\n message: &mut M,\n\n state: &mut State,\n\n) -> Result<u16, tx::Error> {\n\n unsafe {\n\n assume!(\n\n buffer.remaining_capacity()\n\n > size_of::<ethernet::Header>()\n\n + size_of::<ipv6::Header>().max(size_of::<ipv4::Header>())\n\n + size_of::<udp::Header>(),\n\n \"buffer too small\"\n\n );\n\n }\n\n\n\n let path = message.path_handle();\n\n match (path.local_address.ip, path.remote_address.ip) {\n\n (IpAddress::Ipv4(local_ip), IpAddress::Ipv4(remote_ip)) => {\n\n buffer.encode(&ethernet::Header {\n\n destination: path.remote_address.mac,\n", "file_path": "quic/s2n-quic-core/src/xdp/encoder.rs", "rank": 56, "score": 210858.60444278404 }, { "content": "#[test]\n\npub fn unknown_id_is_not_active() {\n\n let id_1 = id(b\"id01\");\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let reg = mapper\n\n .create_server_peer_id_registry(InternalConnectionIdGenerator::new().generate_id(), id_1);\n\n\n\n assert!(reg.is_active(&id_1));\n\n let id_unknown = id(b\"unknown\");\n\n assert!(!reg.is_active(&id_unknown));\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 
57, "score": 210718.51254233712 }, { "content": "#[test]\n\npub fn initial_id_is_active() {\n\n let id_1 = id(b\"id01\");\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let reg = mapper\n\n .create_server_peer_id_registry(InternalConnectionIdGenerator::new().generate_id(), id_1);\n\n\n\n assert!(reg.is_active(&id_1));\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 58, "score": 210718.51254233712 }, { "content": "#[test]\n\npub fn retired_id_is_not_active() {\n\n let id_1 = id(b\"id01\");\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let mut reg = mapper\n\n .create_server_peer_id_registry(InternalConnectionIdGenerator::new().generate_id(), id_1);\n\n\n\n assert!(reg.is_active(&id_1));\n\n reg.registered_ids[0].status = PendingRetirement;\n\n assert!(!reg.is_active(&id_1));\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 59, "score": 210718.51254233712 }, { "content": "pub fn decode_mut<'a, T: DecoderValueMut<'a>>(\n\n expected_bytes: &'a mut [u8],\n\n) -> Result<(T, DecoderBufferMut<'a>), Error> {\n\n let (actual_value, remaining) = DecoderBufferMut::new(expected_bytes).decode()?;\n\n Ok((actual_value, remaining))\n\n}\n\n\n", "file_path": "common/s2n-codec/src/testing/mod.rs", "rank": 60, "score": 208239.42348272874 }, { "content": "fn helper_manager_with_paths(publisher: &mut Publisher) -> Helper {\n\n helper_manager_with_paths_base(true, true, publisher)\n\n}\n\n\n\npub struct Helper {\n\n pub now: Timestamp,\n\n pub first_expected_data: challenge::Data,\n\n pub second_expected_data: challenge::Data,\n\n pub challenge_expiration: Duration,\n\n pub zero_path_id: Id,\n\n pub first_path_id: Id,\n\n pub second_path_id: Id,\n\n pub 
manager: ServerManager,\n\n}\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 61, "score": 207315.81939728622 }, { "content": "#[doc(hidden)]\n\npub fn main<C: Configs>() -> Result<()> {\n\n let app = App::new(\"netbench scenarios\")\n\n .after_help(LONG_ABOUT.trim())\n\n .arg(\n\n Arg::with_name(\"out_dir\")\n\n .value_name(\"OUT_DIR\")\n\n .default_value(\"target/netbench\")\n\n .takes_value(true),\n\n );\n\n\n\n let map = C::registry();\n\n let args = map.clap_args().collect::<Vec<_>>();\n\n let matches = app.args(&args).get_matches();\n\n let mut overrides = map.load_overrides(&matches);\n\n\n\n let configs = C::new(&mut overrides);\n\n\n\n let mut has_error = false;\n\n for error in overrides.errors() {\n\n eprintln!(\"{error}\");\n", "file_path": "netbench/netbench-scenarios/src/lib.rs", "rank": 62, "score": 207235.91466840808 }, { "content": "fn invalid_stream_id(local_ep_type: endpoint::Type) -> StreamId {\n\n StreamId::nth(local_ep_type, StreamType::Bidirectional, 100_000).unwrap()\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/manager/tests.rs", "rank": 63, "score": 206957.01454329904 }, { "content": "#[event(\"transport:connection_attempt_failed\")]\n\n#[subject(endpoint)]\n\nstruct EndpointConnectionAttemptFailed {\n\n error: crate::connection::Error,\n\n}\n", "file_path": "quic/s2n-quic-events/events/endpoint.rs", "rank": 64, "score": 206902.34641281524 }, { "content": "pub trait QueryMut {\n\n fn execute_mut(&mut self, context: &mut dyn core::any::Any) -> ControlFlow;\n\n}\n\n\n\n/// Used to tell a query whether it should exit early or go on as usual.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum ControlFlow {\n\n Continue,\n\n Break,\n\n}\n\n\n\nimpl ControlFlow {\n\n #[inline]\n\n #[must_use]\n\n pub fn and_then(self, f: impl FnOnce() -> Self) -> Self {\n\n match self {\n\n Self::Continue => f(),\n\n Self::Break => Self::Break,\n\n }\n\n }\n", "file_path": "quic/s2n-quic-core/src/query.rs", 
"rank": 65, "score": 206155.02684325515 }, { "content": "pub trait Connection: crate::driver::timer::Provider {\n\n fn id(&self) -> u64;\n\n\n\n fn poll<T: Trace, Ch: Checkpoints>(\n\n &mut self,\n\n trace: &mut T,\n\n checkpoints: &mut Ch,\n\n now: crate::driver::timer::Timestamp,\n\n cx: &mut Context,\n\n ) -> Poll<Result<()>>;\n\n}\n\n\n\npub struct Driver<'a, C: Client<'a>> {\n\n client: C,\n\n thread: thread::Thread<'a, C>,\n\n addresses: &'a AddressMap,\n\n}\n\n\n\nimpl<'a, C: Client<'a>> Driver<'a, C> {\n\n pub fn new(client: C, scenario: &'a scenario::Client, addresses: &'a AddressMap) -> Self {\n", "file_path": "netbench/netbench/src/client.rs", "rank": 66, "score": 206139.76098298133 }, { "content": "#[inline]\n\nfn notify_empty(fd: &Fd, tx: &mut ring::Tx, cx: &mut Context) -> Poll<()> {\n\n // only notify the socket if it's set the needs wakeup flag\n\n if !tx.needs_wakeup() {\n\n trace!(\"TX ring doesn't need wake, returning early\");\n\n return Poll::Ready(());\n\n }\n\n\n\n // limit the number of loops to prevent endless spinning on registering wakers\n\n for iteration in 0..10 {\n\n trace!(\"iteration {}\", iteration);\n\n\n\n // query socket readiness through tokio's polling facilities\n\n match fd.poll_write_ready(cx) {\n\n Poll::Ready(Ok(mut guard)) => {\n\n // try to acquire entries for the queue\n\n let count = tx.acquire(u32::MAX) as usize;\n\n\n\n trace!(\"acquired {count} items from TX ring\");\n\n\n\n // if we didn't acquire all of the capacity, we need to clear readiness and try again\n", "file_path": "tools/xdp/s2n-quic-xdp/src/task/tx/tokio_impl.rs", "rank": 67, "score": 205793.64734163304 }, { "content": "type Q = fn(&Parameters, &Connection, &[Connection]) -> Option<f64>;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum Type {\n\n Integer,\n\n Percent,\n\n Duration,\n\n Throughput,\n\n Bool,\n\n}\n\n\n\nimpl Type {\n\n pub fn format(&self, [_min, max]: [f64; 2]) -> &'static str {\n\n match self {\n\n Self::Integer => 
\"~s\",\n\n Self::Percent => \"~%\",\n\n Self::Duration if max > 2000.0 => \"%M:%S\",\n\n Self::Duration => \"%Qms\",\n\n Self::Throughput => \"~s\",\n\n Self::Bool => \"c\",\n", "file_path": "quic/s2n-quic-sim/src/stats.rs", "rank": 68, "score": 205206.15601758417 }, { "content": "#[inline]\n\nfn path_id(id: u8) -> path::Id {\n\n // Safety: The path::Manager is responsible for managing path ID and is thus\n\n // responsible for using them safely\n\n unsafe { path::Id::new(id) }\n\n}\n\n\n\nimpl<Config: endpoint::Config> timer::Provider for Manager<Config> {\n\n #[inline]\n\n fn timers<Q: timer::Query>(&self, query: &mut Q) -> timer::Result {\n\n for path in self.paths.iter() {\n\n path.timers(query)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Iterate over all paths that have an interest in sending PATH_CHALLENGE\n\n/// or PATH_RESPONSE frames.\n\n///\n", "file_path": "quic/s2n-quic-transport/src/path/manager.rs", "rank": 69, "score": 205111.6500427603 }, { "content": "/// Returns a CONNECTION_CLOSE frame for the given connection Error, if any\n\n///\n\n/// The first item will be a close frame for an early (initial, handshake) packet.\n\n/// The second item will be a close frame for a 1-RTT (application data) packet.\n\npub fn as_frame<'a, F: connection::close::Formatter>(\n\n error: Error,\n\n formatter: &'a F,\n\n context: &'a connection::close::Context<'a>,\n\n) -> Option<(ConnectionClose<'a>, ConnectionClose<'a>)> {\n\n match error {\n\n Error::Closed { initiator, .. 
} => {\n\n // don't send CONNECTION_CLOSE frames on remote-initiated errors\n\n if initiator.is_remote() {\n\n return None;\n\n }\n\n\n\n let error = transport::Error::NO_ERROR;\n\n let early = formatter.format_early_transport_error(context, error);\n\n let one_rtt = formatter.format_transport_error(context, error);\n\n\n\n Some((early, one_rtt))\n\n }\n\n Error::Transport {\n\n code,\n", "file_path": "quic/s2n-quic-core/src/connection/error.rs", "rank": 70, "score": 204160.96374042117 }, { "content": "/// Helper method to move the given BBR congestion controller into the\n\n/// ProbeBW state with the given CyclePhase\n\nfn enter_probe_bw_state<Pub: Publisher>(\n\n bbr: &mut BbrCongestionController,\n\n cycle_phase: CyclePhase,\n\n publisher: &mut Pub,\n\n) {\n\n let now = NoopClock.get_time();\n\n\n\n match bbr.state {\n\n State::Startup => {\n\n bbr.enter_drain(publisher);\n\n bbr.enter_probe_bw(\n\n false,\n\n &mut random::testing::Generator::default(),\n\n now,\n\n publisher,\n\n );\n\n }\n\n State::Drain | State::ProbeRtt(_) => {\n\n bbr.enter_probe_bw(\n\n false,\n", "file_path": "quic/s2n-quic-core/src/recovery/bbr/tests.rs", "rank": 71, "score": 204045.13508814026 }, { "content": "pub fn implementations(mut impls: Vec<Implementation>) -> Vec<Implementation> {\n\n Avx2::call_supported(|| {\n\n impls.push(Implementation {\n\n name: \"s2n_quic/std/avx2\",\n\n new: |key| {\n\n let ghash = x86::GHash::new(key);\n\n Box::new(<Impl<_, Avx2>>::new(ghash))\n\n },\n\n });\n\n impls.push(Implementation {\n\n name: \"s2n_quic/pre_h/avx2\",\n\n new: |key| {\n\n let ghash = <Array<hkey::H, MAX_BLOCKS>>::new(key);\n\n Box::new(<Impl<_, Avx2>>::new(ghash))\n\n },\n\n });\n\n impls.push(Implementation {\n\n name: \"s2n_quic/pre_hr/avx2\",\n\n new: |key| {\n\n let ghash = <Array<hkey::Hr, MAX_BLOCKS>>::new(key);\n\n Box::new(<Impl<_, Avx2>>::new(ghash))\n\n },\n\n });\n\n });\n\n impls\n\n}\n", "file_path": "quic/s2n-quic-crypto/src/ghash/x86/testing.rs", "rank": 72, 
"score": 203499.08493876213 }, { "content": "#[test]\n\npub fn client_peer_id_registry_should_not_register_cid() {\n\n let mut random_generator = random::testing::Generator(123);\n\n let mut mapper = ConnectionIdMapper::new(&mut random_generator, endpoint::Type::Server);\n\n let reg =\n\n mapper.create_client_peer_id_registry(InternalConnectionIdGenerator::new().generate_id());\n\n\n\n assert!(reg.registered_ids.is_empty());\n\n assert!(reg.is_empty());\n\n}\n", "file_path": "quic/s2n-quic-transport/src/connection/peer_id_registry/tests.rs", "rank": 73, "score": 202465.4824584053 }, { "content": "fn create_stream_manager(local_ep_type: endpoint::Type) -> AbstractStreamManager<MockStream> {\n\n let initial_local_limits = create_default_initial_flow_control_limits();\n\n let initial_peer_limits = create_default_initial_flow_control_limits();\n\n\n\n // set local limits high so that they are not a constraint\n\n let limits = ConnectionLimits::default()\n\n .with_max_open_local_bidirectional_streams(1000)\n\n .unwrap()\n\n .with_max_open_local_unidirectional_streams(1000)\n\n .unwrap();\n\n\n\n AbstractStreamManager::<MockStream>::new(\n\n &limits,\n\n local_ep_type,\n\n initial_local_limits,\n\n initial_peer_limits,\n\n )\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/stream/manager/tests.rs", "rank": 74, "score": 201291.1599765446 }, { "content": "pub fn implementations(mut impls: Vec<Implementation>) -> Vec<Implementation> {\n\n impls.push(Implementation {\n\n name: \"RustCrypto\",\n\n new: |key| Box::new(Impl::new(&key.into())),\n\n });\n\n impls\n\n}\n", "file_path": "quic/s2n-quic-crypto/src/ghash/testing/rust_crypto.rs", "rank": 75, "score": 200853.92168885295 }, { "content": "#[test]\n\nfn can_query_stream_readiness() {\n\n let max_send_buffer_size = 1500;\n\n for size in [None, Some(1000usize), Some(2000)] {\n\n for with_context in [false, true] {\n\n let test_env_config = TestEnvironmentConfig {\n\n max_send_buffer_size,\n\n stream_id: 
StreamId::initial(endpoint::Type::Client, StreamType::Unidirectional),\n\n local_endpoint_type: endpoint::Type::Client,\n\n ..Default::default()\n\n };\n\n let mut test_env = setup_stream_test_env_with_config(test_env_config);\n\n\n\n dbg!(size);\n\n dbg!(with_context);\n\n\n\n let mut expected_buffer_size = max_send_buffer_size;\n\n\n\n // potentially fill the buffer\n\n if let Some(size) = size {\n\n test_env\n", "file_path": "quic/s2n-quic-transport/src/stream/send_stream/tests.rs", "rank": 76, "score": 200513.433917507 }, { "content": "#[derive(Debug, Default)]\n\nstruct Stream {\n\n buffer: Vec<u8>,\n\n read_waker: Option<Waker>,\n\n write_waker: Option<Waker>,\n\n is_closed: bool,\n\n max_buffer: usize,\n\n}\n\n\n\nimpl Stream {\n\n fn new(max_buffer: usize) -> Self {\n\n Self {\n\n max_buffer,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n fn close(&mut self) {\n\n self.is_closed = true;\n\n self.wake_writer();\n\n self.wake_reader();\n", "file_path": "netbench/netbench/src/testing.rs", "rank": 77, "score": 200291.75054951617 }, { "content": "pub trait Endpoint: 'static + Debug + Send {\n\n type CongestionController: CongestionController;\n\n\n\n fn new_congestion_controller(&mut self, path_info: PathInfo) -> Self::CongestionController;\n\n}\n\n\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct PathInfo<'a> {\n\n pub remote_address: SocketAddress<'a>,\n\n pub application_protocol: Option<&'a [u8]>,\n\n pub max_datagram_size: u16,\n\n}\n\n\n\nimpl<'a> PathInfo<'a> {\n\n #[allow(deprecated)]\n\n pub fn new(remote_address: &'a inet::SocketAddress) -> Self {\n\n Self {\n\n remote_address: remote_address.into_event(),\n\n application_protocol: None,\n\n max_datagram_size: MINIMUM_MTU,\n\n }\n\n }\n\n}\n\n\n", "file_path": "quic/s2n-quic-core/src/recovery/congestion_controller.rs", "rank": 78, "score": 200247.62576470536 }, { "content": "#[test]\n\nfn detach_modes() {\n\n for detached in [false, true] {\n\n let test_env_config = TestEnvironmentConfig {\n\n 
stream_id: StreamId::initial(endpoint::Type::Client, StreamType::Unidirectional),\n\n local_endpoint_type: endpoint::Type::Client,\n\n ..Default::default()\n\n };\n\n let mut test_env = setup_stream_test_env_with_config(test_env_config);\n\n\n\n let mut chunks = gen_pattern_test_chunks(VarInt::from_u8(0), &[16]);\n\n let mut request = ops::Request::default();\n\n\n\n request.send(&mut chunks).finish();\n\n\n\n if detached {\n\n request.detach_tx();\n\n }\n\n\n\n test_env\n\n .run_request(&mut request, false)\n", "file_path": "quic/s2n-quic-transport/src/stream/send_stream/tests.rs", "rank": 79, "score": 199373.32300219114 }, { "content": "/// Calculates the minimum packet length required such that a packet is indistinguishable from\n\n/// other valid QUIC version 1 packets.\n\npub fn min_indistinguishable_packet_len(max_tag_len: usize) -> usize {\n\n MIN_INDISTINGUISHABLE_PACKET_LEN_WITHOUT_TAG + max_tag_len\n\n}\n\n\n", "file_path": "quic/s2n-quic-core/src/packet/stateless_reset.rs", "rank": 80, "score": 198935.2364668553 }, { "content": "/// Configuration parameters for a QUIC endpoint\n\npub trait Config: 'static + Send + Sized + core::fmt::Debug {\n\n /// The type of the TLS endpoint which is utilized\n\n type TLSEndpoint: tls::Endpoint;\n\n type CongestionControllerEndpoint: congestion_controller::Endpoint;\n\n /// The connections type\n\n type Connection: connection::Trait<Config = Self>;\n\n /// The type of lock that synchronizes connection state between threads\n\n type ConnectionLock: connection::Lock<Self::Connection>;\n\n /// The connection ID format\n\n type ConnectionIdFormat: connection::id::Format;\n\n /// The stateless reset token generator\n\n type StatelessResetTokenGenerator: stateless_reset::token::Generator;\n\n /// The random data generator\n\n type RandomGenerator: random::Generator;\n\n /// The validation token format\n\n type TokenFormat: s2n_quic_core::token::Format;\n\n /// The endpoint limits\n\n type EndpointLimits: 
endpoint::Limiter;\n\n /// The connection limits\n\n type ConnectionLimits: connection::limits::Limiter;\n", "file_path": "quic/s2n-quic-transport/src/endpoint/config.rs", "rank": 81, "score": 198844.1996235741 }, { "content": "//= https://www.rfc-editor.org/rfc/rfc9000#section-9.3\n\n//# If the recipient permits the migration, it MUST send subsequent\n\n//# packets to the new peer address and MUST initiate path validation\n\n//# (Section 8.2) to verify the peer's ownership of the address if\n\n//# validation is not already underway.\n\nfn initiate_path_challenge_if_new_path_is_not_validated() {\n\n // Setup:\n\n let mut publisher = Publisher::snapshot();\n\n let mut helper = helper_manager_with_paths(&mut publisher);\n\n assert!(!helper.manager[helper.first_path_id].is_validated());\n\n assert!(helper.manager[helper.first_path_id].is_challenge_pending());\n\n\n\n assert!(!helper.manager[helper.second_path_id].is_validated());\n\n helper.manager[helper.second_path_id].abandon_challenge(&mut publisher, 0);\n\n assert!(!helper.manager[helper.second_path_id].is_challenge_pending());\n\n assert_eq!(helper.manager.active_path_id(), helper.first_path_id);\n\n\n\n // Trigger:\n\n helper\n\n .manager\n\n .on_processed_packet(\n\n helper.second_path_id,\n\n None,\n\n path_validation::Probe::NonProbing,\n\n &mut random::testing::Generator(123),\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 82, "score": 198385.7309794359 }, { "content": "#[test]\n\nfn abandon_path_challenges_if_new_path_is_validated() {\n\n // Setup:\n\n let mut publisher = Publisher::snapshot();\n\n let mut helper = helper_manager_with_paths(&mut publisher);\n\n assert!(helper.manager[helper.first_path_id].is_challenge_pending());\n\n assert!(helper.manager[helper.second_path_id].is_challenge_pending());\n\n assert_eq!(helper.manager.active_path_id(), helper.first_path_id);\n\n\n\n // simulate receiving a handshake packet to force path validation\n\n 
helper.manager[helper.second_path_id].on_handshake_packet();\n\n assert!(helper.manager[helper.second_path_id].is_validated());\n\n\n\n // Trigger:\n\n helper\n\n .manager\n\n .on_processed_packet(\n\n helper.second_path_id,\n\n None,\n\n path_validation::Probe::NonProbing,\n\n &mut random::testing::Generator(123),\n\n &mut publisher,\n\n )\n\n .unwrap();\n\n\n\n // Expectation:\n\n assert_eq!(helper.manager.active_path_id(), helper.second_path_id);\n\n assert!(!helper.manager[helper.first_path_id].is_challenge_pending());\n\n assert!(!helper.manager[helper.second_path_id].is_challenge_pending());\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 83, "score": 198375.72124735743 }, { "content": "struct Stream {\n\n rx: Option<ReceiveStream>,\n\n tx: Option<SendStream>,\n\n}\n\n\n\nimpl Stream {\n\n fn new(stream: impl SplittableStream) -> Self {\n\n let (rx, tx) = stream.split();\n\n let rx = rx.map(ReceiveStream::new);\n\n let tx = tx.map(SendStream::new);\n\n Self { rx, tx }\n\n }\n\n}\n\n\n", "file_path": "netbench/netbench/src/s2n_quic.rs", "rank": 84, "score": 197361.33883774283 }, { "content": "#[xdp(name = \"s2n_quic_xdp\")]\n\npub fn s2n_quic_xdp(ctx: XdpContext) -> u32 {\n\n let action = handle_packet(&ctx);\n\n\n\n #[cfg(feature = \"trace\")]\n\n {\n\n use aya_log_ebpf as log;\n\n match action {\n\n xdp_action::XDP_DROP => log::trace!(&ctx, \"ACTION: DROP\"),\n\n xdp_action::XDP_PASS => log::trace!(&ctx, \"ACTION: PASS\"),\n\n xdp_action::XDP_REDIRECT => log::trace!(&ctx, \"ACTION: REDIRECT\"),\n\n xdp_action::XDP_ABORTED => log::trace!(&ctx, \"ACTION: ABORTED\"),\n\n _ => (),\n\n }\n\n }\n\n\n\n action\n\n}\n\n\n", "file_path": "tools/xdp/ebpf/src/main.rs", "rank": 85, "score": 197314.07625697934 }, { "content": "fn mtl_test<C>(server_cert: &str, f: fn(crate::Connection) -> C)\n\nwhere\n\n C: 'static + core::future::Future<Output = ()> + Send,\n\n{\n\n let model = Model::default();\n\n 
model.set_delay(Duration::from_millis(50));\n\n\n\n test(model, |handle| {\n\n let server_tls = build_server_mtls_provider(server_cert)?;\n\n let server = Server::builder()\n\n .with_io(handle.builder().build()?)?\n\n .with_tls(server_tls)?\n\n .with_event(events())?\n\n .start()?;\n\n\n\n let addr = start_server(server)?;\n\n\n\n let client_tls = build_client_mtls_provider(certificates::MTLS_CA_CERT)?;\n\n let client = Client::builder()\n\n .with_io(handle.builder().build().unwrap())?\n", "file_path": "quic/s2n-quic/src/tests/client_handshake_confirm.rs", "rank": 86, "score": 196864.317324474 }, { "content": "// add new path when receiving a datagram on different remote address\n\n// Setup:\n\n// - create path manger with one path\n\n//\n\n// Trigger:\n\n// - call on_datagram_received with new remote address\n\n//\n\n// Expectation:\n\n// - assert we have two paths\n\nfn test_adding_new_path() {\n\n // Setup:\n\n let mut publisher = Publisher::snapshot();\n\n let first_conn_id = connection::PeerId::try_from_bytes(&[1]).unwrap();\n\n let new_addr: SocketAddr = \"127.0.0.1:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n\n let first_path = ServerPath::new(\n\n new_addr,\n\n first_conn_id,\n\n connection::LocalId::TEST_ID,\n\n RttEstimator::default(),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n );\n\n let mut manager = manager_server(first_path);\n\n\n\n // verify we have one path\n\n assert!(manager.path(&new_addr).is_some());\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 87, "score": 196807.74328825343 }, { "content": "//= https://www.rfc-editor.org/rfc/rfc9000#section-9\n\n//= type=test\n\n//# If a client receives packets from an unknown server address,\n\n//# the client MUST discard these packets.\n\n//\n\n// Setup:\n\n// - create path manager with one path as a client\n\n//\n\n// Trigger:\n\n// - call on_datagram_received with new remote 
address bit\n\n//\n\n// Expectation:\n\n// - asset on_datagram_received errors\n\n// - assert we have one path\n\nfn do_not_add_new_path_if_client() {\n\n // Setup:\n\n let first_conn_id = connection::PeerId::try_from_bytes(&[1]).unwrap();\n\n let first_path = ClientPath::new(\n\n Default::default(),\n\n first_conn_id,\n\n connection::LocalId::TEST_ID,\n\n RttEstimator::default(),\n\n Default::default(),\n\n false,\n\n DEFAULT_MAX_MTU,\n\n );\n\n let mut manager = manager_client(first_path);\n\n let mut publisher = Publisher::snapshot();\n\n\n\n // verify we have one path\n\n let new_addr: SocketAddr = \"127.0.0.1:8001\".parse().unwrap();\n\n let new_addr = SocketAddress::from(new_addr);\n\n let new_addr = RemoteAddress::from(new_addr);\n\n assert_eq!(manager.paths.len(), 1);\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 88, "score": 196804.85893704457 }, { "content": "enum AttemptState {\n\n /// The attempt is currently waiting for capacity in the `ConnectorSender` to make the `Request`\n\n Connect(Request, ConnectorSender, ConnectionReceiver),\n\n /// The attempt is currently waiting for a response back from the endpoint on the `ConnectionReceiver`\n\n Waiting(ConnectionReceiver),\n\n /// This is an intermediate state and should not persist across calls to `poll`\n\n Unreachable,\n\n}\n\n\n\nimpl Future for Attempt {\n\n type Output = Result<Connection, connection::Error>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n loop {\n\n match core::mem::replace(&mut self.state, AttemptState::Unreachable) {\n\n AttemptState::Connect(request, mut opener, response) => {\n\n match opener.poll_ready(cx) {\n\n Poll::Ready(Ok(())) => {\n\n match opener.try_send(request) {\n\n Ok(_) => {\n", "file_path": "quic/s2n-quic-transport/src/endpoint/connect.rs", "rank": 89, "score": 196747.3969895038 }, { "content": "#[test]\n\nfn on_transmit_queries_streams_for_data() {\n\n fn assert_stream_write_state(\n\n 
manager: &mut AbstractStreamManager<MockStream>,\n\n stream_id: StreamId,\n\n expected_on_transmit_count: usize,\n\n expected_on_transmit_try_write: usize,\n\n ) {\n\n manager.with_asserted_stream(stream_id, |stream| {\n\n assert_eq!(expected_on_transmit_count, stream.on_transmit_count);\n\n assert_eq!(\n\n expected_on_transmit_try_write,\n\n stream.on_transmit_try_write_frames\n\n );\n\n });\n\n }\n\n\n\n let mut manager = create_stream_manager(endpoint::Type::Server);\n\n let mut frame_buffer = OutgoingFrameBuffer::new();\n\n\n\n // Create some open Streams with interests\n", "file_path": "quic/s2n-quic-transport/src/stream/manager/tests.rs", "rank": 90, "score": 196249.8477291669 }, { "content": "/// ACK range was sent\n\nstruct AckRangeSent {\n\n packet_header: PacketHeader,\n\n path_id: u64,\n\n ack_range: RangeInclusive<u64>,\n\n}\n\n\n\n#[event(\"transport:packet_dropped\")]\n", "file_path": "quic/s2n-quic-events/events/connection.rs", "rank": 91, "score": 196242.99727398847 }, { "content": "//= https://www.rfc-editor.org/rfc/rfc9000#section-9.3\n\n//= type=test\n\n//# After changing the address to which it sends non-probing packets, an\n\n//# endpoint can abandon any path validation for other addresses.\n\n//\n\n// A non-probing (path validation probing) packet will cause the path to become an active\n\n// path but the path is still not validated.\n\nfn dont_abandon_path_challenge_if_new_path_is_not_validated() {\n\n // Setup:\n\n let mut publisher = Publisher::snapshot();\n\n let mut helper = helper_manager_with_paths(&mut publisher);\n\n assert!(!helper.manager[helper.first_path_id].is_validated());\n\n assert!(helper.manager[helper.first_path_id].is_challenge_pending());\n\n\n\n assert!(!helper.manager[helper.second_path_id].is_validated());\n\n assert!(helper.manager[helper.second_path_id].is_challenge_pending());\n\n assert_eq!(helper.manager.active_path_id(), helper.first_path_id);\n\n\n\n // Trigger:\n\n helper\n\n .manager\n\n 
.on_processed_packet(\n\n helper.second_path_id,\n\n None,\n\n path_validation::Probe::NonProbing,\n\n &mut random::testing::Generator(123),\n\n &mut publisher,\n\n )\n\n .unwrap();\n\n\n\n // Expectation:\n\n assert!(!helper.manager[helper.second_path_id].is_validated());\n\n assert_eq!(helper.manager.active_path_id(), helper.second_path_id);\n\n assert!(helper.manager[helper.second_path_id].is_challenge_pending());\n\n}\n\n\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 92, "score": 195749.60393357745 }, { "content": "/// A new path was created\n\nstruct PathCreated<'a> {\n\n active: Path<'a>,\n\n new: Path<'a>,\n\n}\n\n\n\n#[event(\"transport:frame_sent\")]\n", "file_path": "quic/s2n-quic-events/events/connection.rs", "rank": 93, "score": 195575.42998384597 }, { "content": "#[event(\"transport:tx_stream_progress\")]\n\nstruct TxStreamProgress {\n\n bytes: usize,\n\n}\n\n\n\n#[event(\"connectivity::keep_alive_timer_expired\")]\n\npub struct KeepAliveTimerExpired {\n\n timeout: Duration,\n\n}\n\n\n\n#[event(\"connectivity:mtu_updated\")]\n", "file_path": "quic/s2n-quic-events/events/connection.rs", "rank": 94, "score": 195516.69601101047 }, { "content": "#[event(\"transport:rx_stream_progress\")]\n\nstruct RxStreamProgress {\n\n bytes: usize,\n\n}\n\n\n", "file_path": "quic/s2n-quic-events/events/connection.rs", "rank": 95, "score": 195516.69601101047 }, { "content": "#[test]\n\nfn set_path_challenge_on_active_path_on_connection_migration() {\n\n // Setup:\n\n let mut publisher = Publisher::snapshot();\n\n let mut helper = helper_manager_with_paths(&mut publisher);\n\n helper.manager[helper.zero_path_id].abandon_challenge(&mut publisher, 0);\n\n assert!(!helper.manager[helper.zero_path_id].is_challenge_pending());\n\n assert_eq!(\n\n helper.manager.last_known_active_validated_path.unwrap(),\n\n helper.zero_path_id.as_u8()\n\n );\n\n\n\n // Trigger:\n\n helper\n\n .manager\n\n .update_active_path(\n\n helper.second_path_id,\n\n &mut 
random::testing::Generator(123),\n\n &mut publisher,\n\n )\n\n .unwrap();\n\n\n\n // Expectation:\n\n assert!(helper.manager[helper.first_path_id].is_challenge_pending());\n\n}\n\n\n\n#[test]\n", "file_path": "quic/s2n-quic-transport/src/path/manager/tests.rs", "rank": 96, "score": 195312.03646986544 }, { "content": "//= https://tools.ietf.org/id/draft-marx-qlog-event-definitions-quic-h3-02#5.4.2\n\n/// Recovery metrics updated\n\nstruct RecoveryMetrics<'a> {\n\n path: Path<'a>,\n\n min_rtt: Duration,\n\n smoothed_rtt: Duration,\n\n latest_rtt: Duration,\n\n rtt_variance: Duration,\n\n max_ack_delay: Duration,\n\n pto_count: u32,\n\n congestion_window: u32,\n\n bytes_in_flight: u32,\n\n congestion_limited: bool,\n\n}\n\n\n\n#[event(\"recovery:congestion\")]\n", "file_path": "quic/s2n-quic-events/events/connection.rs", "rank": 97, "score": 195236.33210881156 }, { "content": "fn execute_instructions(test_env: &mut TestEnvironment, instructions: &[Instruction]) {\n\n println!(\"executing {} instructions\", instructions.len());\n\n for (id, instruction) in instructions.iter().enumerate() {\n\n println!(\"Executing instruction {:?} {:?}\", id, instruction);\n\n match instruction {\n\n Instruction::EnqueueData(offset, size, expect_success) => {\n\n let data = Bytes::from(gen_pattern_test_data(*offset, *size));\n\n\n\n let poll_result = test_env.poll_push(data);\n\n\n\n if *expect_success {\n\n assert_eq!(poll_result, Poll::Ready(Ok(())));\n\n } else {\n\n assert_eq!(poll_result, Poll::Pending);\n\n }\n\n }\n\n Instruction::Finish(expect_success) => {\n\n let poll_result = test_env.poll_finish();\n\n\n\n if *expect_success {\n", "file_path": "quic/s2n-quic-transport/src/stream/send_stream/tests.rs", "rank": 98, "score": 194991.62787254597 }, { "content": "#[test]\n\nfn stream_reports_stream_size_based_on_acquired_connection_window() {\n\n let test_env_config = TestEnvironmentConfig {\n\n max_send_buffer_size: 1500,\n\n initial_send_window: 100 * 1024,\n\n 
initial_connection_send_window_size: 1000,\n\n stream_id: StreamId::initial(endpoint::Type::Client, StreamType::Unidirectional),\n\n local_endpoint_type: endpoint::Type::Client,\n\n ..Default::default()\n\n };\n\n let mut test_env = setup_stream_test_env_with_config(test_env_config);\n\n\n\n let reset_error_code = ApplicationErrorCode::new(0x3333_4444).unwrap();\n\n\n\n // Enqueue data and get blocked on the flow control window\n\n execute_instructions(\n\n &mut test_env,\n\n &[\n\n Instruction::EnqueueData(VarInt::from_u32(0), 5000, true),\n\n Instruction::EnqueueData(VarInt::from_u32(2000), 1, false),\n\n Instruction::CheckInterests(stream_interests(&[\"tx\"])),\n", "file_path": "quic/s2n-quic-transport/src/stream/send_stream/tests.rs", "rank": 99, "score": 194474.20412004282 } ]
Rust
src/util.rs
anderejd/meshlite
2a3b1f06b0dae801b55b10ee8f78c11c9cee5ebb
use cgmath::Point3;
use cgmath::Vector3;
use cgmath::prelude::*;
use cgmath::Deg;
use cgmath::Rad;

/*
Range of the Dot Product of Two Unit Vectors

  Dot      Angle
 1.000     0 degrees
 0.966    15 degrees
 0.866    30 degrees
 0.707    45 degrees
 0.500    60 degrees
 0.259    75 degrees
 0.000    90 degrees
-0.259   105 degrees
-0.500   120 degrees
-0.707   135 degrees
-0.866   150 degrees
-0.966   165 degrees
-1.000   180 degrees

Source: http://chortle.ccsu.edu/vectorlessons/vch09/vch09_6.html
*/

/// Unit normal of the triangle `(p1, p2, p3)`, oriented by the winding order
/// (right-hand rule on the two edges leaving `p1`).
///
/// NOTE: a degenerate (zero-area) triangle normalizes a zero vector, so the
/// result then contains NaN components; callers can screen for that with
/// [`is_valid_norm`].
pub fn norm(p1: Point3<f32>, p2: Point3<f32>, p3: Point3<f32>) -> Vector3<f32> {
    let side1 = p2 - p1;
    let side2 = p3 - p1;
    side1.cross(side2).normalize()
}

/// Component-wise approximate equality with an absolute tolerance of `0.01`
/// on every axis.
pub fn almost_eq(v1: Vector3<f32>, v2: Vector3<f32>) -> bool {
    (v1.x - v2.x).abs() <= 0.01
        && (v1.y - v2.y).abs() <= 0.01
        && (v1.z - v2.z).abs() <= 0.01
}

/// Tests whether `p` lies inside (or on the border of) triangle `(a, b, c)`
/// using barycentric coordinates built from cross products.
///
/// `p` is assumed to lie in the triangle's plane. A degenerate triangle makes
/// `denom` zero; the resulting NaN comparisons evaluate to `false`, so the
/// function safely answers "outside".
pub fn point_in_triangle(a: Point3<f32>, b: Point3<f32>, c: Point3<f32>, p: Point3<f32>) -> bool {
    let u = b - a;
    let v = c - a;
    let w = p - a;
    // `p` must be on the same side of edge `v` as `b` is...
    let v_cross_w = v.cross(w);
    let v_cross_u = v.cross(u);
    if v_cross_w.dot(v_cross_u) < 0.0 {
        return false;
    }
    // ...and on the same side of edge `u` as `c` is.
    let u_cross_w = u.cross(w);
    let u_cross_v = u.cross(v);
    if u_cross_w.dot(u_cross_v) < 0.0 {
        return false;
    }
    // Barycentric coordinates r (along v) and t (along u); inside when they
    // sum to at most one.
    let denom = u_cross_v.magnitude();
    let r = v_cross_w.magnitude() / denom;
    let t = u_cross_w.magnitude() / denom;
    r + t <= 1.0
}

/// Counter-clockwise angle in degrees, in `[0, 360)`, from unit vector `a` to
/// unit vector `b` when viewed along `direct` (the rotation axis / plane
/// normal).
pub fn angle360(a: Vector3<f32>, b: Vector3<f32>, direct: Vector3<f32>) -> f32 {
    // Clamp into acos' domain: rounding error on unit vectors can push the
    // dot product slightly outside [-1, 1], which would produce NaN.
    let dot = a.dot(b).max(-1.0).min(1.0);
    let angle = Deg::from(Rad::acos(dot)).0;
    let c = a.cross(b);
    if c.dot(direct) < 0.0 {
        // The rotation from `a` to `b` is clockwise around `direct`, so the
        // counter-clockwise angle is the reflex complement.
        // BUGFIX: this previously returned `180.0 + angle`, which maps the
        // (180, 360) range in reverse order (e.g. a true angle of 359 degrees
        // came back as 181).
        360.0 - angle
    } else {
        angle
    }
}

/// Which side of an oriented plane a point falls on.
#[derive(PartialEq)]
#[derive(Debug)]
pub enum PointSide {
    Front,
    Back,
    Coincident,
}

/// Classifies `pt` against the plane through `pt_on_plane` with normal `norm`:
/// `Front` when on the side the normal points toward, `Back` on the opposite
/// side, `Coincident` when exactly on the plane.
pub fn point_side_on_plane(pt: Point3<f32>, pt_on_plane: Point3<f32>, norm: Vector3<f32>) -> PointSide {
    let line = pt - pt_on_plane;
    let dot = line.dot(norm);
    if dot > 0.0 {
        PointSide::Front
    } else if dot < 0.0 {
        PointSide::Back
    } else {
        PointSide::Coincident
    }
}

/// Result of intersecting a line segment with a plane.
#[derive(PartialEq)]
#[derive(Debug)]
pub enum SegmentPlaneIntersect {
    /// The segment's line crosses the plane outside the segment's extent.
    NoIntersection,
    /// The segment is parallel to the plane and disjoint from it.
    Parallel,
    /// The segment lies entirely in the plane.
    LiesIn,
    /// The segment properly crosses the plane at this point.
    Intersection(Point3<f32>),
}

/// Tolerance below which a dot product is treated as zero (i.e. the vectors
/// are treated as perpendicular / the segment as parallel to the plane).
pub const SMALL_NUM: f32 = 0.00000001;

/// Intersects the segment `p0..p1` with the plane through `pt_on_plane` with
/// normal `norm`.
///
/// Parametrizes the segment as `p0 + s_i * (p1 - p0)` and solves for the
/// plane crossing; only `s_i` in `[0, 1]` counts as a hit.
pub fn intersect_of_segment_and_plane(p0: Point3<f32>,
        p1: Point3<f32>,
        pt_on_plane: Point3<f32>,
        norm: Vector3<f32>) -> SegmentPlaneIntersect {
    let u = p1 - p0;
    let w = p0 - pt_on_plane;
    let d = norm.dot(u);
    let n = -norm.dot(w);
    if d.abs() < SMALL_NUM {
        // Segment direction is (near-)perpendicular to the normal.
        if n == 0.0 {
            return SegmentPlaneIntersect::LiesIn;
        }
        return SegmentPlaneIntersect::Parallel;
    }
    let s_i = n / d;
    if s_i < 0.0 || s_i > 1.0 || s_i.is_nan() || s_i.is_infinite() {
        return SegmentPlaneIntersect::NoIntersection;
    }
    SegmentPlaneIntersect::Intersection(p0 + (s_i * u))
}

/// Tests whether the segment `p0..p1` pierces the quad.
///
/// Only the first three corners are read: the quad is treated as the planar
/// parallelogram spanned by `quad[1] - quad[0]` and `quad[2] - quad[0]`.
/// Segments lying in (or parallel to) the quad's plane report `false`.
///
/// Accepts any slice of points (a `&Vec<Point3<f32>>` argument still works
/// via deref coercion).
pub fn is_segment_and_quad_intersect(p0: Point3<f32>, p1: Point3<f32>, quad: &[Point3<f32>]) -> bool {
    let r1 = p0;
    let r2 = p1;
    let s1 = quad[0];
    let s2 = quad[1];
    let s3 = quad[2];
    let ds21 = s2 - s1;
    let ds31 = s3 - s1;
    let n = ds21.cross(ds31);
    let dr = r1 - r2;
    let ndotdr = n.dot(dr);
    if ndotdr.abs() < SMALL_NUM {
        // Segment is parallel to the quad's plane.
        return false;
    }
    // Point where the segment's line meets the plane.
    let t = -n.dot(r1 - s1) / ndotdr;
    let m = r1 + (dr * t);
    // Express the hit point in the (ds21, ds31) basis and bounds-check it.
    let dms1 = m - s1;
    let u = dms1.dot(ds21);
    let v = dms1.dot(ds31);
    u >= 0.0 && u <= ds21.dot(ds21) && v >= 0.0 && v <= ds31.dot(ds31)
}

/// Tests whether two quads intersect by checking every edge of each quad
/// against the other quad's surface.
pub fn is_two_quads_intersect(first_quad: &[Point3<f32>], second_quad: &[Point3<f32>]) -> bool {
    for i in 0..second_quad.len() {
        if is_segment_and_quad_intersect(second_quad[i],
                second_quad[(i + 1) % second_quad.len()],
                first_quad) {
            return true;
        }
    }
    for i in 0..first_quad.len() {
        if is_segment_and_quad_intersect(first_quad[i],
                first_quad[(i + 1) % first_quad.len()],
                second_quad) {
            return true;
        }
    }
    false
}

/// Tests whether `point` lies strictly inside the open segment
/// `seg_begin..seg_end` (endpoints themselves answer `false`), within a
/// distance tolerance of `1e-5` from the segment's line.
pub fn is_point_on_segment(point: Point3<f32>, seg_begin: Point3<f32>, seg_end: Point3<f32>) -> bool {
    let v = seg_end - seg_begin;
    let w = point - seg_begin;
    let w_dot_v = w.dot(v);
    if w_dot_v <= 0.0 {
        // Projection falls at or before the start point.
        return false;
    }
    let v_dot_v = v.dot(v);
    if v_dot_v <= w_dot_v {
        // Projection falls at or beyond the end point.
        return false;
    }
    // Distance from the point to its projection on the segment.
    let t = seg_begin + (v * (w_dot_v / v_dot_v));
    let dist = t.distance(point);
    dist <= 0.00001
}

/// `true` when no component of `norm` is NaN (e.g. the output of [`norm`] on
/// a non-degenerate triangle).
pub fn is_valid_norm(norm: Vector3<f32>) -> bool {
    !norm.x.is_nan() && !norm.y.is_nan() && !norm.z.is_nan()
}

/// Heuristically picks a base-plane normal from a set of edge directions,
/// their endpoint positions, and per-direction weights.
///
/// Strategy, in order of preference: the normal of a triangle of positions
/// (for four or more inputs, the three highest-weighted ones), else the cross
/// product of the first pair of directions that are more than ~15 degrees
/// apart (|dot| < 0.966 — see the table at the top of this file). Returns
/// `None` when fewer than two directions exist or all candidates are
/// (anti-)parallel.
pub fn pick_base_plane_norm(directs: Vec<Vector3<f32>>, positions: Vec<Point3<f32>>, weights: Vec<f32>) -> Option<Vector3<f32>> {
    if directs.len() <= 1 {
        None
    } else if directs.len() <= 2 {
        if directs[0].dot(directs[1]).abs() < 0.966 {
            return Some(directs[0].cross(directs[1]).normalize())
        }
        None
    } else if directs.len() <= 3 {
        let norm = norm(positions[0], positions[1], positions[2]);
        if is_valid_norm(norm) {
            return Some(norm.normalize());
        }
        // Degenerate triangle; fall back to direction cross products.
        if directs[0].dot(directs[1]).abs() < 0.966 {
            return Some(directs[0].cross(directs[1]).normalize())
        } else if directs[1].dot(directs[2]).abs() < 0.966 {
            return Some(directs[1].cross(directs[2]).normalize())
        } else if directs[2].dot(directs[0]).abs() < 0.966 {
            return Some(directs[2].cross(directs[0]).normalize())
        } else {
            None
        }
    } else {
        // Four or more inputs: rank by weight (scaled to an integer so the
        // sort key is total-ordered) and use the top three.
        let mut weighted_indices : Vec<(usize, usize)> = Vec::new();
        for i in 0..weights.len() {
            weighted_indices.push((i, (weights[i] * 100.0) as usize));
        }
        weighted_indices.sort_by(|a, b| b.1.cmp(&a.1));
        let i0 = weighted_indices[0].0;
        let i1 = weighted_indices[1].0;
        let i2 = weighted_indices[2].0;
        let norm = norm(positions[i0], positions[i1], positions[i2]);
        if is_valid_norm(norm) {
            return Some(norm.normalize());
        }
        if directs[i0].dot(directs[i1]).abs() < 0.966 {
            return Some(directs[i0].cross(directs[i1]).normalize())
        } else if directs[i1].dot(directs[i2]).abs() < 0.966 {
            return Some(directs[i1].cross(directs[i2]).normalize())
        } else if directs[i2].dot(directs[i0]).abs() < 0.966 {
            return Some(directs[i2].cross(directs[i0]).normalize())
        } else {
            None
        }
    }
}

/// A vector perpendicular to `direct`, built by crossing with whichever world
/// axis is farther from `direct` (world Y when `direct` leans toward world X,
/// world X otherwise). Result is not normalized.
pub fn world_perp(direct: Vector3<f32>) -> Vector3<f32> {
    const WORLD_Y_AXIS : Vector3<f32> = Vector3 {x: 0.0, y: 1.0, z: 0.0};
    const WORLD_X_AXIS : Vector3<f32> = Vector3 {x: 1.0, y: 0.0, z: 0.0};
    if direct.dot(WORLD_X_AXIS).abs() > 0.707 {
        // Within ~45 degrees of the X axis; cross with Y to stay well-conditioned.
        direct.cross(WORLD_Y_AXIS)
    } else {
        direct.cross(WORLD_X_AXIS)
    }
}

/// Moves `vert_position` by scaling the component of `vert_ray` along
/// `deform_norm` by `deform_factor`.
///
/// The normal is flipped first, if needed, so it points the same way as
/// `vert_ray`; the position is then shifted by `(factor - 1)` times the
/// projection of the ray onto that normal.
pub fn calculate_deform_position(vert_position: Point3<f32>, vert_ray: Vector3<f32>, deform_norm: Vector3<f32>, deform_factor: f32) -> Point3<f32> {
    let revised_norm = if vert_ray.dot(deform_norm) < 0.0 {
        -deform_norm
    } else {
        deform_norm
    };
    let proj = vert_ray.project_on(revised_norm);
    let scaled_proj = proj * deform_factor;
    let scaled_vert_ray = Vector3 {x:vert_position.x, y:vert_position.y, z:vert_position.z} + (scaled_proj - proj);
    Point3 {x: scaled_vert_ray.x, y: scaled_vert_ray.y, z: scaled_vert_ray.z}
}

/// Builds a square of half-width `radius`, centered `radius` units ahead of
/// `position` along `direct` and perpendicular to `direct`. `base_norm` seeds
/// the in-plane orientation; corners are returned in winding order.
pub fn make_quad(position: Point3<f32>, direct: Vector3<f32>, radius: f32, base_norm: Vector3<f32>) -> Vec<Point3<f32>> {
    let direct_normalized = direct.normalize();
    let base_norm_normalized = base_norm.normalize();
    // Orient the base normal toward the travel direction.
    let dot = direct_normalized.dot(base_norm);
    let oriented_base_norm = {
        if dot > 0.0 {
            base_norm_normalized
        } else {
            -base_norm_normalized
        }
    };
    // If direction and base normal are nearly parallel (within ~45 degrees),
    // the cross product would be ill-conditioned; substitute a world-axis
    // perpendicular instead.
    let u = {
        if direct_normalized.dot(oriented_base_norm).abs() > 0.707 {
            let switched_base_norm = world_perp(oriented_base_norm);
            direct_normalized.cross(switched_base_norm)
        } else {
            direct_normalized.cross(oriented_base_norm)
        }
    };
    let v = u.cross(direct);
    let u = u.normalize() * radius;
    let v = v.normalize() * radius;
    let origin = position + direct * radius;
    let f = vec![origin - u - v,
        origin + u - v,
        origin + u + v,
        origin - u + v];
    f
}

/// Index of the vertex with the extreme `x` coordinate: the maximum `x` when
/// the first vertex's `x` is negative, otherwise the minimum `x`. Returns `0`
/// for an empty or single-element input.
pub fn pick_most_not_obvious_vertex(vertices: Vec<Point3<f32>>) -> usize {
    if vertices.len() <= 1 {
        return 0;
    }
    let mut choosen_index = 0;
    let mut choosen_x = vertices[0].x;
    // Scan toward +x when starting from the negative side, toward -x otherwise.
    let pick_max = choosen_x < 0.0;
    for i in 1..vertices.len() {
        let x = vertices[i].x;
        if pick_max {
            if x > choosen_x {
                choosen_index = i;
                choosen_x = x;
            }
        } else {
            if x < choosen_x {
                choosen_index = i;
                choosen_x = x;
            }
        }
    }
    choosen_index
}
use cgmath::Point3; use cgmath::Vector3; use cgmath::prelude::*; use cgmath::Deg; use cgmath::Rad; /* Range of the Dot Product of Two Unit Vectors Dot Angle 1.000 0 degrees 0.966 15 degrees 0.866 30 degrees 0.707 45 degrees 0.500 60 degrees 0.259 75 degrees 0.000 90 degrees -0.259 105 by degrees -0.500 120 degrees -0.707 135 degrees -0.866 150 degrees -0.966 165 degrees -1.000 180 degrees Source: http://chortle.ccsu.edu/vectorlessons/vch09/vch09_6.html */ pub fn norm(p1: Point3<f32>, p2: Point3<f32>, p3: Point3<f32>) -> Vector3<f32> { let side1 = p2 - p1; let side2 = p3 - p1; let perp = side1.cross(side2); perp.normalize() } pub fn almost_eq(v1: Vector3<f32>, v2: Vector3<f32>) -> bool { (v1.x - v2.x).abs() <= 0.01 && (v1.y - v2.y).abs() <= 0.01 && (v1.z - v2.z).abs() <= 0.01 } pub fn point_in_triangle(a: Point3<f32>, b: Point3<f32>, c: Point3<f32>, p: Point3<f32>) -> bool { let u = b - a; let v = c - a; let w = p - a; let v_cross_w = v.cross(w); let v_cross_u = v.cross(u); if v_cross_w.dot(v_cross_u) < 0.0 { return false; } let u_cross_w = u.cross(w); let u_cross_v = u.cross(v); if u_cross_w.dot(u_cross_v) < 0.0 { return false; } let denom = u_cross_v.magnitude(); let r = v_cross_w.magnitude() / denom; let t = u_cross_w.magnitude() / denom; r + t <= 1.0 } pub fn angle360(a: Vector3<f32>, b: Vector3<f32>, direct: Vector3<f32>) -> f32 { let angle = Rad::acos(a.dot(b)); let c = a.cross(b); if c.dot(direct) < 0.0 { 180.0 + Deg::from(angle).0 } else { Deg::from(angle).0 } } #[derive(PartialEq)] #[derive(Debug)] pub enum PointSide { Front, Back, Coincident } pub fn point_side_on_plane(pt: Point3<f32>, pt_on_plane: Point3<f32>, norm: Vector3<f32>) -> PointSide { let line = pt - pt_on_plane; let dot = line.dot(norm); if dot > 0.0 { PointSide::Front } else if dot < 0.0 { PointSide::Back } else { PointSide::Coincident } } #[derive(PartialEq)] #[derive(Debug)] pub enum SegmentPlaneIntersect { NoIntersection, Parallel, LiesIn, Intersection(Point3<f32>), } pub const SMALL_NUM : 
f32 = 0.00000001; pub fn intersect_of_segment_and_plane(p0: Point3<f32>, p1: Point3<f32>, pt_on_plane: Point3<f32>, norm: Vector3<f32>) -> SegmentPlaneIntersect { let u = p1 - p0; let w = p0 - pt_on_plane; let d = norm.dot(u); let n = -norm.dot(w); if d.abs() < SMALL_NUM { if n == 0.0 { return SegmentPlaneIntersect::LiesIn; } return SegmentPlaneIntersect::Parallel; } let s_i = n / d; if s_i < 0.0 || s_i > 1.0 || s_i.is_nan() || s_i.is_infinite() { return SegmentPlaneIntersect::NoIntersection; } SegmentPlaneIntersect::Intersection(p0 + (s_i * u)) } pub fn is_segment_and_quad_intersect(p0: Point3<f32>, p1: Point3<f32>, quad: &Vec<Point3<f32>>) -> bool { let r1 = p0; let r2 = p1; let s1 = quad[0]; let s2 = quad[1]; let s3 = quad[2]; let ds21 = s2 - s1; let ds31 = s3 - s1; let n = ds21.cross(ds31); let dr = r1 - r2; let ndotdr = n.dot(dr); if ndotdr.abs() < SMALL_NUM { return false; } let t = -n.dot(r1 - s1) / ndotdr; let m = r1 + (dr * t); let dms1 = m - s1; let u = dms1.dot(ds21); let v = dms1.dot(ds31); u >= 0.0 && u <= ds21.dot(ds21) && v >= 0.0 && v <= ds31.dot(ds31) } pub fn is_two_quads_intersect(first_quad: &Vec<Point3<f32>>, second_quad: &Vec<Point3<f32>>) -> bool { for i in 0..second_quad.len() { if is_segment_and_quad_intersect(second_quad[i], second_quad[(i + 1) % second_quad.len()], first_quad) { return true; } } for i in 0..first_quad.len() { if is_segment_and_quad_intersect(first_quad[i], first_quad[(i + 1) % first_quad.len()], second_quad) { return true; } } false } pub fn is_point_on_segment(point: Point3<f32>, seg_begin: Point3<f32>, seg_end: Point3<f32>) -> bool { let v = seg_end - seg_begin; let w = point - seg_begin; let w_dot_v = w.dot(v); if w_dot_v <= 0.0 { return false; } let v_dot_v = v.dot(v); if v_dot_v <= w_dot_v { return false; } let t = seg_begin + (v * (w_dot_v / v_dot_v)); let dist = t.distance(point); dist <= 0.00001 } pub fn is_valid_norm(norm: Vector3<f32>) -> bool { !norm.x.is_nan() && !norm.y.is_nan() && !norm.z.is_nan() } pub fn 
pick_base_plane_norm(directs: Vec<Vector3<f32>>, positions: Vec<Point3<f32>>, weights: Vec<f32>) -> Option<Vector3<f32>> { if directs.len() <= 1 { None } else if directs.len() <= 2 { if directs[0].dot(directs[1]).abs() < 0.966 { return Some(directs[0].cross(directs[1]).normalize()) } None } else if directs.len() <= 3 { let norm = norm(positions[0], positions[1], positions[2]); if is_valid_norm(norm) { return Some(norm.normalize()); } if directs[0].dot(directs[1]).abs() < 0.966 { return Some(directs[0].cross(directs[1]).normalize()) } else if directs[1].dot(directs[2]).abs() < 0.966 { return Some(directs[1].cross(directs[2]).normalize()) } else if directs[2].dot(directs[0]).abs() < 0.966 { return Some(directs[2].cross(directs[0]).normalize()) } else { None } } else { let mut weighted_indices : Vec<(usize, usize)> = Vec::new(); for i in 0..weights.len() { weighted_indices.push((i, (weights[i] * 100.0) as usize)); } weighted_indices.sort_by(|a, b| b.1.cmp(&a.1)); let i0 = weighted_indices[0].0; let i1 = weighted_indices[1].0; let i2 = weighted_indices[2].0; let norm = norm(positions[i0], positions[i1], positions[i2]); if is_valid_norm(norm) { return Some(norm.normalize()); } if directs[i0].dot(directs[i1]).abs() < 0.966 { return Some(directs[i0].cross(directs[i1]).normalize()) } else if directs[i1].dot(directs[i2]).abs() < 0.966 { return Some(directs[i1].cross(directs[i2]).normalize()) } else if directs[i2].dot(directs[i0]).abs() < 0.966 { return Some(directs[i2].cross(directs[i0]).normalize()) } else { None } } } pub fn world_perp(direct: Vector3<f32>) -> Vector3<f32> { const WORLD_Y_AXIS : Vector3<f32> = Vector3 {x: 0.0, y: 1.0, z: 0.0}; const WORLD_X_AXIS : Vector3<f32> = Vector3 {x: 1.0, y: 0.0, z: 0.0}; if direct.dot(WORLD_X_AXIS).abs() > 0.707 { direct.cross(WORLD_Y_AXIS) } else { direct.cross(WORLD_X_AXIS) } } pub fn calculate_deform_position(vert_position: Point3<f32>, vert_ray: Vector3<f32>, deform_norm: Vector3<f32>, deform_factor: f32) -> Point3<f32> { let 
revised_norm = if vert_ray.dot(deform_norm) < 0.0 { -deform_norm } else { deform_norm }; let proj = vert_ray.project_on(revised_norm); let scaled_proj = proj * deform_factor; let scaled_vert_ray = Vector3 {x:vert_position.x, y:vert_position.y, z:vert_position.z} + (scaled_proj - proj); Point3 {x: scaled_vert_ray.x, y: scaled_vert_ray.y, z: scaled_vert_ray.z} } pub fn make_quad(position: Point3<f32>, direct: Vector3<f32>, radius: f32, base_norm: Vector3<f32>) -> Vec<Point3<f32>> { let direct_normalized = direct.normalize(); let base_norm_normalized = base_norm.normalize(); let dot = direct_normalized.dot(base_norm); let oriented_base_norm = { if dot > 0.0 { base_norm_normalized } else { -base_norm_normalized } };
let v = u.cross(direct); let u = u.normalize() * radius; let v = v.normalize() * radius; let origin = position + direct * radius; let f = vec![origin - u - v, origin + u - v, origin + u + v, origin - u + v]; f } pub fn pick_most_not_obvious_vertex(vertices: Vec<Point3<f32>>) -> usize { if vertices.len() <= 1 { return 0; } let mut choosen_index = 0; let mut choosen_x = vertices[0].x; let pick_max = choosen_x < 0.0; for i in 1..vertices.len() { let x = vertices[i].x; if pick_max { if x > choosen_x { choosen_index = i; choosen_x = x; } } else { if x < choosen_x { choosen_index = i; choosen_x = x; } } } choosen_index }
let u = { if direct_normalized.dot(oriented_base_norm).abs() > 0.707 { let switched_base_norm = world_perp(oriented_base_norm); direct_normalized.cross(switched_base_norm) } else { direct_normalized.cross(oriented_base_norm) } };
assignment_statement
[ { "content": "pub fn cube() -> Mesh {\n\n let mut m = Mesh::new();\n\n let face_id = m.add_plane(1.0, 1.0);\n\n let normal = m.face_norm(face_id);\n\n m.extrude_face(face_id, normal, 1.0).translate(0.0, 0.0, -0.5);\n\n m\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 15, "score": 80018.65422955406 }, { "content": "fn edge_data_mut<'a>(\n\n input: &Mesh,\n\n id: Id,\n\n face_data_set: &mut Vec<Option<FaceData>>,\n\n edge_data_set: &'a mut Vec<Option<EdgeData>>,\n\n output: &mut Mesh,\n\n) -> &'a mut EdgeData {\n\n let id = input.peek_same_halfedge(id);\n\n if edge_data_set[id].is_some() {\n\n return edge_data_set[id].as_mut().unwrap();\n\n }\n\n let mid_point = input.edge_center(id);\n\n let (\n\n halfedge_face_id,\n\n opposite_face_id,\n\n next_halfedge_vertex_id,\n\n start_vertex_position,\n\n ) = {\n\n let halfedge = input.halfedge(id).unwrap();\n\n (\n", "file_path": "src/subdivide.rs", "rank": 16, "score": 66898.87575203368 }, { "content": "fn face_data_mut<'a>(\n\n input: &Mesh,\n\n id: Id,\n\n face_data_set: &'a mut Vec<Option<FaceData>>,\n\n output: &mut Mesh,\n\n) -> &'a mut FaceData {\n\n if face_data_set[id].is_some() {\n\n return face_data_set[id].as_mut().unwrap();\n\n }\n\n let average_of_points = input.face_center(id);\n\n face_data_set[id] = Some(FaceData {\n\n average_of_points,\n\n generated_vertex_id: output.add_vertex(average_of_points),\n\n });\n\n face_data_set[id].as_mut().unwrap()\n\n}\n\n\n", "file_path": "src/subdivide.rs", "rank": 17, "score": 66898.87575203368 }, { "content": "fn alloc_skeletonmesh_id(ctx: &mut RustContext) -> i32 {\n\n if ctx.free_skeletonmesh_ids.len() > 0 {\n\n ctx.free_skeletonmesh_ids.swap_remove(0)\n\n } else {\n\n ctx.skeletonmeshes.push(SkeletonMesh::new());\n\n ctx.skeletonmeshes.len() as i32\n\n }\n\n}\n\n\n\n\n\n// Commented out to allow a clean build free from warnings.\n\n//\n\n// fn free_skeletonmesh_id(ctx: &mut RustContext, id: i32) {\n\n// ctx.skeletonmeshes[id as usize - 1] = 
SkeletonMesh::new();\n\n// ctx.free_skeletonmesh_ids.push(id);\n\n// }\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn meshlite_create_context() -> *mut RustContext {\n\n Box::into_raw(Box::new(RustContext {\n", "file_path": "ffi/src/lib.rs", "rank": 18, "score": 60734.14678585435 }, { "content": "fn alloc_bmesh_id(ctx: &mut RustContext) -> i32 {\n\n if ctx.free_bmesh_ids.len() > 0 {\n\n ctx.free_bmesh_ids.swap_remove(0)\n\n } else {\n\n ctx.bmeshes.push(Bmesh::new());\n\n ctx.bmeshes.len() as i32\n\n }\n\n}\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 19, "score": 60734.14678585435 }, { "content": "fn alloc_mesh_id(ctx: &mut RustContext) -> i32 {\n\n if ctx.free_mesh_ids.len() > 0 {\n\n ctx.free_mesh_ids.swap_remove(0)\n\n } else {\n\n ctx.meshes.push(Mesh::new());\n\n ctx.meshes.len() as i32\n\n }\n\n}\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 20, "score": 60734.14678585435 }, { "content": "fn free_mesh_id(ctx: &mut RustContext, id: i32) {\n\n ctx.meshes[id as usize - 1] = Mesh::new();\n\n ctx.free_mesh_ids.push(id);\n\n}\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 21, "score": 56763.54375934033 }, { "content": "fn free_bmesh_id(ctx: &mut RustContext, id: i32) {\n\n ctx.bmeshes[id as usize - 1] = Bmesh::new();\n\n ctx.free_bmesh_ids.push(id);\n\n}\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 22, "score": 56763.54375934033 }, { "content": "fn predict_triangle_count(pec: &PolygonEdgeCounter) -> usize {\n\n let mut sum = 0;\n\n for i in 3..pec.polygon_edge_counts.len() {\n\n let polygons = pec.polygon_edge_counts[i];\n\n sum += polygons * (i - 2);\n\n }\n\n sum\n\n}\n\n\n", "file_path": "src/triangulate.rs", "rank": 23, "score": 51143.852526761635 }, { "content": "fn main() {\n\n let mut quad_mesh = cube();\n\n let mut all_vps = Vec::new(); // Vertices Per Second\n\n println!(concat!(\n\n \"faces | \",\n\n \"vertices | \",\n\n \"halfedges | \",\n\n \"edges | \",\n\n \"verts/s | \",\n\n \"time (ms)\"\n\n ));\n\n 
println!(\"----------+-----------+-----------+-----------+-----------+-----------\");\n\n let last = 8;\n\n for i in 0..=last {\n\n let now = Instant::now();\n\n let tri_mesh = quad_mesh.triangulate();\n\n let seconds = to_seconds_f64(&now.elapsed());\n\n let verts_per_second = (tri_mesh.vertex_count as f64 / seconds).round();\n\n all_vps.push(verts_per_second);\n\n println!(\n", "file_path": "examples/benchmark_triangulate/main.rs", "rank": 24, "score": 45671.89860495628 }, { "content": "fn main() {\n\n //let mut m = Mesh::new();\n\n //let face_id = m.add_plane(2.0, 1.0);\n\n //let normal = m.face_norm(face_id);\n\n //m.extrude_face(face_id, normal, 1.0);\n\n //m.save_obj(\"test.obj\").expect(\"save file failed\");\n\n\n\n /*\n\n let mut m1 = cube();\n\n let v1 = Vector3 {x: 0.0, y: -1.0, z: 0.0};\n\n let mut mat1 = Matrix4::from_translation(v1);\n\n let matr = Matrix4::from_angle_x(Rad::from(Deg(-90.0)));\n\n mat1 = mat1 * matr;\n\n m1.transform(&mat1);\n\n\n\n let mut m2 = plane();\n\n let v2 = Vector3 {x: 0.0, y: 2.0, z: 0.0};\n\n let mut mat2 = Matrix4::from_translation(v2);\n\n let matr = Matrix4::from_angle_x(Rad::from(Deg(90.0)));\n\n mat2 = mat2 * matr;\n", "file_path": "examples/obj_export/main.rs", "rank": 25, "score": 45671.89860495628 }, { "content": "fn main() {\n\n let mut mesh = cube();\n\n let mut all_vps = Vec::new(); // Vertices Per Second\n\n println!(concat!(\n\n \"faces | \",\n\n \"vertices | \",\n\n \"halfedges | \",\n\n \"edges | \",\n\n \"verts/s | \",\n\n \"time (ms)\"\n\n ));\n\n println!(\"----------+-----------+-----------+-----------+-----------+-----------\");\n\n for _ in 0..9 {\n\n let now = Instant::now();\n\n let new_mesh = mesh.subdivide();\n\n let seconds = to_seconds_f64(&now.elapsed());\n\n let verts_per_second = (new_mesh.vertex_count as f64 / seconds).round();\n\n all_vps.push(verts_per_second);\n\n println!(\n\n \"{: <9} | {: <9} | {: <9} | {: <9} | {: <9} | {: <9.2}\",\n", "file_path": 
"examples/benchmark_subdivide/main.rs", "rank": 26, "score": 45671.89860495628 }, { "content": "pub trait Triangulate {\n\n fn triangulate(&self) -> Self;\n\n}\n\n\n", "file_path": "src/triangulate.rs", "rank": 27, "score": 44411.109822415565 }, { "content": "pub trait Import {\n\n fn import(&mut self, filename: &str) -> io::Result<()>;\n\n}\n\n\n\nimpl Clone for Mesh {\n\n fn clone(&self) -> Self {\n\n let mut mesh = Mesh::new();\n\n mesh.add_mesh(self);\n\n mesh\n\n }\n\n}\n", "file_path": "src/mesh.rs", "rank": 28, "score": 44411.109822415565 }, { "content": "pub trait Export {\n\n fn export(&self, filename: &str) -> io::Result<()>;\n\n}\n\n\n", "file_path": "src/mesh.rs", "rank": 29, "score": 44411.109822415565 }, { "content": "pub trait Debug {\n\n fn add_debug_norm(&mut self, origin: Point3<f32>, norm: Vector3<f32>);\n\n}\n\n\n\nimpl Debug for Mesh {\n\n fn add_debug_norm(&mut self, origin: Point3<f32>, norm: Vector3<f32>) {\n\n let mut m = Mesh::new();\n\n let quad = make_quad(origin, norm, 0.01, norm);\n\n let face_id = m.add_positions(quad);\n\n m.extrude_face(face_id, norm, 0.5);\n\n self.add_mesh(&m);\n\n }\n\n}\n", "file_path": "src/debug.rs", "rank": 30, "score": 44411.109822415565 }, { "content": "pub trait Subdivide {\n\n fn subdivide(&self) -> Self;\n\n}\n\n\n\nimpl Subdivide for Mesh {\n\n fn subdivide(&self) -> Self {\n\n CatmullClarkSubdivider::new(self).generate()\n\n }\n\n}\n", "file_path": "src/subdivide.rs", "rank": 31, "score": 44411.109822415565 }, { "content": "#[test]\n\nfn verify_cube_subdivision() {\n\n let mesh = cube();\n\n let sub = mesh.subdivide();\n\n assert_eq!(26, sub.vertex_count);\n\n assert_eq!(24, sub.face_count);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 32, "score": 44275.36019297359 }, { "content": "#[test]\n\nfn verify_cube_triangulation() {\n\n let cube = cube();\n\n let tri = cube.triangulate();\n\n assert_eq!(8, tri.vertex_count);\n\n assert_eq!(12, tri.face_count);\n\n}\n", "file_path": 
"tests/integration_tests.rs", "rank": 33, "score": 44275.36019297359 }, { "content": "fn to_seconds_f64(d: &Duration) -> f64 {\n\n d.as_secs() as f64 + d.subsec_nanos() as f64 * 1e-9\n\n}\n\n\n", "file_path": "examples/benchmark_subdivide/main.rs", "rank": 34, "score": 34102.82877314609 }, { "content": "fn to_seconds_f64(d: &Duration) -> f64 {\n\n d.as_secs() as f64 + d.subsec_nanos() as f64 * 1e-9\n\n}\n\n\n", "file_path": "examples/benchmark_triangulate/main.rs", "rank": 35, "score": 34102.82877314609 }, { "content": " finalize_finished: false,\n\n }\n\n }\n\n\n\n pub fn add_source_vertex(&mut self, position: Point3<f32>, source_plane: Id, tag: Id) -> usize {\n\n let added_index = self.source_vertices.len();\n\n self.source_vertices.push(SourceVertex {position: position, source_plane: source_plane, tag: tag, index: added_index});\n\n self.candidates.push(added_index);\n\n added_index\n\n }\n\n\n\n fn calculate_face_vector(&self, p1: usize, p2: usize, base_normal: Vector3<f32>) -> Vector3<f32> {\n\n let v1 = &self.source_vertices[p1];\n\n let v2 = &self.source_vertices[p2];\n\n let seg = v2.position - v1.position;\n\n seg.cross(base_normal)\n\n }\n\n\n\n fn add_item(&mut self, p1: usize, p2: usize, base_normal: Vector3<f32>) {\n\n {\n", "file_path": "src/wrap.rs", "rank": 36, "score": 34.74166062247159 }, { "content": " if item.p1 == vertex_index || item.p2 == vertex_index {\n\n return 0.0;\n\n }\n\n let v1 = &self.source_vertices[item.p1].clone();\n\n let v2 = &self.source_vertices[item.p2].clone();\n\n let vp = &self.source_vertices[vertex_index].clone();\n\n if v1.source_plane == v2.source_plane && v1.source_plane == vp.source_plane {\n\n return 0.0;\n\n }\n\n let vd1 = self.calculate_face_vector(item.p1, item.p2, item.base_normal);\n\n let normal = norm(v2.position, v1.position, vp.position);\n\n let vd2 = self.calculate_face_vector(item.p1, item.p2, normal);\n\n let angle = Deg::from(vd2.angle(vd1));\n\n angle.0\n\n }\n\n\n\n pub fn finished(&mut self) -> 
bool {\n\n if !self.finalize_finished {\n\n return false;\n\n }\n", "file_path": "src/wrap.rs", "rank": 38, "score": 30.947517658885808 }, { "content": " self.items_map.get(&key)\n\n }\n\n\n\n pub fn add_startup(&mut self, p1: usize, p2: usize, base_normal: Vector3<f32>) {\n\n if self.items.len() == 0 {\n\n self.add_item(p1, p2, base_normal);\n\n }\n\n self.generated_face_edges_map.insert(WrapItemKey {p1: p2, p2: p1}, None);\n\n }\n\n\n\n fn is_edge_generated(&self, p1: usize, p2: usize) -> bool {\n\n let key = WrapItemKey {p1: p1, p2: p2};\n\n if self.generated_face_edges_map.get(&key).is_none() {\n\n return false;\n\n }\n\n true\n\n }\n\n\n\n fn angle_of_base_face_and_point(&self, item_index: usize, vertex_index: usize) -> f32 {\n\n let item = &self.items[item_index].clone();\n", "file_path": "src/wrap.rs", "rank": 39, "score": 30.43240830957927 }, { "content": " }\n\n\n\n fn make_cut(&self, position: Point3<f32>, direct: Vector3<f32>, radius: f32, base_norm: Vector3<f32>, subdiv_count: usize) -> Vec<Point3<f32>> {\n\n let mut cut : Vec<Point3<f32>> = make_quad(position, direct, radius, base_norm);\n\n let origin = position + direct * radius;\n\n for _ in 0..subdiv_count {\n\n let mut middle_cut : Vec<Point3<f32>> = Vec::new();\n\n let mut final_cut : Vec<Point3<f32>> = Vec::new();\n\n let length = (cut[0] - origin).magnitude() * 0.8;\n\n for i in 0..cut.len() {\n\n let a = cut[i] - origin;\n\n let b = cut[(i + 1) % cut.len()] - origin;\n\n let c = a + b;\n\n let new_point = origin + c.normalize_to(length);\n\n middle_cut.push(new_point);\n\n }\n\n let length = (middle_cut[0] - origin).magnitude();\n\n for i in 0..middle_cut.len() {\n\n let a = middle_cut[i] - origin;\n\n let b = middle_cut[(i + 1) % middle_cut.len()] - origin;\n", "file_path": "src/bmesh.rs", "rank": 41, "score": 27.46320192492782 }, { "content": " let mut weights : Vec<f32> = Vec::new();\n\n let neighbors = self.graph.neighbors_undirected(node_index);\n\n for other_index in neighbors {\n\n let 
direct = self.direct_of_nodes(node_index, other_index);\n\n let other = self.graph.node_weight(other_index).unwrap();\n\n directs.push(direct);\n\n positions.push(other.position);\n\n weights.push(other.radius);\n\n }\n\n pick_base_plane_norm(directs, positions, weights)\n\n }\n\n\n\n fn resolve_base_norm_from_node(&mut self, node_index: NodeIndex) {\n\n if self.graph.node_weight(node_index).unwrap().base_norm_resolved {\n\n return;\n\n }\n\n let base_norm = self.calculate_node_base_norm(node_index);\n\n if base_norm.is_none() {\n\n const WORLD_Z_AXIS : Vector3<f32> = Vector3 {x: 0.0, y: 0.0, z: 1.0};\n\n self.resolve_base_norm_for_leaves_from_node(node_index, WORLD_Z_AXIS);\n", "file_path": "src/bmesh.rs", "rank": 42, "score": 26.537103533933468 }, { "content": " let v1 = &self.source_vertices[p1];\n\n let v2 = &self.source_vertices[p2];\n\n if !self.items.is_empty() && v1.source_plane == v2.source_plane {\n\n return;\n\n }\n\n }\n\n if !self.find_item(p1, p2).is_none() || !self.find_item(p2, p1).is_none() {\n\n return;\n\n }\n\n if self.is_edge_generated(p1, p2) || self.is_edge_generated(p2, p1) {\n\n return;\n\n }\n\n let index = self.items.len();\n\n self.items.push(WrapItem {p3: 0, p1: p1, p2: p2, base_normal: base_normal, processed: false});\n\n self.items_map.insert(WrapItemKey {p1: p1, p2: p2}, index);\n\n self.items_list.push_front(index);\n\n }\n\n\n\n pub fn find_item(&self, p1: usize, p2: usize) -> Option<&usize> {\n\n let key = WrapItemKey {p1: p1, p2: p2};\n", "file_path": "src/wrap.rs", "rank": 43, "score": 26.364969645273284 }, { "content": "}\n\n\n\npub struct Face4 {\n\n pub p1: usize,\n\n pub p2: usize,\n\n pub p3: usize,\n\n pub p4: usize,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct SourceVertex {\n\n pub position: Point3<f32>,\n\n pub source_plane: Id,\n\n pub index: usize,\n\n pub tag: Id,\n\n}\n\n\n\npub struct GiftWrapper {\n\n items: Vec<WrapItem>,\n\n items_map: HashMap<WrapItemKey, usize>,\n", "file_path": "src/wrap.rs", "rank": 44, 
"score": 25.83085441135479 }, { "content": " pub fn set_deform_thickness(&mut self, thickness: f32) {\n\n self.deform_thickness = thickness;\n\n }\n\n\n\n pub fn set_deform_width(&mut self, width: f32) {\n\n self.deform_width = width;\n\n }\n\n\n\n pub fn enable_debug(&mut self, enable: bool) {\n\n self.debug_enabled = enable;\n\n }\n\n\n\n pub fn add_seam_requirement(&mut self) {\n\n self.seam_required = true;\n\n }\n\n\n\n pub fn get_node_base_norm(&self, node_id: usize) -> Vector3<f32> {\n\n self.graph.node_weight(NodeIndex::new(node_id)).unwrap().base_norm\n\n }\n\n\n", "file_path": "src/bmesh.rs", "rank": 45, "score": 24.116855376386077 }, { "content": " }\n\n }\n\n\n\n fn another_vertex_index_of_face3(&self, f: &Face3, p1: usize, p2: usize) -> usize {\n\n let indices = vec![f.p1, f.p2, f.p3];\n\n for index in indices {\n\n if index != p1 && index != p2 {\n\n return index;\n\n }\n\n }\n\n 0\n\n }\n\n\n\n fn find_pair_face3(&self, f: &Face3, used_ids: &HashMap<usize, bool>, q: &mut Vec<Face4>) -> Option<usize> {\n\n let indices = vec![f.p1, f.p2, f.p3];\n\n for i in 0..indices.len() {\n\n let next_i = (i + 1) % indices.len();\n\n let next_next_i = (i + 2) % indices.len();\n\n let paired_face3_id = self.generated_face_edges_map.get(&WrapItemKey {p1: indices[next_i], p2: indices[i]});\n\n if !paired_face3_id.is_none() && !paired_face3_id.unwrap().is_none() {\n", "file_path": "src/wrap.rs", "rank": 46, "score": 22.48055828753526 }, { "content": " }\n\n Point3::centroid(&points)\n\n }\n\n\n\n pub fn face_norm(&self, id: Id) -> Vector3<f32> {\n\n let face = self.face(id).unwrap();\n\n let mut points = Vec::new();\n\n for halfedge_id in FaceHalfedgeIterator::new(self, face.halfedge) {\n\n let halfedge = self.halfedge(halfedge_id).unwrap();\n\n let vertex = self.vertex(halfedge.vertex).unwrap();\n\n points.push(vertex.position);\n\n }\n\n if points.len() < 3 {\n\n return Vector3::zero();\n\n } else if points.len() == 3 {\n\n return norm(points[0], points[1], 
points[2]);\n\n }\n\n let mut total = Vector3::zero();\n\n for i in 0..points.len() {\n\n let n = norm(points[i], points[(i + 1) % points.len()], points[(i + 2) % points.len()]);\n", "file_path": "src/mesh.rs", "rank": 47, "score": 21.96998432816334 }, { "content": " if self.candidates.is_empty() {\n\n return true;\n\n }\n\n let mut rm_vec : Vec<usize> = Vec::new();\n\n for (i, &it) in self.candidates.iter().enumerate() {\n\n if self.is_vertex_closed(it) {\n\n rm_vec.push(i);\n\n }\n\n }\n\n for &i in rm_vec.iter().rev() {\n\n self.candidates.swap_remove(i);\n\n }\n\n self.candidates.is_empty()\n\n }\n\n\n\n fn find_best_vertex_on_the_left(&mut self, item_index: usize) -> Option<usize> {\n\n let p1 = self.items[item_index].p1;\n\n let p2 = self.items[item_index].p2;\n\n let mut max_angle = 0 as f32;\n\n let mut choosen_it = None;\n", "file_path": "src/wrap.rs", "rank": 48, "score": 21.90252034922779 }, { "content": " }\n\n }\n\n if (from_is_front && to_is_back) || (from_is_back && to_is_front) {\n\n let intersect = intersect_map.entry(edge.clone()).or_insert_with(|| {\n\n let p0 = self.vertex(from_vert_id).unwrap().position;\n\n let p1 = self.vertex(to_vert_id).unwrap().position;\n\n intersect_of_segment_and_plane(p0, p1, pt_on_plane, norm)\n\n });\n\n if let SegmentPlaneIntersect::Intersection(intersect_pt) = *intersect {\n\n if from_is_front || to_is_front {\n\n let new_vert_id = *front_intersect_map.entry(edge.clone()).or_insert_with(|| {\n\n front_mesh.add_vertex(intersect_pt)\n\n });\n\n if front_new_vert_set.insert(new_vert_id) {\n\n front_new_verts.push(new_vert_id);\n\n }\n\n if from_is_front {\n\n front_intersects[0] = new_vert_id;\n\n }\n\n if to_is_front {\n", "file_path": "src/mesh.rs", "rank": 49, "score": 20.868422732287577 }, { "content": " self.finalize_finished = true;\n\n for f in self.generated_faces.iter() {\n\n if used_ids.contains_key(&f.index) {\n\n continue;\n\n }\n\n used_ids.insert(f.index, true);\n\n let paired = self.find_pair_face3(&f, 
&used_ids, &mut quards);\n\n if !paired.is_none() {\n\n used_ids.insert(paired.unwrap(), true);\n\n continue;\n\n }\n\n let mut added_vertices = Vec::new();\n\n added_vertices.push(self.source_vertices[f.p1].tag);\n\n added_vertices.push(self.source_vertices[f.p2].tag);\n\n added_vertices.push(self.source_vertices[f.p3].tag);\n\n if 0 == mesh.add_vertices(added_vertices) {\n\n self.finalize_finished = false;\n\n }\n\n }\n\n for f in quards.iter() {\n", "file_path": "src/wrap.rs", "rank": 50, "score": 20.749123672744226 }, { "content": " }\n\n for other_index in indicies {\n\n match self.neighbor_count_map[&other_index.index()] {\n\n 1 => self.resolve_base_norm_for_leaves_from_node(other_index, base_norm),\n\n 2 => {\n\n let edge_base_norm = self.calculate_node_base_norm(other_index);\n\n if edge_base_norm.is_none() {\n\n self.resolve_base_norm_for_leaves_from_node(other_index, base_norm)\n\n } else {\n\n self.resolve_base_norm_for_leaves_from_node(other_index, edge_base_norm.unwrap())\n\n }\n\n },\n\n _ => {},\n\n }\n\n }\n\n }\n\n\n\n fn calculate_node_base_norm(&self, node_index: NodeIndex) -> Option<Vector3<f32>> {\n\n let mut directs : Vec<Vector3<f32>> = Vec::new();\n\n let mut positions : Vec<Point3<f32>> = Vec::new();\n", "file_path": "src/bmesh.rs", "rank": 51, "score": 20.60368725242819 }, { "content": " }\n\n\n\n pub fn add_vertex(&mut self, position: Point3<f32>) -> usize {\n\n let new_id = self.vertices.len() + 1;\n\n self.vertices.push(Vertex {\n\n id: new_id,\n\n halfedges: SmallVec::<[Id; VERTEX_HALFEDGE_INLINE_COUNT]>::new(),\n\n prev: 0,\n\n next: 0,\n\n position : position,\n\n alive: true,\n\n source: -1,\n\n });\n\n self.vertex_count += 1;\n\n new_id\n\n }\n\n\n\n pub fn add_halfedge(&mut self) -> Id {\n\n let new_id = self.halfedges.len() + 1;\n\n self.halfedges.push(Halfedge {\n", "file_path": "src/mesh.rs", "rank": 52, "score": 20.41650086147654 }, { "content": " fn resolve_base_norm(&mut self) {\n\n for &(node_id, _neighbor_count, _) in 
self.neighbor_count_vec.clone().iter() {\n\n self.resolve_base_norm_from_node(NodeIndex::new(node_id));\n\n }\n\n }\n\n\n\n fn output_debug_info_if_enabled(&mut self) {\n\n if self.debug_enabled {\n\n for &(node_id, _, _) in self.neighbor_count_vec.iter() {\n\n let node_index = NodeIndex::new(node_id);\n\n let node_origin = self.graph.node_weight(node_index).unwrap().position;\n\n let base_norm = self.graph.node_weight(node_index).unwrap().base_norm;\n\n self.mesh.add_debug_norm(node_origin, base_norm);\n\n }\n\n }\n\n }\n\n\n\n pub fn add_node(&mut self, position: Point3<f32>, radius: f32) -> usize {\n\n //println!(\"add_node position:{:?} radius:{:?}\", position, radius);\n\n let node = Node::new(radius, position);\n", "file_path": "src/bmesh.rs", "rank": 54, "score": 19.913492786398994 }, { "content": " pub fn set_node_cut_subdiv_count(&mut self, node_id: usize, subdiv_count: usize) {\n\n self.graph.node_weight_mut(NodeIndex::new(node_id)).unwrap().cut_subdiv_count = Some(subdiv_count);\n\n }\n\n\n\n pub fn set_node_round_way(&mut self, node_id: usize, round_way: i32) {\n\n self.graph.node_weight_mut(NodeIndex::new(node_id)).unwrap().round_way = Some(round_way);\n\n }\n\n\n\n fn resolve_base_norm_for_leaves_from_node(&mut self, node_index: NodeIndex, base_norm: Vector3<f32>) {\n\n if self.graph.node_weight(node_index).unwrap().base_norm_resolved {\n\n return;\n\n }\n\n self.graph.node_weight_mut(node_index).unwrap().base_norm_resolved = true;\n\n self.graph.node_weight_mut(node_index).unwrap().base_norm = base_norm;\n\n let mut indicies = Vec::new();\n\n {\n\n let neighbors = self.graph.neighbors_undirected(node_index);\n\n for other_index in neighbors {\n\n indicies.push(other_index);\n\n }\n", "file_path": "src/bmesh.rs", "rank": 55, "score": 19.644809631250393 }, { "content": " self.items[item_index].p3 = p3;\n\n let base_normal = norm(self.source_vertices[p1].position, \n\n self.source_vertices[p2].position,\n\n self.source_vertices[p3].position);\n\n let 
face_index = self.generated_faces.len();\n\n self.generated_faces.push(Face3 {p1: p1, p2: p2, p3: p3, norm: base_normal, index: face_index});\n\n self.add_item(p3, p2, base_normal);\n\n self.add_item(p1, p3, base_normal);\n\n self.generated_face_edges_map.insert(WrapItemKey {p1: p1, p2: p2}, Some(face_index));\n\n self.generated_face_edges_map.insert(WrapItemKey {p1: p2, p2: p3}, Some(face_index));\n\n self.generated_face_edges_map.insert(WrapItemKey {p1: p3, p2: p1}, Some(face_index));\n\n self.generated_vertex_edges_map.entry(p1).or_insert(Vec::new()).push(p2);\n\n self.generated_vertex_edges_map.entry(p1).or_insert(Vec::new()).push(p3);\n\n self.generated_vertex_edges_map.entry(p2).or_insert(Vec::new()).push(p3);\n\n self.generated_vertex_edges_map.entry(p2).or_insert(Vec::new()).push(p1);\n\n self.generated_vertex_edges_map.entry(p3).or_insert(Vec::new()).push(p1);\n\n self.generated_vertex_edges_map.entry(p3).or_insert(Vec::new()).push(p2);\n\n }\n\n }\n\n }\n", "file_path": "src/wrap.rs", "rank": 56, "score": 19.455747324607934 }, { "content": " }\n\n\n\n pub fn peek_item(&self) -> Option<usize> {\n\n for &item_index in self.items_list.iter() {\n\n if !self.items[item_index].processed {\n\n return Some(item_index);\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn is_edge_closed(&self, p1: usize, p2: usize) -> bool {\n\n self.generated_face_edges_map.contains_key(&WrapItemKey {p1: p1, p2: p2}) &&\n\n self.generated_face_edges_map.contains_key(&WrapItemKey {p1: p2, p2: p1})\n\n }\n\n\n\n fn is_vertex_closed(&self, vertex_index: usize) -> bool {\n\n let map = self.generated_vertex_edges_map.get(&vertex_index);\n\n if map.is_none() {\n\n return false;\n", "file_path": "src/wrap.rs", "rank": 57, "score": 19.370783082709778 }, { "content": " }\n\n for &other_index in map.unwrap() {\n\n if !self.is_edge_closed(vertex_index, other_index) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n fn generate(&mut self) {\n\n while let Some(item_index) = self.peek_item() {\n\n 
self.items[item_index].processed = true;\n\n let p1 = self.items[item_index].p1;\n\n let p2 = self.items[item_index].p2;\n\n if self.is_edge_closed(p1, p2) {\n\n continue;\n\n }\n\n let p3 = self.find_best_vertex_on_the_left(item_index);\n\n if !p3.is_none() {\n\n let p3 = p3.unwrap();\n", "file_path": "src/wrap.rs", "rank": 58, "score": 19.199461825498783 }, { "content": " }\n\n\n\n pub fn set_end_radius(&mut self, radius: f32) {\n\n self.end_radius = radius;\n\n }\n\n\n\n pub fn add_bone(&mut self, from: Point3<f32>, to: Point3<f32>) {\n\n let bone = Bone {from: from, to: to};\n\n self.bones.push(bone);\n\n }\n\n\n\n fn add_sphere(&mut self, position: Point3<f32>, radius: f32) {\n\n let mut mesh = Mesh::new();\n\n let face_id = mesh.add_plane(radius, radius);\n\n let normal = mesh.face_norm(face_id);\n\n mesh.extrude_face(face_id, normal, radius).translate(position.x, \n\n position.y, \n\n position.z - radius * 0.5);\n\n self.mesh += mesh.subdivide();\n\n }\n", "file_path": "src/skeletonmesh.rs", "rank": 59, "score": 19.010856355084876 }, { "content": " pub fn diff_convex_mesh(&self, other: &Mesh) -> Mesh {\n\n let (_, other_inner) = other.split_mesh_by_other(self);\n\n let (my_outter, _) = self.split_mesh_by_other(other);\n\n let mesh = other_inner.flip_mesh() + my_outter;\n\n mesh.weld().fix_tjunction().combine_adj_faces()\n\n }\n\n\n\n pub fn intersect_convex_mesh(&self, other: &Mesh) -> Mesh {\n\n let (_, other_inner) = other.split_mesh_by_other(self);\n\n let (_, my_inner) = self.split_mesh_by_other(other);\n\n let mesh = other_inner + my_inner;\n\n mesh.weld().fix_tjunction().combine_adj_faces()\n\n }\n\n\n\n pub fn split_mesh_by_plane(&self, pt_on_plane: Point3<f32>, norm: Vector3<f32>, fill_cut: bool) -> (Mesh, Mesh) { \n\n let mut vert_side_map : HashMap<Id, PointSide> = HashMap::new();\n\n for face_id in FaceIterator::new(self) {\n\n for halfedge_id in FaceHalfedgeIterator::new(self, self.face_first_halfedge_id(face_id).unwrap()) {\n\n let vert = 
self.halfedge_start_vertex(halfedge_id).unwrap();\n\n vert_side_map.entry(vert.id).or_insert(point_side_on_plane(vert.position, pt_on_plane, norm));\n", "file_path": "src/mesh.rs", "rank": 60, "score": 18.993499427488935 }, { "content": " let paired_face3_id = paired_face3_id.unwrap().unwrap();\n\n if used_ids.contains_key(&paired_face3_id) {\n\n continue;\n\n }\n\n let paired_face3 = &self.generated_faces[paired_face3_id];\n\n if !almost_eq(paired_face3.norm, f.norm) {\n\n continue;\n\n }\n\n let another_index = self.another_vertex_index_of_face3(paired_face3, indices[next_i], indices[i]);\n\n let merged_f = Face4 {p1: indices[i], p2: another_index, p3: indices[next_i], p4: indices[next_next_i]};\n\n q.push(merged_f);\n\n return Some(paired_face3_id);\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn finalize(&mut self, mesh: &mut Mesh) {\n\n let mut quards : Vec<Face4> = Vec::new();\n\n let mut used_ids: HashMap<usize, bool> = HashMap::new();\n", "file_path": "src/wrap.rs", "rank": 61, "score": 18.992772835171465 }, { "content": "\n\n pub fn generate_mesh(&mut self) -> &mut Mesh {\n\n let end_radius = self.end_radius;\n\n for bone in self.bones.clone() {\n\n self.add_sphere(bone.from, end_radius);\n\n self.add_sphere(bone.to, end_radius);\n\n let bone_vector = bone.to - bone.from;\n\n let norm = bone_vector.normalize();\n\n let mut big_radius = bone_vector.magnitude() * 0.15;\n\n if big_radius > self.max_radius {\n\n big_radius = self.max_radius;\n\n }\n\n let big_end_quad = make_quad(bone.from, norm, end_radius, norm);\n\n let small_end_quad = make_quad(bone.to, -norm, end_radius, -norm);\n\n let middle_for_big_quad = make_quad((bone.from + bone_vector * 0.2) - (-norm) * big_radius, -norm, big_radius, -norm);\n\n let middle_for_small_quad = make_quad((bone.from + bone_vector * 0.2) - norm * big_radius, norm, big_radius, norm);\n\n let big_end_face = self.mesh.add_positions(big_end_quad);\n\n let small_end_face = self.mesh.add_positions(small_end_quad);\n\n let 
middle_for_big_face = self.mesh.add_positions(middle_for_big_quad);\n\n let middle_for_small_face = self.mesh.add_positions(middle_for_small_quad);\n", "file_path": "src/skeletonmesh.rs", "rank": 62, "score": 18.717745742913053 }, { "content": " }\n\n self.add_halfedges_and_vertices(&added_halfedges)\n\n }\n\n\n\n pub fn transform(&mut self, mat: &Matrix4<f32>) -> &mut Self {\n\n for vertex in self.vertices.iter_mut() {\n\n vertex.position = mat.transform_point(vertex.position);\n\n }\n\n self\n\n }\n\n\n\n pub fn translate(&mut self, x: f32, y: f32, z: f32) -> &mut Self {\n\n let mat = Matrix4::from_translation(Vector3::new(x, y, z));\n\n self.transform(&mat)\n\n }\n\n\n\n pub fn scale(&mut self, value: f32) -> &mut Self {\n\n let mat = Matrix4::from_scale(value);\n\n self.transform(&mat)\n\n }\n", "file_path": "src/mesh.rs", "rank": 63, "score": 18.619387212693017 }, { "content": " let mut mesh = Mesh::new();\n\n mesh.import(\"/Users/jeremy/ball.obj\");\n\n let point = Point3 {x: 0.25, y: 0.25, z: 0.25};\n\n let norm = Vector3 {x: 0.0, y: 0.2, z: 0.78};\n\n let (mut front_mesh, mut back_mesh) = mesh.split_mesh_by_plane(point, norm);\n\n let mut merged_mesh = Mesh::new();\n\n merged_mesh += front_mesh;\n\n back_mesh.translate(0.0, 0.0, -0.5);\n\n merged_mesh += back_mesh;\n\n mesh.translate(0.0, 0.0, -3.0);\n\n merged_mesh += mesh;\n\n merged_mesh.export(\"test.obj\").expect(\"save file failed\");\n\n */\n\n\n\n /*\n\n let mut m1 = Mesh::new();\n\n let mut m2 = Mesh::new();\n\n m1.import(\"/Users/jeremy/cube.obj\");\n\n m1.scale(0.60);\n\n m2.import(\"/Users/jeremy/ball.obj\");\n", "file_path": "examples/obj_export/main.rs", "rank": 64, "score": 18.600022530141107 }, { "content": " }\n\n }\n\n while new_mesh.add_linked_vertices(&mut border_map) > 0 {};\n\n new_mesh\n\n }\n\n\n\n pub fn smooth(&mut self, factor: f32, limit_vertices: Option<&HashSet<usize>>) {\n\n let mut neighbor_position_sum_map : HashMap<Id, (Point3<f32>, usize)> = HashMap::new();\n\n let mut 
face_norm_map : HashMap<Id, Vector3<f32>> = HashMap::new();\n\n for face_id in FaceIterator::new(self) {\n\n for halfedge_id in FaceHalfedgeIterator::new(self, self.face_first_halfedge_id(face_id).unwrap()) {\n\n let from_vert = self.halfedge_start_vertex(halfedge_id);\n\n if from_vert.is_some() {\n\n let next_halfedge_id = self.halfedge_next_id(halfedge_id);\n\n if next_halfedge_id.is_some() {\n\n let to_vert = self.halfedge_start_vertex(next_halfedge_id.unwrap());\n\n if to_vert.is_some() {\n\n if limit_vertices.is_none() || limit_vertices.unwrap().contains(&to_vert.unwrap().id) {\n\n let item = &mut neighbor_position_sum_map.entry(to_vert.unwrap().id).or_insert((Point3 {x:0.0, y:0.0, z:0.0}, 0));\n\n item.0 += from_vert.unwrap().position.to_vec();\n", "file_path": "src/mesh.rs", "rank": 65, "score": 18.138874379242473 }, { "content": "\n\n pub fn extrude_face(&mut self, face_id: Id, normal: Vector3<f32>, amount: f32) -> &mut Self {\n\n let mut new_halfedges : Vec<Id> = Vec::new();\n\n for halfedge_id in FaceHalfedgeIterator::new(self, face_id) {\n\n new_halfedges.push(halfedge_id);\n\n }\n\n self.extrude_halfedges(&new_halfedges, normal, amount);\n\n self\n\n }\n\n\n\n pub fn add_plane(&mut self, width: f32, depth: f32) -> Id {\n\n let x = width / 2.0;\n\n let y = depth / 2.0;\n\n let points = vec![Point3 {x: -x, y: -y, z: 0.0},\n\n Point3 {x: x, y: -y, z: 0.0},\n\n Point3 {x: x, y: y, z: 0.0},\n\n Point3 {x: -x, y: y, z: 0.0}];\n\n let mut added_halfedges : Vec<(Id, Id)> = Vec::new();\n\n for i in 0..points.len() {\n\n added_halfedges.push((self.add_halfedge(), self.add_vertex(points[i])));\n", "file_path": "src/mesh.rs", "rank": 66, "score": 18.08400828354179 }, { "content": " }\n\n\n\n pub fn error_count(&self) -> usize {\n\n self.wrap_error_count as usize\n\n }\n\n\n\n fn resolve_deform(&mut self) {\n\n for vert in self.mesh.vertices.iter_mut() {\n\n let node_index = if self.vertex_node_map.is_empty() {\n\n NodeIndex::new(self.last_node_id)\n\n } else {\n\n 
self.vertex_node_map[&vert.id]\n\n };\n\n let node_base_norm = self.graph.node_weight(node_index).unwrap().base_norm;\n\n let node_position = self.graph.node_weight(node_index).unwrap().position;\n\n let vert_ray = vert.position - node_position;\n\n let mut sum_x = 0.0;\n\n let mut sum_y = 0.0;\n\n let mut sum_z = 0.0;\n\n let mut num = 0;\n", "file_path": "src/bmesh.rs", "rank": 67, "score": 17.397003270885346 }, { "content": " let mut added_vertices = Vec::new();\n\n added_vertices.push(self.source_vertices[f.p1].tag);\n\n added_vertices.push(self.source_vertices[f.p2].tag);\n\n added_vertices.push(self.source_vertices[f.p3].tag);\n\n added_vertices.push(self.source_vertices[f.p4].tag);\n\n if 0 == mesh.add_vertices(added_vertices) {\n\n self.finalize_finished = false;\n\n }\n\n }\n\n }\n\n\n\n pub fn stitch_two_faces(&mut self, mesh: &mut Mesh, face1: Id, face2: Id) {\n\n let mut remove_faces = Vec::new();\n\n self.add_candidate_face(mesh, face1, false);\n\n if !mesh.face_adj_id(face1).is_none() {\n\n remove_faces.push(face1);\n\n }\n\n self.add_candidate_face(mesh, face2, false);\n\n if !mesh.face_adj_id(face2).is_none() {\n\n remove_faces.push(face2);\n", "file_path": "src/wrap.rs", "rank": 68, "score": 17.270146360590292 }, { "content": " pub face: Id,\n\n pub prev: Id,\n\n pub next: Id,\n\n pub opposite: Id,\n\n pub alive: bool,\n\n}\n\n\n\n#[derive(Hash, Eq, PartialEq, Debug, Clone)]\n\npub struct Point3Key {\n\n x: u32,\n\n y: u32,\n\n z: u32,\n\n}\n\n\n\nimpl Point3Key {\n\n pub fn new(point: Point3<f32>) -> Self {\n\n Point3Key {\n\n x: (point.x * 1000.0).round() as u32,\n\n y: (point.y * 1000.0).round() as u32,\n\n z: (point.z * 1000.0).round() as u32,\n", "file_path": "src/mesh.rs", "rank": 69, "score": 16.83318133050536 }, { "content": " let mut inner_mesh = Mesh::new();\n\n let mut outter_mesh = Mesh::new();\n\n inner_mesh.add_mesh(self);\n\n for face_id in FaceIterator::new(other) {\n\n let norm = other.face_norm(face_id);\n\n let point = 
other.halfedge_start_vertex(other.face_first_halfedge_id(face_id).unwrap()).unwrap().position;\n\n let (sub_front, sub_back) = inner_mesh.split_mesh_by_plane(point, norm, false);\n\n inner_mesh = sub_back;\n\n outter_mesh.add_mesh(&sub_front);\n\n }\n\n (outter_mesh, inner_mesh)\n\n }\n\n\n\n pub fn union_convex_mesh(&self, other: &Mesh) -> Mesh {\n\n let (other_outter, _) = other.split_mesh_by_other(self);\n\n let (my_outter, _) = self.split_mesh_by_other(other);\n\n let mesh = other_outter + my_outter;\n\n mesh.weld().fix_tjunction().combine_adj_faces()\n\n }\n\n\n", "file_path": "src/mesh.rs", "rank": 70, "score": 16.676995438362887 }, { "content": " if halfedge.opposite > 0 && halfedge.opposite < any_paired_id {\n\n return halfedge.opposite;\n\n }\n\n any_paired_id\n\n }\n\n\n\n pub fn edge_center(&self, id: Id) -> Point3<f32> {\n\n let halfedge = self.halfedge(id).unwrap();\n\n let next = self.halfedge(halfedge.next).unwrap();\n\n Point3::midpoint(self.vertex(halfedge.vertex).unwrap().position,\n\n self.vertex(next.vertex).unwrap().position)\n\n }\n\n\n\n pub fn face_center(&self, id: Id) -> Point3<f32> {\n\n let face = self.face(id).unwrap();\n\n let mut points = SmallVec::<[Point3<f32>; 4]>::new();\n\n for halfedge_id in FaceHalfedgeIterator::new(self, face.halfedge) {\n\n let halfedge = self.halfedge(halfedge_id).unwrap();\n\n let vertex = self.vertex(halfedge.vertex).unwrap();\n\n points.push(vertex.position);\n", "file_path": "src/mesh.rs", "rank": 71, "score": 16.673917396595172 }, { "content": " let mut mesh = bmesh.generate_mesh(node4);\n\n //mesh.import(\"test.obj\").expect(\"save file failed\");\n\n\n\n //let mut cc = CatmullClarkSubdivider::new(&mut mesh);\n\n //cc.generate().save_obj(\"test.obj\").expect(\"save file failed\");\n\n\n\n //cc.generate().triangulate().save_obj(\"test.obj\").expect(\"save file failed\");\n\n\n\n mesh.subdivide().triangulate().export(\"test.obj\").expect(\"save file failed\");\n\n */\n\n\n\n /*\n\n let mesh 
Mesh::new();\n\n let point = Point3 {x: 0.25, y: 0.25, z: 0.25};\n\n let norm = Vector3 {x: 0.0, y: 0.2, z: 0.78};\n\n let (front_mesh, back_mesh) = mesh.split_mesh_by_plane(point, norm);\n\n front_mesh.export(\"test.obj\").expect(\"save file failed\");\n\n */\n\n\n\n /*\n", "file_path": "examples/obj_export/main.rs", "rank": 72, "score": 16.518778742298185 }, { "content": " let enter = vertices[i];\n\n let cone = vertices[i_next];\n\n let leave = vertices[(i + 2) % vertices.len()];\n\n let cone_v = self.vertex(cone).unwrap();\n\n let enter_v = self.vertex(enter).unwrap();\n\n let leave_v = self.vertex(leave).unwrap();\n\n let angle = angle360(\n\n cone_v.position - enter_v.position,\n\n leave_v.position - cone_v.position,\n\n direct,\n\n );\n\n if angle >= 1.0 && angle <= 179.0 {\n\n let mut is_ear = true;\n\n for j in 0..(vertices.len() - 3) {\n\n let fourth =\n\n vertices[(i + 3 + j) % vertices.len()];\n\n let fourth_v = self.vertex(fourth).unwrap();\n\n if point_in_triangle(\n\n enter_v.position,\n\n cone_v.position,\n", "file_path": "src/triangulate.rs", "rank": 73, "score": 16.478282010354288 }, { "content": " let opposite_id = self.halfedge_opposite_id(id);\n\n if opposite_id.is_none() {\n\n return None;\n\n }\n\n self.halfedge_face_id(opposite_id.unwrap())\n\n }\n\n\n\n pub fn halfedge_direct(&self, id: Id) -> Vector3<f32> {\n\n let begin_pos = self.halfedge_start_vertex(id).unwrap().position;\n\n let end_pos = self.halfedge_start_vertex(self.halfedge_next_id(id).unwrap()).unwrap().position;\n\n end_pos - begin_pos\n\n }\n\n\n\n pub fn set_halfedge_start_vertex_id(&mut self, halfedge_id: Id, vertex_id: Id) {\n\n self.halfedge_mut(halfedge_id).unwrap().vertex = vertex_id;\n\n }\n\n\n\n pub fn halfedge_start_vertex_mut(&mut self, id: Id) -> Option<&mut Vertex> {\n\n let vertex_id = self.halfedge_start_vertex_id(id)?;\n\n self.vertex_mut(vertex_id)\n", "file_path": "src/mesh.rs", "rank": 74, "score": 16.223119478898965 }, { "content": " let node_index = 
self.graph.add_node(node);\n\n let node_id = node_index.index();\n\n self.graph.node_weight_mut(node_index).unwrap().insert_order = self.node_count as isize;\n\n self.node_count += 1;\n\n self.last_node_id = node_id;\n\n node_id\n\n }\n\n\n\n pub fn add_edge(&mut self, first_node_id: usize, second_node_id: usize) -> usize {\n\n let edge = Edge::new();\n\n *self.neighbor_count_map.entry(first_node_id).or_insert(0) += 1;\n\n *self.neighbor_count_map.entry(second_node_id).or_insert(0) += 1;\n\n self.graph.add_edge(NodeIndex::new(first_node_id), NodeIndex::new(second_node_id), edge).index()\n\n }\n\n\n\n fn direct_of_nodes(&self, first_node_index: NodeIndex, second_node_index: NodeIndex) -> Vector3<f32> {\n\n let first_node = self.graph.node_weight(first_node_index).unwrap();\n\n let second_node = self.graph.node_weight(second_node_index).unwrap();\n\n let direct = second_node.position - first_node.position;\n\n direct.normalize()\n", "file_path": "src/bmesh.rs", "rank": 75, "score": 16.21274404913014 }, { "content": "\n\n// Optimized for quad meshes, since that is very common in high-poly models\n\n// during editing. By subdividing some lower resolution model, pure quad models\n\n// are produced. It could be worth tweaking this constant for other use-cases\n\n// though, triangulation will create more halfedges for many vertices and would\n\n// benefit from a higher number, but that would consume more memory for all\n\n// meshes which would not be ideal. It could be worth having completely separate\n\n// implementations for quad, tri and polygon meshes at later time? Or maybe use\n\n// generics and/or macros to specialize some parts of the implementation?\n\nconst VERTEX_HALFEDGE_INLINE_COUNT: usize = 4;\n\n\n\n#[derive(Debug)]\n\npub struct Vertex {\n\n pub id: Id,\n\n pub position: Point3<f32>,\n\n\n\n /// This is at the time of writing used as a set but is implemented using a\n\n /// SmallVec. 
Finding or implementing something like a \"SmallSet\" could\n\n /// provide a better API.\n\n pub halfedges: SmallVec<[Id; VERTEX_HALFEDGE_INLINE_COUNT]>,\n", "file_path": "src/mesh.rs", "rank": 76, "score": 16.073947988548618 }, { "content": " let mut rm_vec : Vec<usize> = Vec::new();\n\n for (i, &it) in self.candidates.iter().enumerate() {\n\n if self.is_vertex_closed(it) {\n\n rm_vec.push(i);\n\n continue;\n\n }\n\n if self.is_edge_closed(p1, it) || self.is_edge_closed(p2, it) {\n\n continue;\n\n }\n\n let mut angle = self.angle_of_base_face_and_point(item_index, it);\n\n if angle > max_angle {\n\n max_angle = angle;\n\n choosen_it = Some(it);\n\n }\n\n }\n\n for &i in rm_vec.iter().rev() {\n\n self.candidates.swap_remove(i);\n\n }\n\n //println!(\"find_best_vertex_on_the_left angle:{:?}\", max_angle);\n\n choosen_it\n", "file_path": "src/wrap.rs", "rank": 77, "score": 16.01765107347895 }, { "content": " pub fn trim(&self, normalize: bool) -> Self {\n\n let mut to_mesh = Mesh::new();\n\n to_mesh.add_mesh(self);\n\n let mut x_low = f32::MAX;\n\n let mut x_high = f32::MIN;\n\n let mut y_low = f32::MAX;\n\n let mut y_high = f32::MIN;\n\n let mut z_low = f32::MAX;\n\n let mut z_high = f32::MIN;\n\n for vert in self.vertices.iter() {\n\n if vert.position.x < x_low {\n\n x_low = vert.position.x;\n\n } else if vert.position.x > x_high {\n\n x_high = vert.position.x;\n\n }\n\n if vert.position.y < y_low {\n\n y_low = vert.position.y;\n\n } else if vert.position.y > y_high {\n\n y_high = vert.position.y;\n\n }\n", "file_path": "src/mesh.rs", "rank": 78, "score": 15.897317253565944 }, { "content": " }\n\n }\n\n let mut change_back_pairs : Vec<(Id, Point3<f32>)> = Vec::new();\n\n for (face_id, face_normal) in face_norm_map {\n\n if face_normal.dot(self.face_norm(face_id)) <= 0.0 {\n\n for halfedge_id in FaceHalfedgeIterator::new(self, self.face_first_halfedge_id(face_id).unwrap()) {\n\n let from_vert = self.halfedge_start_vertex(halfedge_id);\n\n if from_vert.is_some() 
{\n\n match old_position_map.get(&from_vert.unwrap().id) {\n\n Some(&old_position) => {\n\n change_back_pairs.push((from_vert.unwrap().id, old_position));\n\n },\n\n _ => {}\n\n }\n\n }\n\n }\n\n }\n\n }\n\n for (vert_id, old_position) in change_back_pairs {\n\n self.vertex_mut(vert_id).unwrap().position = old_position;\n", "file_path": "src/mesh.rs", "rank": 79, "score": 15.873157769434767 }, { "content": " }\n\n }\n\n }\n\n\n\n fn generate_from_node(&mut self, node_index: NodeIndex) {\n\n if self.graph.node_weight(node_index).unwrap().generated {\n\n return;\n\n }\n\n self.graph.node_weight_mut(node_index).unwrap().generated = true;\n\n let user_node_id = {\n\n let from_id = self.graph.node_weight(node_index).unwrap().generate_from_node_id;\n\n if from_id.is_none() {\n\n node_index.index()\n\n } else {\n\n from_id.unwrap()\n\n }\n\n };\n\n let node_base_norm = self.graph.node_weight(node_index).unwrap().base_norm;\n\n let node_position = self.graph.node_weight(node_index).unwrap().position;\n\n let node_radius = self.graph.node_weight(node_index).unwrap().radius;\n", "file_path": "src/bmesh.rs", "rank": 80, "score": 15.821221942947044 }, { "content": " self.face_mut(added_face_id).unwrap().halfedge = added_halfedges[0].0;\n\n for i in 0..added_halfedges.len() {\n\n let first = added_halfedges[i].0;\n\n let second = added_halfedges[(i + 1) % added_halfedges.len()].0;\n\n self.link_halfedges(first, second);\n\n }\n\n added_face_id\n\n }\n\n\n\n pub fn extrude_halfedges(&mut self, halfedges: &Vec<Id>, normal: Vector3<f32>, amount: f32) {\n\n let mut downside_halfedges: Vec<Id> = Vec::new();\n\n let mut downside_vertices: Vec<Id> = Vec::new();\n\n let direct = normal * amount;\n\n let mut need_fill_downside = false;\n\n for &halfedge_id in halfedges {\n\n let opposite = self.halfedge_opposite_id(halfedge_id);\n\n if opposite.is_none() {\n\n let old_position = {\n\n let mut vertex = self.halfedge_start_vertex_mut(halfedge_id).unwrap();\n\n let position = 
vertex.position;\n", "file_path": "src/mesh.rs", "rank": 81, "score": 15.658547364663184 }, { "content": " vert.position.y = sum_y / num as f32;\n\n vert.position.z = sum_z / num as f32;\n\n }\n\n }\n\n }\n\n\n\n fn resolve_seam_from_node(&mut self, node_index: NodeIndex, from_vertex_id: Id, seam: &mut Vec<Id>) {\n\n // Check seam_resolved in caller, don't check it here\n\n self.graph.node_weight_mut(node_index).unwrap().seam_resolved = true;\n\n seam.push(from_vertex_id);\n\n let halfedges = self.mesh.vertex(from_vertex_id).unwrap().halfedges.clone();\n\n let mut ids = Vec::new();\n\n for halfedge_id in halfedges {\n\n let next_halfedge_id = self.mesh.halfedge_next_id(halfedge_id);\n\n if next_halfedge_id.is_none() {\n\n continue;\n\n }\n\n let vertex_id = self.mesh.halfedge_start_vertex_id(next_halfedge_id.unwrap()).unwrap();\n\n let next_node_id = self.mesh.vertex_mut(vertex_id).unwrap().source as usize;\n\n if next_node_id == node_index.index() {\n", "file_path": "src/bmesh.rs", "rank": 82, "score": 15.466130834492976 }, { "content": " let mesh = ctx.meshes.get((mesh_id - 1) as usize).unwrap();\n\n let count : isize = cmp::min((mesh.vertices.len() * 3) as usize, max_buffer_len as usize) as isize;\n\n let mut i : isize = 0;\n\n for vert_idx in 0..mesh.vertices.len() {\n\n let position = mesh.vertices[vert_idx].position;\n\n if i + 3 > count {\n\n break;\n\n }\n\n unsafe {\n\n *buffer.offset(i + 0) = position.x;\n\n *buffer.offset(i + 1) = position.y;\n\n *buffer.offset(i + 2) = position.z;\n\n }\n\n i += 3;\n\n }\n\n i as c_int\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn meshlite_get_vertex_source_array(context: *mut RustContext, mesh_id: c_int, buffer: *mut c_int, max_buffer_len: c_int) -> c_int {\n", "file_path": "ffi/src/lib.rs", "rank": 83, "score": 15.352522124080437 }, { "content": "\n\n pub fn weld(&self) -> Self {\n\n let mut new_mesh = Mesh::new();\n\n let mut vertices_set : HashMap<Point3Key, Id> = HashMap::new();\n\n for face_id in 
FaceIterator::new(&self) {\n\n let face = self.face(face_id).unwrap();\n\n let mut key_set : HashSet<Point3Key> = HashSet::new();\n\n let mut positions : Vec<(Point3<f32>, i32)> = Vec::new();\n\n for halfedge_id in FaceHalfedgeIterator::new(&self, face.halfedge) {\n\n let vertex = self.halfedge_start_vertex(halfedge_id).unwrap();\n\n let key = Point3Key::new(vertex.position);\n\n if key_set.contains(&key) {\n\n continue;\n\n }\n\n key_set.insert(key);\n\n positions.push((vertex.position, vertex.source));\n\n }\n\n if positions.len() < 3 {\n\n continue;\n\n }\n", "file_path": "src/mesh.rs", "rank": 84, "score": 14.998707693292504 }, { "content": " let mut added_halfedges : Vec<(Id, Id)> = Vec::new();\n\n for i in 0..added_vertices.len() {\n\n if self.vertex(added_vertices[i]).is_none() {\n\n return 0;\n\n }\n\n }\n\n for i in 0..added_vertices.len() {\n\n added_halfedges.push((self.add_halfedge(), added_vertices[i]));\n\n }\n\n self.add_halfedges_and_vertices(&added_halfedges)\n\n }\n\n\n\n pub fn add_positions(&mut self, added_positions : Vec<Point3<f32>>) -> Id {\n\n if added_positions.is_empty() {\n\n return 0;\n\n }\n\n let mut added_vertices : Vec<Id> = Vec::new();\n\n for i in 0..added_positions.len() {\n\n added_vertices.push(self.add_vertex(added_positions[i]));\n\n }\n", "file_path": "src/mesh.rs", "rank": 85, "score": 14.93403997817558 }, { "content": "\n\n fn add_candidate_vertices(&mut self, mesh: &mut Mesh, vertices: &Vec<Id>, plane_norm: Vector3<f32>, plane_id: usize) {\n\n let mut vertices_index_set : HashMap<Id, usize> = HashMap::new();\n\n for &old_vert_id in vertices {\n\n let vertex = mesh.vertex(old_vert_id).unwrap();\n\n vertices_index_set.entry(vertex.id).or_insert(self.add_source_vertex(vertex.position, plane_id, vertex.id));\n\n }\n\n for i in 0..vertices.len() {\n\n let old_vert_id = vertices[i];\n\n let old_next_vert_id = vertices[(i + 1) % vertices.len()];\n\n let &vertex_index = vertices_index_set.get(&old_vert_id).unwrap();\n\n let 
&next_vertex_index = vertices_index_set.get(&old_next_vert_id).unwrap();\n\n self.add_startup(next_vertex_index,\n\n vertex_index,\n\n plane_norm);\n\n }\n\n }\n\n\n\n fn add_candidate_face(&mut self, mesh: &mut Mesh, face_id: Id, reverse: bool) {\n\n let mut vertices_index_set : HashMap<Id, usize> = HashMap::new();\n", "file_path": "src/wrap.rs", "rank": 86, "score": 14.746438591392874 }, { "content": " let c = a + b;\n\n let new_point = origin + c.normalize_to(length);\n\n final_cut.push(middle_cut[i]);\n\n final_cut.push(new_point);\n\n }\n\n cut = final_cut;\n\n }\n\n cut\n\n }\n\n\n\n pub fn resolve_round(&mut self) {\n\n let mut new_end_starts : Vec<(NodeIndex, Vector3<f32>)> = Vec::new();\n\n for (&k, &v) in self.neighbor_count_map.iter() {\n\n if 1 == v {\n\n let node_index = NodeIndex::new(k);\n\n let neighbors = self.graph.neighbors_undirected(node_index);\n\n for other_index in neighbors {\n\n let direct = self.direct_of_nodes(node_index, other_index);\n\n new_end_starts.push((node_index, -direct));\n\n }\n", "file_path": "src/bmesh.rs", "rank": 87, "score": 14.743050371589888 }, { "content": " deform_width: f32,\n\n vertex_node_map: HashMap<Id, NodeIndex>,\n\n vertex_cut_direct_map: HashMap<Id, Vector3<f32>>,\n\n round_steps: usize,\n\n pub seams: Vec<Vec<usize>>,\n\n seam_required: bool,\n\n last_node_id: usize,\n\n}\n\n\n\nimpl Bmesh {\n\n pub fn new() -> Self {\n\n Bmesh {\n\n graph: Graph::new_undirected(),\n\n mesh: Mesh::new(),\n\n neighbor_count_map: HashMap::new(),\n\n neighbor_count_vec: Vec::new(),\n\n wrap_error_count: 0,\n\n node_count: 0,\n\n debug_enabled: false,\n\n cut_subdiv_count: 0,\n", "file_path": "src/bmesh.rs", "rank": 88, "score": 14.550933193275682 }, { "content": " vert_ids.push(self.mesh.add_vertex(vert));\n\n }\n\n let mut rev_vert_ids = vert_ids.clone();\n\n rev_vert_ids.reverse();\n\n let cut_faces = vec![vert_ids, rev_vert_ids];\n\n let cut_directs = vec![direct, -direct];\n\n for other_index in neighbors.clone() {\n\n let 
edge_index = self.graph.find_edge(node_index, other_index).unwrap();\n\n new_cuts.push((edge_index, (cut_faces[order].clone(), -cut_directs[order])));\n\n order += 1;\n\n }\n\n } else if neighbors_count >= 3 {\n\n let mut cuts : Vec<(Vec<Point3<f32>>, EdgeIndex, NodeIndex, Vector3<f32>)> = Vec::new();\n\n let max_round : usize = 25;\n\n let factor_step = 1.0 / max_round as f32;\n\n const DIRECT_INITIAL_AFFECT_FACTOR : f32 = 0.5;\n\n for round in 0..max_round {\n\n for other_index in neighbors.clone() {\n\n let factor = factor_step * round as f32;\n\n let mut direct = self.direct_of_nodes(node_index, other_index);\n", "file_path": "src/bmesh.rs", "rank": 89, "score": 14.531806410696813 }, { "content": " item.1 += 1;\n\n face_norm_map.entry(face_id).or_insert_with(|| self.face_norm(face_id));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n let self_factor = 1.0 - factor;\n\n let mut old_position_map: HashMap<Id, Point3<f32>> = HashMap::new();\n\n for vert in self.vertices.iter_mut() {\n\n match neighbor_position_sum_map.get(&vert.id) {\n\n Some(&sum_and_count) => {\n\n if sum_and_count.1 > 0 {\n\n let old_position = vert.position;\n\n old_position_map.entry(vert.id).or_insert(old_position);\n\n vert.position = (sum_and_count.0 / sum_and_count.1 as f32) * factor + old_position.to_vec() * self_factor;\n\n }\n\n },\n\n _ => {}\n", "file_path": "src/mesh.rs", "rank": 90, "score": 14.319983288101021 }, { "content": "use cgmath::Point3;\n\nuse cgmath::Vector3;\n\nuse cgmath::prelude::*;\n\nuse cgmath::Matrix4;\n\nuse std::option::Option;\n\nuse std::io;\n\nuse std::vec::Vec;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse fnv::FnvHashSet;\n\nuse fnv::FnvHashMap;\n\nuse iterator::FaceHalfedgeIterator;\n\nuse iterator::FaceIterator;\n\nuse util::*;\n\nuse smallvec::SmallVec;\n\nuse std::ops::Add;\n\nuse std::ops::AddAssign;\n\nuse std::f32;\n\n\n\npub type Id = usize;\n", "file_path": "src/mesh.rs", "rank": 91, "score": 14.284930584041156 }, { "content": " 
let node_cut_subdiv_count = self.graph.node_weight(node_index).unwrap().cut_subdiv_count;\n\n let cut_subdiv_count = {\n\n if node_cut_subdiv_count.is_none() {\n\n self.cut_subdiv_count\n\n } else {\n\n node_cut_subdiv_count.unwrap()\n\n }\n\n };\n\n let mut new_cuts : Vec<(EdgeIndex, (Vec<Id>, Vector3<f32>))> = Vec::new();\n\n let mut other_node_indices : Vec<NodeIndex> = Vec::new();\n\n {\n\n let neighbors = self.graph.neighbors_undirected(node_index);\n\n let mut neighbors_count = 0;\n\n let mut directs = Vec::new();\n\n let mut rev_directs = Vec::new();\n\n for other_index in neighbors.clone() {\n\n let direct = self.direct_of_nodes(node_index, other_index);\n\n directs.push(direct);\n\n rev_directs.push(-direct);\n\n other_node_indices.push(other_index);\n", "file_path": "src/bmesh.rs", "rank": 92, "score": 14.179825660488731 }, { "content": " );\n\n self.tmp_avg_of_edge_mids.push(\n\n edge_data_mut(\n\n &self.input,\n\n *halfedge_id,\n\n &mut self.face_data_set,\n\n &mut self.edge_data_set,\n\n &mut self.output,\n\n ).mid_point,\n\n );\n\n }\n\n let barycenter = Point3::centroid(&self.tmp_avg_of_faces);\n\n let average_of_edge = Point3::centroid(&self.tmp_avg_of_edge_mids);\n\n let position = (((average_of_edge * 2.0) + barycenter.to_vec())\n\n + (vertex.position.to_vec()\n\n * ((self.tmp_avg_of_faces.len() as i32 - 3).abs() as f32)))\n\n / (self.tmp_avg_of_faces.len() as f32);\n\n let mut data = VertexData::new();\n\n data.generated_vertex_id = self.output.add_vertex(position);\n\n self.vertex_data_set[vertex_id] = Some(data);\n\n data\n\n }\n\n}\n\n\n", "file_path": "src/subdivide.rs", "rank": 93, "score": 13.580731947935833 }, { "content": " &mut *context\n\n };\n\n assert_eq!(ctx.magic, MAGIC_NUM);\n\n let bmesh = ctx.bmeshes.get_mut((bmesh_id - 1) as usize).unwrap();\n\n let base_norm = bmesh.get_node_base_norm(node_id as usize);\n\n unsafe {\n\n *norm_buffer.offset(0) = base_norm.x;\n\n *norm_buffer.offset(1) = base_norm.y;\n\n *norm_buffer.offset(2) = 
base_norm.z;\n\n }\n\n 0\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn meshlite_bmesh_enable_debug(context: *mut RustContext, bmesh_id: c_int, enable: c_int) -> c_int {\n\n let ctx = unsafe {\n\n assert!(!context.is_null());\n\n &mut *context\n\n };\n\n assert_eq!(ctx.magic, MAGIC_NUM);\n", "file_path": "ffi/src/lib.rs", "rank": 94, "score": 13.524278046207497 }, { "content": " node_position.z - node_radius * 0.5);\n\n if self.cut_subdiv_count > 0 {\n\n let subdived_mesh = self.mesh.subdivide();\n\n self.mesh = subdived_mesh;\n\n }\n\n for vertex in self.mesh.vertices.iter_mut() {\n\n vertex.source = root_node_id as i32;\n\n }\n\n if (self.deform_thickness - 1.0).abs() > SMALL_NUM ||\n\n (self.deform_width - 1.0).abs() > SMALL_NUM {\n\n self.resolve_deform();\n\n }\n\n }\n\n &mut self.mesh\n\n }\n\n}\n\n\n\nimpl Node {\n\n fn new(radius: f32, position: Point3<f32>) -> Self {\n\n Node {\n", "file_path": "src/bmesh.rs", "rank": 95, "score": 13.513417898317893 }, { "content": " let to_vert_id = self.halfedge_start_vertex_id(self.halfedge_next_id(halfedge_id).unwrap()).unwrap();\n\n let from_is_front = vert_side_map[&from_vert_id] != PointSide::Back;\n\n let to_is_front = vert_side_map[&to_vert_id] != PointSide::Back;\n\n let from_is_back = vert_side_map[&from_vert_id] != PointSide::Front;\n\n let to_is_back = vert_side_map[&to_vert_id] != PointSide::Front;\n\n let edge = EdgeEndpoints::new(from_vert_id, to_vert_id);\n\n if from_is_front {\n\n let new_vert_id = *front_vert_map.entry(from_vert_id).or_insert_with(|| {\n\n front_mesh.add_vertex(self.vertex(from_vert_id).unwrap().position)\n\n });\n\n if front_new_vert_set.insert(new_vert_id) {\n\n front_new_verts.push(new_vert_id);\n\n }\n\n }\n\n if from_is_back {\n\n let new_vert_id = *back_vert_map.entry(from_vert_id).or_insert_with(|| {\n\n back_mesh.add_vertex(self.vertex(from_vert_id).unwrap().position)\n\n });\n\n if back_new_vert_set.insert(new_vert_id) {\n\n back_new_verts.push(new_vert_id);\n", "file_path": 
"src/mesh.rs", "rank": 96, "score": 13.452200166035823 }, { "content": "use mesh::Mesh;\n\nuse mesh::Id;\n\n\n\npub struct FaceIterator<'a> {\n\n index: usize,\n\n mesh: &'a Mesh,\n\n}\n\n\n\nimpl<'a> Iterator for FaceIterator<'a> {\n\n type Item = Id;\n\n\n\n fn next(&mut self) -> Option<Id> {\n\n while self.index < self.mesh.faces.len() {\n\n let face = &self.mesh.faces[self.index];\n\n self.index += 1;\n\n if face.alive {\n\n return Some(face.id)\n\n }\n\n }\n\n None\n", "file_path": "src/iterator.rs", "rank": 97, "score": 12.979367493332466 }, { "content": " radius: radius,\n\n position: position,\n\n base_norm: Vector3 {x:0.0, y:0.0, z:1.0},\n\n generated: false,\n\n base_norm_resolved: false,\n\n cut_subdiv_count: None,\n\n round_way: None,\n\n generate_from_node_id: None,\n\n seam_resolved: false,\n\n generated_vertices: Vec::new(),\n\n insert_order: 0\n\n }\n\n }\n\n}\n\n\n\nimpl Edge {\n\n fn new() -> Self {\n\n Edge {\n\n cuts: Vec::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/bmesh.rs", "rank": 98, "score": 12.975093211123054 }, { "content": " break;\n\n }\n\n let face_id = mesh.halfedge_face_id(halfedge_id).unwrap();\n\n let norm = mesh.face_norm(face_id);\n\n unsafe {\n\n *buffer.offset(i + 0) = norm.x;\n\n *buffer.offset(i + 1) = norm.y;\n\n *buffer.offset(i + 2) = norm.z;\n\n }\n\n i += 3;\n\n }\n\n i as c_int\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn meshlite_build(context: *mut RustContext, vertex_position_buffer: *mut c_float, vertex_count: c_int, face_index_buffer: *mut c_int, face_index_buffer_len: c_int) -> c_int {\n\n let ctx = unsafe {\n\n assert!(!context.is_null());\n\n &mut *context\n\n };\n", "file_path": "ffi/src/lib.rs", "rank": 99, "score": 12.965318561801872 } ]
Rust
main/src/vocab/sentence_piece_bpe_model.rs
sftse/rust-tokenizers
d869924622e40ea525d8244d1716517751c7743a
use crate::error::TokenizerError; use crate::tokenizer::base_tokenizer::{Token, TokenRef}; use crate::tokenizer::tokenization_utils::{is_punctuation, is_whitespace}; use crate::vocab::sentencepiece_proto::sentencepiece_model::ModelProto; use crate::{Mask, Offset, OffsetSize}; use hashbrown::HashMap; use protobuf::Message; use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fs::File; use std::io::Read; use std::ops::Index; #[derive(Debug, Clone)] pub struct BpeMergeVocab { pub values: HashMap<String, i64>, } pub struct SentencePieceBpeModel { bpe_ranks: BpeMergeVocab, } impl SentencePieceBpeModel { pub fn from_file(path: &str) -> Result<SentencePieceBpeModel, TokenizerError> { let mut f = File::open(path).map_err(|e| { TokenizerError::FileNotFound(format!("{} vocabulary file not found :{}", path, e)) })?; let mut contents = Vec::new(); let proto = match f.read_to_end(&mut contents) { Ok(_) => match ModelProto::parse_from_bytes(contents.as_slice()) { Ok(proto_value) => proto_value, Err(e) => { return Err(TokenizerError::VocabularyParsingError(e.to_string())); } }, Err(e) => { return Err(TokenizerError::VocabularyParsingError(e.to_string())); } }; let mut values = HashMap::new(); for (idx, piece) in proto.get_pieces().iter().enumerate() { values.insert(piece.get_piece().to_owned(), idx as i64); } let bpe_ranks = BpeMergeVocab { values }; Ok(SentencePieceBpeModel { bpe_ranks }) } pub fn tokenize_to_tokens(&self, initial_token: TokenRef) -> Vec<Token> { let mut sub_tokens = Vec::new(); if initial_token.mask != Mask::Special && initial_token.mask != Mask::Unknown { let mut agenda: BinaryHeap<SymbolPair> = BinaryHeap::new(); let mut symbols = SymbolList::from(initial_token); for symbol_index in 1..symbols.len() { self.maybe_add_pair( symbol_index as isize - 1, symbol_index as isize, initial_token.text, &symbols, &mut agenda, ); } while let Some(symbol_pair) = agenda.pop() { let left_symbol_index = symbol_pair.left; let right_symbol_index = 
symbol_pair.right; if left_symbol_index != -1 && right_symbol_index != -1 { let new_symbol = symbols.merge_symbols( left_symbol_index as usize, right_symbol_index as usize, symbol_pair.pair_size, ); if let Some(new_symbol) = new_symbol { self.maybe_add_pair( new_symbol.prev, left_symbol_index, initial_token.text, &symbols, &mut agenda, ); self.maybe_add_pair( left_symbol_index, new_symbol.next, initial_token.text, &symbols, &mut agenda, ); } } } for symbol in symbols.into_iter().flatten() { sub_tokens.push(Token { text: initial_token.text[symbol.start_byte..symbol.end_byte].to_string(), offset: Offset { begin: symbol.start_offset as OffsetSize + initial_token.offset.begin, end: symbol.end_offset as OffsetSize + initial_token.offset.begin, }, reference_offsets: initial_token.reference_offsets [symbol.start_offset..symbol.end_offset] .to_vec(), mask: Default::default(), }) } } else { sub_tokens.push(initial_token.to_owned()); } self.populate_masks(sub_tokens.as_mut_slice(), '\u{2581}'); sub_tokens } fn maybe_add_pair( &self, left_symbol_index: isize, right_symbol_index: isize, input_text: &str, symbols: &SymbolList, agenda: &mut BinaryHeap<SymbolPair>, ) { if left_symbol_index != -1 && right_symbol_index != -1 { if let (Some(left_symbol), Some(right_symbol)) = ( symbols[left_symbol_index as usize], symbols[right_symbol_index as usize], ) { let merged_text = &input_text[left_symbol.start_byte..right_symbol.end_byte]; if let Some(&score) = self.bpe_ranks.values.get(merged_text) { agenda.push(SymbolPair { left: left_symbol_index, right: right_symbol_index, score, pair_size: left_symbol.size + right_symbol.size, }) } } } } pub fn populate_masks(&self, tokens: &mut [Token], whitespace_token: char) { let mut previous_mask = Mask::None; for token in tokens { if token.text.chars().count() == 1 { let first_char = match token.text.chars().last() { Some(value) => value, None => { token.mask = Mask::Unknown; previous_mask = Mask::Unknown; continue; } }; if 
is_punctuation(&first_char) { token.mask = Mask::Punctuation; previous_mask = Mask::Punctuation; continue; } if is_whitespace(&first_char) { token.mask = Mask::Whitespace; previous_mask = Mask::Punctuation; continue; } } if !token.text.starts_with(whitespace_token) & !(previous_mask == Mask::Punctuation) & !(previous_mask == Mask::Whitespace) { token.mask = Mask::Continuation; previous_mask = Mask::Continuation; } else { previous_mask = Mask::None; } } } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct Symbol { start_byte: usize, end_byte: usize, start_offset: usize, end_offset: usize, prev: isize, next: isize, size: usize, } #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct SymbolPair { left: isize, right: isize, score: i64, pair_size: usize, } impl Ord for SymbolPair { fn cmp(&self, other: &Self) -> Ordering { other .score .cmp(&self.score) .then_with(|| other.left.cmp(&self.left)) } } impl PartialOrd for SymbolPair { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } struct SymbolList { symbols: Vec<Option<Symbol>>, } impl Index<usize> for SymbolList { type Output = Option<Symbol>; fn index(&self, index: usize) -> &Option<Symbol> { self.symbols.index(index) } } impl IntoIterator for SymbolList { type Item = Option<Symbol>; type IntoIter = <Vec<Option<Symbol>> as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.symbols.into_iter() } } impl From<TokenRef<'_>> for SymbolList { fn from(token: TokenRef) -> Self { let mut symbols = Vec::with_capacity(token.text.len()); for (index, (character_start, character)) in token.text.char_indices().enumerate() { let next = if index == token.text.char_indices().count() - 1 { -1 } else { (index + 1) as isize }; symbols.push(Some(Symbol { start_byte: character_start, end_byte: character_start + character.len_utf8(), start_offset: index, end_offset: index + 1, prev: index as isize - 1, next, size: 1, })); } Self { symbols } } } impl SymbolList { pub fn len(&self) -> usize 
{ self.symbols.len() } pub fn merge_symbols( &mut self, symbol_1_index: usize, symbol_2_index: usize, size_validation: usize, ) -> Option<Symbol> { if let (Some(left_symbol), Some(right_symbol)) = (self[symbol_1_index], self[symbol_2_index]) { if left_symbol.size + right_symbol.size != size_validation { return None; } if right_symbol.next != -1 { if let Some(next_next) = self.symbols.get_mut(right_symbol.next as usize).unwrap() { next_next.prev = symbol_1_index as isize; } } let new_symbol = Symbol { start_byte: left_symbol.start_byte, end_byte: right_symbol.end_byte, start_offset: left_symbol.start_offset, end_offset: right_symbol.end_offset, prev: left_symbol.prev, next: right_symbol.next, size: left_symbol.size + right_symbol.size, }; self.symbols[symbol_2_index] = None; self.symbols[symbol_1_index] = Some(new_symbol); Some(new_symbol) } else { None } } }
use crate::error::TokenizerError; use crate::tokenizer::base_tokenizer::{Token, TokenRef}; use crate::tokenizer::tokenization_utils::{is_punctuation, is_whitespace}; use crate::vocab::sentencepiece_proto::sentencepiece_model::ModelProto; use crate::{Mask, Offset, OffsetSize}; use hashbrown::HashMap; use protobuf::Message; use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fs::File; use std::io::Read; use std::ops::Index; #[derive(Debug, Clone)] pub struct BpeMergeVocab { pub values: HashMap<String, i64>, } pub struct SentencePieceBpeModel { bpe_ranks: BpeMergeVocab, } impl SentencePieceBpeModel { pub fn from_file(path: &str) -> Result<SentencePieceBpeModel, TokenizerError> { let mut f = File::open(path).map_err(|e| { TokenizerError::FileNotFound(format!("{} vocabulary file not found :{}", path, e)) })?; let mut contents = Vec::new(); let proto = match f.read_to_end(&mut contents) { Ok(_) => match ModelProto::parse_from_bytes(contents.as_slice()) { Ok(proto_value) => proto_value, Err(e) => { return Err(TokenizerError::VocabularyParsingError(e.to_string())); } }, Err(e) => { return Err(TokenizerError::VocabularyParsingError(e.to_string())); } }; let mut values = HashMap::new(); for (idx, piece) in proto.get_pieces().iter().enumerate() { values.insert(piece.get_piece().to_owned(), idx as i64); } let bpe_ranks = BpeMergeVocab { values }; Ok(SentencePieceBpeModel { bpe_ranks }) } pub fn tokenize_to_tokens(&self, initial_token: TokenRef) -> Vec<Token> { let mut sub_tokens = Vec::new(); if initial_token.mask != Mask::Special && initial_token.mask != Mask::Unknown { let mut agenda: BinaryHeap<SymbolPair> = BinaryHeap::new(); let mut symbols = SymbolList::from(initial_token); for symbol_index in 1..symbols.len() { self.maybe_add_pair( symbol_index as isize - 1, symbol_index as isize, initial_token.text, &symbols, &mut agenda, ); } while let Some(symbol_pair) = agenda.pop() { let left_symbol_index = symbol_pair.left; let right_symbol_index = 
symbol_pair.right; if left_symbol_index != -1 && right_symbol_index != -1 { let new_symbol = symbols.merge_symbols( left_symbol_index as usize, right_symbol_index as usize, symbol_pair.pair_size, ); if let Some(new_symbol) = new_symbol { self.maybe_add_pair( new_symbol.prev, left_symbol_index, initial_token.text, &symbols, &mut agenda, ); self.maybe_add_pair( left_symbol_index, new_symbol.next, initial_token.text, &symbols, &mut agenda, ); } } } for symbol in symbols.into_iter().flatten() { sub_tokens.push(Token { text: initial_token.text[symbol.start_byte..symbol.end_byte].to_string(), offset: Offset { begin: symbol.start_offset as OffsetSize + initial_token.offset.begin, end: symbol.end_offset as OffsetSize + initial_token.offset.begin, }, reference_offsets: initial_token.reference_offsets [symbol.start_offset..symbol.end_offset] .to_vec(), mask: Default::default(), }) } } else { sub_tokens.push(initial_token.to_owned()); } self.populate_masks(sub_tokens.as_mut_slice(), '\u{2581}'); sub_tokens } fn maybe_add_pair( &self, left_symbol_index: isize, right_symbol_index: isize, input_text: &str, symbols: &SymbolList, agenda: &mut BinaryHeap<SymbolPair>, ) { if left_symbol_index != -1 && right_symbol_index != -1 { if let (Some(left_symbol), Some(right_symbol)) = ( symbols[left_symbol_index as usize], symbols[right_symbol_index as usize], ) { let merged_text = &input_text[left_symbol.start_byte..right_symbol.end_byte]; if let Some(&score) = self.bpe_ranks.values.get(merged_text) { agenda.push(SymbolPair { left: left_symbol_index, right: right_symbol_index, score, pair_size: left_symbol.size + right_symbol.size, }) } } } } pub fn populate_masks(&self, tokens: &mut [Token], whitespace_token: char) { let mut previous_mask = Mask::None; for token in tokens { if token.text.chars().count() == 1 { let first_char = match token.text.chars().last() { Some(value) => value, None => { token.mask = Mask::Unknown; previous_mask = Mask::Unknown; continue; } }; if 
is_punctuation(&first_char) { token.mask = Mask::Punctuation; previous_mask = Mask::Punctuation; continue; } if is_whitespace(&first_char) { token.mask = Mask::Whitespace; previous_mask = Mask::Punctuation; continue; } } if !token.text.starts_with(whitespace_token) & !(previous_mask == Mask::Punctuation) & !(previous_mask == Mask::Whitespace) { token.mask = Mask::Continuation; previous_mask = Mask::Continuation; } else { previous_mask = Mask::None; } } } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct Symbol { start_byte: usize, end_byte: usize, start_offset: usize, end_offset: usize, prev: isize, next: isize, size: usize, } #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct SymbolPair { left: isize, right: isize, score: i64, pair_size: usize, } impl Ord for SymbolPair { fn cmp(&self, other: &Self) -> Ordering { other .score .cmp(&self.score) .then_with(|| other.left.cmp(&self.left)) } } impl PartialOrd for SymbolPair { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } struct SymbolList { symbols: Vec<Option<Symbol>>, } impl Index<usize> for SymbolList { type Output = Option<Symbol>; fn index(&self, index: usize) -> &Option<Symbol> { self.symbols.index(index) } } impl IntoIterator for SymbolList { type Item = Option<Symbol>; type IntoIter = <Vec<Option<Symbol>> as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.symbols.into_iter() } } impl From<TokenRef<'_>> for SymbolList { fn from(token: TokenRef) -> Self { let mut symbols = Vec::with_capacity(token.text.len()); for (index, (character_start, character)) in token.text.char_indices().enumerate() { let next = if index == token.text.char_indices().count() - 1 { -1 } else { (index + 1) as isize }; symbols.push(
); } Self { symbols } } } impl SymbolList { pub fn len(&self) -> usize { self.symbols.len() } pub fn merge_symbols( &mut self, symbol_1_index: usize, symbol_2_index: usize, size_validation: usize, ) -> Option<Symbol> { if let (Some(left_symbol), Some(right_symbol)) = (self[symbol_1_index], self[symbol_2_index]) { if left_symbol.size + right_symbol.size != size_validation { return None; } if right_symbol.next != -1 { if let Some(next_next) = self.symbols.get_mut(right_symbol.next as usize).unwrap() { next_next.prev = symbol_1_index as isize; } } let new_symbol = Symbol { start_byte: left_symbol.start_byte, end_byte: right_symbol.end_byte, start_offset: left_symbol.start_offset, end_offset: right_symbol.end_offset, prev: left_symbol.prev, next: right_symbol.next, size: left_symbol.size + right_symbol.size, }; self.symbols[symbol_2_index] = None; self.symbols[symbol_1_index] = Some(new_symbol); Some(new_symbol) } else { None } } }
Some(Symbol { start_byte: character_start, end_byte: character_start + character.len_utf8(), start_offset: index, end_offset: index + 1, prev: index as isize - 1, next, size: 1, })
call_expression
[ { "content": "fn bytes_offsets(text: &str) -> Vec<usize> {\n\n let mut offsets = Vec::with_capacity(text.len());\n\n for (char_idx, character) in text.chars().enumerate() {\n\n for _ in 0..character.len_utf8() {\n\n offsets.push(char_idx)\n\n }\n\n }\n\n offsets\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 0, "score": 331415.20155949885 }, { "content": "/// Replaces a pattern &str by a replacement &str keeping track of the offsets\n\n/// (all new characters in replacement have the same reference offset as the first pattern character as these may have a different size)\n\npub fn replace_string(token: &mut Token, pattern: &str, replacement_string: &str) {\n\n let pattern_len = pattern.len();\n\n let pattern_char_len = pattern.chars().count();\n\n let replacement_char_len = replacement_string.chars().count();\n\n let matches: Vec<usize> = token.text.rmatch_indices(pattern).map(|v| v.0).collect();\n\n let char_indices: HashMap<usize, usize> = token\n\n .text\n\n .char_indices()\n\n .enumerate()\n\n .map(|(idx, v)| (v.0, idx))\n\n .collect();\n\n for hit in matches {\n\n token\n\n .text\n\n .replace_range(hit..hit + pattern_len, replacement_string);\n\n let char_position = *char_indices.get(&hit).unwrap();\n\n let reference_offset: u32 = token.reference_offsets[char_position];\n\n token.reference_offsets.splice(\n\n char_position..char_position + pattern_char_len,\n\n vec![reference_offset; replacement_char_len],\n\n );\n\n }\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 1, "score": 290732.6236489136 }, { "content": "pub fn fix_mask(tokens: &mut Vec<Token>) {\n\n for i in 1..tokens.len() {\n\n if tokens[i].mask == Mask::Continuation && tokens[i - 1].mask == Mask::None {\n\n if let Some(token) = tokens.get_mut(i - 1) {\n\n token.mask = Mask::Begin;\n\n }\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn split_on_language_code<'a>(\n\n token: TokenRef<'a>,\n\n code_length: usize,\n\n language_codes_bytes: 
&HashSet<Vec<u8>>,\n\n) -> Vec<TokenRef<'a>> {\n\n if token.text.as_bytes().len() < code_length {\n\n return vec![token];\n\n }\n\n let mut tokens: Vec<TokenRef<'a>> = Vec::new();\n\n let mut begin_char: usize = 0usize;\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 2, "score": 274806.73682259594 }, { "content": "///Cleans text by removing control characters and normalizing whitespace\n\npub fn clean_text(token: &mut Token, strict: bool) {\n\n let capacity = token.text.capacity();\n\n let mut cleaned_string = String::with_capacity(capacity);\n\n let mut character_mapping: Vec<OffsetSize> = Vec::with_capacity(capacity);\n\n for (character, position) in token.text.chars().zip(token.reference_offsets.iter()) {\n\n if is_control(&character, strict)\n\n || character == '\\x00'\n\n || character == REPLACEMENT_CHARACTER\n\n {\n\n continue;\n\n }\n\n if is_whitespace(&character) {\n\n cleaned_string.push(' ');\n\n } else {\n\n cleaned_string.push(character);\n\n }\n\n character_mapping.push(*position);\n\n }\n\n token.text = cleaned_string;\n\n token.reference_offsets = character_mapping;\n\n token.offset.begin = *token.reference_offsets.first().unwrap_or(&(0));\n\n token.offset.end = *token.reference_offsets.last().unwrap_or(&(0)) + 1;\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 3, "score": 274337.76250836643 }, { "content": "///Split a token on one or more characters (given a character test function)\n\n/// * token: The token to split\n\n/// * test_character: A function that borrows a `char` and returns a boolean. If true, a split will be made here\n\n/// * add_separators: Add the separating characters to the tokens as well? 
(bool), separating tokens will be indicated in the returned mask by the value set in `set_mask`\n\npub fn split_on_char<'a, F>(\n\n token: TokenRef<'a>,\n\n test_character: F,\n\n add_separators: bool,\n\n set_mask: Mask,\n\n) -> Vec<TokenRef<'a>>\n\nwhere\n\n F: Fn(&char) -> bool,\n\n{\n\n let mut tokens: Vec<TokenRef<'a>> = Vec::new();\n\n let mut charbegin: usize = 0;\n\n let mut bytesbegin: usize = 0;\n\n let mut charcount: usize = 0;\n\n\n\n if token.mask == Mask::None {\n\n //iterate over all characters, returning the byte position with each\n\n for (char_idx, (bytes_idx, c)) in token.text.char_indices().enumerate() {\n\n charcount += 1;\n\n if test_character(&c) {\n\n if charbegin < char_idx {\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 4, "score": 271667.0951002269 }, { "content": "///Remove diacritics\n\npub fn lowercase(token: &mut Token) {\n\n let capacity = token.text.capacity();\n\n let mut lower_cased_string: String = String::with_capacity(capacity);\n\n let mut character_mapping: Vec<OffsetSize> = Vec::with_capacity(capacity);\n\n for (character, position) in token.text.chars().zip(token.reference_offsets.iter()) {\n\n for c in character.to_lowercase() {\n\n lower_cased_string.push(c);\n\n character_mapping.push(*position);\n\n }\n\n }\n\n token.text = lower_cased_string;\n\n token.reference_offsets = character_mapping;\n\n token.offset.begin = *token.reference_offsets.first().unwrap_or(&(0));\n\n token.offset.end = *token.reference_offsets.last().unwrap_or(&(0)) + 1;\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 5, "score": 261310.1731030109 }, { "content": "pub fn is_punctuation(character: &char) -> bool {\n\n let u32_char = *character as u32;\n\n if (33..=47).contains(&u32_char)\n\n | (58..=64).contains(&u32_char)\n\n | (91..=96).contains(&u32_char)\n\n | (123..=126).contains(&u32_char)\n\n {\n\n true\n\n } else {\n\n PUNCTUATION_CHARS.contains(&u32_char)\n\n }\n\n}\n\n\n", "file_path": 
"main/src/tokenizer/tokenization_utils.rs", "rank": 6, "score": 260822.1837108283 }, { "content": "pub fn is_whitespace(character: &char) -> bool {\n\n WHITESPACE_CHARS.contains(&(*character as u32))\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 7, "score": 260822.1837108283 }, { "content": "///NFKC decomposition\n\npub fn decompose_nfkc(token: &mut Token) {\n\n let capacity = token.text.capacity();\n\n let mut decomposed_string: String = String::with_capacity(capacity);\n\n let mut character_mapping: Vec<OffsetSize> = Vec::with_capacity(capacity);\n\n let mut cur_position: isize = 0;\n\n for (character, extra_char) in token.text.nfkc() {\n\n decomposed_string.push(character);\n\n character_mapping.push(token.reference_offsets[cur_position as usize]);\n\n cur_position = cur_position + 1 - extra_char;\n\n }\n\n token.text = decomposed_string;\n\n token.reference_offsets = character_mapping;\n\n token.offset.begin = *token.reference_offsets.first().unwrap_or(&(0));\n\n token.offset.end = *token.reference_offsets.last().unwrap_or(&(0)) + 1;\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 8, "score": 257794.07657997368 }, { "content": "///Remove diacritics\n\npub fn strip_accents(token: &mut Token) {\n\n let capacity = token.text.capacity();\n\n let mut decomposed_string: String = String::with_capacity(capacity);\n\n let mut character_mapping: Vec<OffsetSize> = Vec::with_capacity(capacity);\n\n for (character, position) in token.text.chars().zip(token.reference_offsets.iter()) {\n\n decompose_canonical(character, |c| {\n\n if !ACCENT_MARKERS.contains(&(c as u32)) {\n\n decomposed_string.push(c);\n\n character_mapping.push(*position);\n\n }\n\n });\n\n }\n\n token.text = decomposed_string;\n\n token.reference_offsets = character_mapping;\n\n token.offset.begin = *token.reference_offsets.first().unwrap_or(&(0));\n\n token.offset.end = *token.reference_offsets.last().unwrap_or(&(0)) + 1;\n\n}\n\n\n", 
"file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 9, "score": 257794.07657997368 }, { "content": "///Tokenize a token into word pieces according to the supplied vocabulary\n\n///Continuation word pieces will all have the suffix `##`\n\npub fn tokenize_wordpiece(token: TokenRef, vocab: &impl Vocab, max_word_len: usize) -> Vec<Token> {\n\n let mut tokens: Vec<Token> = Vec::new();\n\n if token.text.chars().count() > max_word_len {\n\n tokens.push(Token {\n\n text: BertVocab::unknown_value().to_owned(),\n\n offset: token.offset,\n\n reference_offsets: token.reference_offsets.to_vec(),\n\n mask: Mask::Unknown,\n\n });\n\n } else {\n\n let char_indices: Vec<usize> = token.text.char_indices().map(|v| v.0).collect();\n\n let max_end: usize =\n\n char_indices.last().unwrap() + token.text.chars().last().unwrap().len_utf8();\n\n let mut start: usize = 0; //bytes\n\n let mut pos_begin = 0; //chars\n\n let mut pos_end;\n\n let mut end;\n\n while start < max_end {\n\n //bytes\n\n end = max_end;\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 10, "score": 251825.94204974826 }, { "content": "///Tokenizes CJK characters, each character will be a token\n\npub fn tokenize_cjk_chars(token: TokenRef) -> Vec<TokenRef> {\n\n split_on_char(token, is_cjk_char, true, Mask::CJK)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 11, "score": 242722.22836607293 }, { "content": "/// This is a custom method to check if a character is a control character. The BERT tokenizer is\n\n/// taking any character whose unicode category starts with `C` as a control character, which includes\n\n/// the traditional control `Cc` category, but also the format `Cc`, private use `Co` and surrogate `Cs`.\n\n/// The unassigned unicode category `Cn` has been skipped in order to avoid unnecessary checks.\n\n/// A faster method may be called by setting strict to false and only check against the core control\n\n/// characters. 
To match the original BERT tokenization, this should remain true.\n\npub fn is_control(character: &char, strict: bool) -> bool {\n\n if ADDITIONAL_WHITESPACE_CHARS.contains(character) {\n\n false\n\n } else if strict {\n\n let u32_char = *character as u32;\n\n (u32_char <= 0x001F)\n\n | (0x0080..=0x009F).contains(&u32_char)\n\n | (0xE0020..=0xE007F).contains(&u32_char)\n\n | (0xE000..=0xF8FF).contains(&u32_char)\n\n | (0xF0000..=0xFFFFD).contains(&u32_char)\n\n | (0x100000..=0x10FFFD).contains(&u32_char)\n\n | (0xD800..=0xDB7F).contains(&u32_char)\n\n | (0xDB80..=0xDBFF).contains(&u32_char)\n\n | (0xDC00..=0xDFFF).contains(&u32_char)\n\n | CONTROL_CHARS.contains(&u32_char)\n\n } else {\n\n character.is_control()\n\n }\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 12, "score": 242500.0775818817 }, { "content": "/// Reads a SST2 dataset file and returns a vector of SST2 examples\n\n///\n\n/// # Arguments\n\n/// - path (`&str`): path to the SST2 file\n\n/// - sep (`u8`): separator for CSV parsing (default is a `\\t` for SST2 dataset files)\n\n///\n\n/// # Returns\n\n/// - `Result<Vec<Example>, TokenizerError>` containing the examples with their corresponding label\n\npub fn read_sst2(path: &str, sep: u8) -> Result<Vec<Example>, TokenizerError> {\n\n let mut examples: Vec<Example> = Vec::new();\n\n let f = File::open(path).expect(\"Could not open source file.\");\n\n\n\n let mut rdr = csv::ReaderBuilder::new()\n\n .has_headers(true)\n\n .delimiter(sep)\n\n .flexible(false)\n\n .from_reader(f);\n\n\n\n for result in rdr.records() {\n\n let record = result?;\n\n let example = Example::new(&record[0], \"\", &record[1])?;\n\n examples.push(example);\n\n }\n\n Ok(examples)\n\n}\n", "file_path": "main/src/adapters.rs", "rank": 13, "score": 239236.98692967865 }, { "content": "/// Split a token on one or more substrings (given a substring test function)\n\n/// * token: The token to split\n\n/// * test_str: A function that contains the string 
buffer from the current point forward and\n\n/// returns a 3-tuple with the length of the match in bytes, chars and the mask to set (if the\n\n/// length is zero then there is no match.\n\n/// * add_separators: Add the separating characters to the tokens as well? (bool), separating tokens\n\n/// will be indicated in the returned mask by the value set in `set_mask`, which is returned by the test_substr function\n\npub fn split_on_substr<'a, F>(\n\n token: TokenRef<'a>,\n\n test_substr: F,\n\n add_separators: bool,\n\n) -> Vec<TokenRef<'a>>\n\nwhere\n\n F: Fn(&'a str) -> (usize, usize, Mask),\n\n{\n\n let mut tokens: Vec<TokenRef<'a>> = Vec::new();\n\n let mut char_begin: usize = 0;\n\n let mut bytes_begin: usize = 0;\n\n let mut char_count: usize = 0;\n\n\n\n if token.mask == Mask::None {\n\n //don't process a token that already got marked in the mask\n\n //iterate over all characters, returning the byte position with each\n\n for (char_idx, (bytes_idx, _)) in token.text.char_indices().enumerate() {\n\n char_count += 1;\n\n let (matched_bytes, matched_chars, set_mask): (usize, usize, Mask) =\n\n test_substr(&token.text[bytes_idx..]);\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 14, "score": 232252.58857945347 }, { "content": "///Default bpe function, as called by Roberta and GPT2\n\npub fn bpe(token: &str, bpe_ranks: &BpePairVocab) -> (Vec<String>, Vec<usize>) {\n\n let sub_tokens = token\n\n .chars()\n\n .map(|v| v.to_string())\n\n .collect::<Vec<String>>();\n\n\n\n let mut output = (sub_tokens, false);\n\n loop {\n\n output = group_common_pairs(output.0, bpe_ranks);\n\n if output.1 {\n\n break;\n\n }\n\n }\n\n let char_counts = output.0.iter().map(|v| v.chars().count()).collect();\n\n (output.0, char_counts)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 15, "score": 230527.23608563273 }, { "content": "pub fn split_on_bpe_pairs<'a, F>(\n\n token: TokenRef<'a>,\n\n bpe_function: F,\n\n bpe_ranks: 
&BpePairVocab,\n\n cache: &BpeCache,\n\n as_bytes: bool,\n\n) -> Vec<Token>\n\nwhere\n\n F: Fn(&str, &BpePairVocab) -> (Vec<String>, Vec<usize>),\n\n{\n\n let mut tokens: Vec<Token> = Vec::new();\n\n let text: String;\n\n let reference_offsets_placeholder: Vec<OffsetSize>;\n\n let (text, reference_offsets) = if as_bytes {\n\n reference_offsets_placeholder = bytes_offsets(token.text)\n\n .iter()\n\n .map(|&pos| token.reference_offsets[pos])\n\n .collect();\n\n text = token\n\n .text\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 16, "score": 228287.52849731804 }, { "content": "pub fn ctrl_bpe(token: &str, bpe_ranks: &BpePairVocab) -> (Vec<String>, Vec<usize>) {\n\n let mut sub_tokens = token\n\n .chars()\n\n .map(|v| v.to_string())\n\n .collect::<Vec<String>>();\n\n\n\n if !sub_tokens.is_empty() {\n\n sub_tokens.last_mut().unwrap().push_str(\"</w>\");\n\n };\n\n\n\n let mut output = (sub_tokens, false);\n\n loop {\n\n output = group_common_pairs(output.0, bpe_ranks);\n\n if output.1 {\n\n break;\n\n }\n\n }\n\n\n\n let length = output.0.len();\n\n for (i, token) in output.0.iter_mut().enumerate() {\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 17, "score": 227289.73660652206 }, { "content": "pub fn download_file_to_cache(src: &str, target: &str) -> Result<PathBuf, reqwest::Error> {\n\n let mut home = dirs::home_dir().unwrap();\n\n home.push(\".cache\");\n\n home.push(\".rust_tokenizers\");\n\n home.push(target);\n\n if !home.exists() {\n\n let mut response = reqwest::blocking::get(src)?;\n\n fs::create_dir_all(home.parent().unwrap()).unwrap();\n\n let mut dest = File::create(&home).unwrap();\n\n copy(&mut response, &mut dest).unwrap();\n\n }\n\n Ok(home)\n\n}\n", "file_path": "main/tests/test_utils.rs", "rank": 18, "score": 226560.25257158026 }, { "content": "pub fn openai_gpt_bpe(token: &str, bpe_ranks: &BpePairVocab) -> (Vec<String>, Vec<usize>) {\n\n let mut sub_tokens = token\n\n .chars()\n\n .map(|v| 
v.to_string())\n\n .collect::<Vec<String>>();\n\n\n\n //the addition of </w> is basically the only difference between this function and the default bpe:\n\n if !sub_tokens.is_empty() {\n\n sub_tokens.last_mut().unwrap().push_str(\"</w>\");\n\n };\n\n\n\n let mut output = (sub_tokens, false);\n\n loop {\n\n output = group_common_pairs(output.0, bpe_ranks);\n\n if output.1 {\n\n break;\n\n }\n\n }\n\n let char_counts = output\n\n .0\n\n .iter()\n\n .map(|v| v.trim_end_matches(\"</w>\").chars().count())\n\n .collect();\n\n (output.0, char_counts)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 19, "score": 224178.0082012171 }, { "content": "///Split a text on special tokens (like BOS/EOS/UNK markers), depending on the vocabulary\n\npub fn split_on_special_tokens<'a>(token: TokenRef<'a>, vocab: &impl Vocab) -> Vec<TokenRef<'a>> {\n\n let test_substr = |s: &str| {\n\n for special_value in vocab.special_values().keys() {\n\n if s.starts_with(special_value.as_str()) {\n\n return (\n\n special_value.len(),\n\n special_value.chars().count(),\n\n if vocab.get_unknown_value() == special_value.as_str() {\n\n Mask::Unknown\n\n } else {\n\n Mask::Special\n\n },\n\n );\n\n }\n\n }\n\n (0, 0, Mask::None)\n\n };\n\n split_on_substr(token, test_substr, true)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 20, "score": 222339.50396496098 }, { "content": "fn is_cjk_char(character: &char) -> bool {\n\n let u32_char = *character as u32;\n\n (0x4E00..=0x9FFF).contains(&u32_char)\n\n | (0x3400..=0x4DBF).contains(&u32_char)\n\n | (0x20000..=0x2A6DF).contains(&u32_char)\n\n | (0x2A700..=0x2B73F).contains(&u32_char)\n\n | (0x2B740..=0x2B81F).contains(&u32_char)\n\n | (0x2B820..=0x2CEAF).contains(&u32_char)\n\n | (0xF900..=0xFAFF).contains(&u32_char)\n\n | (0x2F800..=0x2FA1F).contains(&u32_char)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 21, "score": 217635.55618047222 }, { "content": "///Simple 
tokenization based on whitespace only\n\npub fn whitespace_tokenize(token: TokenRef) -> Vec<TokenRef> {\n\n split_on_char(token, is_whitespace, false, Mask::Whitespace)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 22, "score": 210850.68613366154 }, { "content": "///Split a token on punctuation\n\npub fn split_on_punct(token: TokenRef) -> Vec<TokenRef> {\n\n split_on_char(token, is_punctuation, true, Mask::Punctuation)\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 23, "score": 209175.45570713223 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n}\n", "file_path": "main/src/vocab/sentencepiece_proto/sentencepiece_model.rs", "rank": 24, "score": 198139.19165820425 }, { "content": "/// # Truncates a sequence pair in place to the maximum length.\n\n///\n\n/// * tokens_1: list of tokenized input ids. Can be obtained from a string by chaining the\n\n/// `tokenize` and `convert_tokens_to_ids` methods.\n\n/// * tokens_2: Optional second list of input ids. Can be obtained from a string by chaining the\n\n/// `tokenize` and `convert_tokens_to_ids` methods.\n\n/// * offsets: list of offsets for tokens_1 (must be same length or empty if not used at all)\n\n/// * offsets_2: optional second list of offsets for tokens_2 (must be same length or empty if not used at all)\n\n/// * tokens_2: Optional second list of input ids. 
Can be obtained from a string by chaining the\n\n/// `tokenize` and `convert_tokens_to_ids` methods.\n\n/// * num_tokens_to_remove\n\n/// number of tokens to remove using the truncation strategy\n\n/// * truncation_strategy: truncation strategy\n\n/// - TruncationStrategy::LongestFirst (default) Iteratively reduce the inputs sequence until the input is under max_length\n\n/// starting from the longest one at each token (when there is a pair of input sequences).\n\n/// Overflowing tokens only contains overflow from the first sequence.\n\n/// - TruncationStrategy::OnlyFirst: Only truncate the first sequence. raise an error if the first sequence is shorter or equal to than num_tokens_to_remove.\n\n/// - TruncationStrategy::OnlySecond: Only truncate the second sequence\n\n/// - TruncationStrategy::DoNotTruncate: Does not truncate (raise an error if the input sequence is longer than max_length)\n\n/// * stride\n\n/// If set to a number along with max_length, the overflowing tokens returned will contain some tokens\n\n/// from the main sequence returned. 
The value of this argument defines the number of additional tokens.\n\npub fn truncate_sequences(\n\n mut token_ids_with_offsets_1: TokenIdsWithOffsets,\n\n mut token_ids_with_offsets_2: Option<TokenIdsWithOffsets>,\n\n num_tokens_to_remove: usize,\n\n truncation_strategy: &TruncationStrategy,\n\n stride: usize,\n\n) -> Result<\n\n (\n\n TokenIdsWithOffsets,\n\n Option<TokenIdsWithOffsets>,\n\n Vec<i64>,\n\n Vec<Option<Offset>>,\n\n ),\n\n TokenizerError,\n\n> {\n\n if num_tokens_to_remove == 0 {\n\n Ok((\n\n token_ids_with_offsets_1,\n\n token_ids_with_offsets_2,\n\n Vec::new(),\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 26, "score": 182981.87989311153 }, { "content": "pub fn split_at_regex<'a>(token: TokenRef<'a>, pattern_tokenization: &Regex) -> Vec<TokenRef<'a>> {\n\n let mut tokens: Vec<TokenRef<'a>> = Vec::new();\n\n let mut begin_char: usize = 0usize;\n\n let mut start_byte: usize = 0usize;\n\n for hit in pattern_tokenization.find_iter(token.text) {\n\n let hit_start_byte = hit.start();\n\n let hit_start_char = token.text[..hit_start_byte].chars().count();\n\n let hit_end_byte = hit.end();\n\n let hit_end_char = begin_char + hit.as_str().chars().count();\n\n\n\n if !&token.text[start_byte..hit_start_byte].trim().is_empty() {\n\n tokens.push(TokenRef {\n\n text: &token.text[start_byte..hit_start_byte],\n\n offset: Offset::new(\n\n token.offset.begin + begin_char as OffsetSize,\n\n token.offset.begin + hit_start_char as OffsetSize,\n\n ),\n\n reference_offsets: &token.reference_offsets[begin_char..hit_start_char],\n\n mask: Mask::None,\n\n });\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 27, "score": 182321.05171161733 }, { "content": "pub fn split_on_regex<'a>(token: TokenRef<'a>, pattern_tokenization: &Regex) -> Vec<TokenRef<'a>> {\n\n let mut tokens: Vec<TokenRef<'a>> = Vec::new();\n\n let mut begin_char: usize = 0usize;\n\n for hit in pattern_tokenization.find_iter(token.text) {\n\n let start_byte = 
hit.start();\n\n if start_byte > 0 {\n\n begin_char = token.text[..start_byte].chars().count();\n\n }\n\n let end_char = begin_char + hit.as_str().chars().count();\n\n tokens.push(TokenRef {\n\n text: hit.as_str(),\n\n offset: Offset::new(\n\n token.offset.begin + begin_char as OffsetSize,\n\n token.offset.begin + end_char as OffsetSize,\n\n ),\n\n reference_offsets: &token.reference_offsets[begin_char..end_char],\n\n mask: Mask::None,\n\n });\n\n begin_char = end_char;\n\n }\n\n tokens\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 28, "score": 182321.05171161733 }, { "content": "pub fn split_on_regex_with_lookahead<'a>(\n\n token: TokenRef<'a>,\n\n pattern_lookahead: &Regex,\n\n pattern_tokenization: &Regex,\n\n) -> Vec<TokenRef<'a>> {\n\n if token.mask == Mask::None {\n\n let mut sub_words: Vec<&str> = vec![];\n\n let mut splits: Vec<&str> = vec![];\n\n\n\n let mut i: usize = 0;\n\n let mut end_byte: usize;\n\n for hit in pattern_lookahead.find_iter(token.text) {\n\n let mut hit_chars = hit.as_str().chars().rev();\n\n let start = hit_chars.next().unwrap();\n\n let sep = hit_chars.next().unwrap();\n\n end_byte = hit.end() - sep.len_utf8() - start.len_utf8();\n\n splits.push(&token.text[i..end_byte]);\n\n i = end_byte;\n\n }\n\n splits.push(&token.text[i..]);\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 31, "score": 174232.62026963892 }, { "content": "pub fn get_pairs(token: &[String]) -> Option<HashSet<BpePairRef>> {\n\n match token.len() {\n\n 0 | 1 => None,\n\n _ => {\n\n let mut output: HashSet<BpePairRef> = HashSet::with_capacity(token.len());\n\n for idx in 0..token.len() - 1 {\n\n if let [byte_1, byte_2] = &token[idx..idx + 2] {\n\n output.insert(BpePairRef { byte_1, byte_2 });\n\n }\n\n }\n\n Some(output)\n\n }\n\n }\n\n}\n\n\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 32, "score": 165914.03879154383 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct 
PySentencePieceTokenizer {\n\n tokenizer: SentencePieceTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<SentencePieceTokenizer, SentencePieceVocab> for PySentencePieceTokenizer {\n\n fn tokenizer(&self) -> &SentencePieceTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<SentencePieceTokenizer, SentencePieceVocab>\n\n for PySentencePieceTokenizer\n\n{\n\n}\n\n\n\n#[pymethods]\n\nimpl PySentencePieceTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PySentencePieceTokenizer {\n", "file_path": "python-bindings/src/lib.rs", "rank": 33, "score": 148779.54578400383 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PySentencePieceBpeTokenizer {\n\n tokenizer: SentencePieceBpeTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<SentencePieceBpeTokenizer, SentencePieceVocab> for PySentencePieceBpeTokenizer {\n\n fn tokenizer(&self) -> &SentencePieceBpeTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<SentencePieceBpeTokenizer, SentencePieceVocab>\n\n for PySentencePieceBpeTokenizer\n\n{\n\n}\n\n\n\n#[pymethods]\n\nimpl PySentencePieceBpeTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PySentencePieceBpeTokenizer {\n", "file_path": "python-bindings/src/lib.rs", "rank": 34, "score": 145590.86519396753 }, { "content": "#[test]\n\nfn test_sentence_piece_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/xlnet-base-cased-spiece.model\",\n\n \"spiece.model\",\n\n )\n\n .unwrap();\n\n\n\n let sentence_piece_tokenizer =\n\n SentencePieceTokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let 
expected_results = [\n\n TokenizedInput {\n", "file_path": "main/tests/test_sentence_piece.rs", "rank": 35, "score": 144471.68930352613 }, { "content": "fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {\n\n ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()\n\n}\n\n\n", "file_path": "main/src/vocab/sentencepiece_proto/sentencepiece_model.rs", "rank": 36, "score": 141975.67146202267 }, { "content": "pub fn group_common_pairs(tokens: Vec<String>, bpe_ranks: &BpePairVocab) -> (Vec<String>, bool) {\n\n if let Some(pairs) = get_pairs(&tokens) {\n\n let bigram = pairs\n\n .iter()\n\n .min_by_key(|pair| match bpe_ranks.byte_pair_to_id(pair) {\n\n Some(&rank) => rank,\n\n None => i64::MAX,\n\n })\n\n .unwrap();\n\n if bpe_ranks.byte_pair_to_id(bigram).is_none() {\n\n return (tokens, true);\n\n }\n\n let mut temp_sub_tokens: Vec<String> = Vec::with_capacity(tokens.len());\n\n let mut i = 0;\n\n\n\n while i < tokens.len() {\n\n let j = if let Some(index) = &tokens[i..].iter().position(|r| r == bigram.byte_1) {\n\n index + i\n\n } else {\n\n temp_sub_tokens.extend_from_slice(&tokens[i..]);\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 37, "score": 133949.66748486154 }, { "content": "#[test]\n\nfn test_m2m100_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://huggingface.co/facebook/m2m100_418M/resolve/main/sentencepiece.bpe.model\",\n\n \"m2m100_419M_spiece.model\",\n\n )\n\n .unwrap();\n\n\n\n let mbart_tokenizer =\n\n SentencePieceBpeTokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n", "file_path": 
"main/tests/test_sentence_piece_bpe.rs", "rank": 38, "score": 132470.70494371644 }, { "content": "fn truncate_with_overflow(\n\n sequence: &mut Vec<i64>,\n\n offsets: &mut Vec<Option<Offset>>,\n\n original_positions: &mut Vec<Vec<OffsetSize>>,\n\n mask: &mut Vec<Mask>,\n\n num_tokens_to_remove: usize,\n\n stride: usize,\n\n) -> (Vec<i64>, Vec<Option<Offset>>) {\n\n if !offsets.is_empty() {\n\n assert_eq!(sequence.len(), offsets.len());\n\n }\n\n if !mask.is_empty() {\n\n assert_eq!(sequence.len(), mask.len());\n\n }\n\n let cutoff = sequence.len() - num_tokens_to_remove;\n\n let mut overflow_tokens = sequence.split_off(cutoff);\n\n let mut overflow_offsets = if !offsets.is_empty() {\n\n offsets.split_off(cutoff)\n\n } else {\n\n Vec::new()\n", "file_path": "main/src/tokenizer/tokenization_utils.rs", "rank": 39, "score": 120213.14670455884 }, { "content": "/// Token abstraction trait to access token fields, irrespective of their form (reference of owned)\n\npub trait TokenTrait {\n\n /// Returns the offset of the token with respect to the original string\n\n fn offset(&self) -> Option<Offset>;\n\n /// Returns the token mask\n\n fn mask(&self) -> Mask;\n\n /// Returns a string representation for the token\n\n fn as_str(&self) -> &str;\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy, Eq)]\n\n/// Reference token that references the original text, with a string slice representation\n\npub struct TokenRef<'a> {\n\n /// String representation\n\n pub text: &'a str,\n\n /// Start and end positions of the token with respect to the original text\n\n pub offset: Offset,\n\n /// Sequence of positions with respect to the original text contained in the token.\n\n /// For example, if the token offset is `start: 4, end: 10`, corresponding reference_offsets are `[4, 5, 6, 7, 8, 9]`\n\n pub reference_offsets: &'a [OffsetSize],\n\n /// Mask indicating the type of the token\n", "file_path": "main/src/tokenizer/base_tokenizer.rs", "rank": 40, "score": 120000.93936292427 }, { 
"content": "/// # ConsolidatableTokens\n\n///\n\n/// This trait can be implemented for collections of tokens (i.e. things that implement `TokenTrait`)\n\n/// and instantiates an iterator to quickly iterate over the tokens in consolidated form, e.g.\n\n/// grouping subtokens into words.\n\n///\n\n/// ```no_run\n\n/// use rust_tokenizers::{ConsolidatableTokens, Token};\n\n/// let tokens: Vec<Token> = vec![]; //add some tokens\n\n/// for (wordcount, word_tokens) in tokens.iter_consolidate_tokens().enumerate() {\n\n/// eprintln!(\"word #{} - {:?}\", wordcount + 1, word_tokens);\n\n/// }\n\n/// ```\n\npub trait ConsolidatableTokens<T>\n\nwhere\n\n T: TokenTrait,\n\n{\n\n /// Creates an iterator from a sequence of `ConsolidatableTokens`.\n\n fn iter_consolidate_tokens(&self) -> ConsolidatedTokenIterator<T>;\n\n}\n\n\n\nimpl ConsolidatableTokens<Token> for Vec<Token> {\n\n fn iter_consolidate_tokens(&self) -> ConsolidatedTokenIterator<Token> {\n\n ConsolidatedTokenIterator::new(self)\n\n }\n\n}\n\n\n\nimpl<'a> ConsolidatableTokens<TokenRef<'a>> for Vec<TokenRef<'a>> {\n\n fn iter_consolidate_tokens(&self) -> ConsolidatedTokenIterator<TokenRef<'a>> {\n\n ConsolidatedTokenIterator::new(self)\n\n }\n\n}\n\n\n", "file_path": "main/src/tokenizer/base_tokenizer.rs", "rank": 41, "score": 115361.66414203093 }, { "content": "/// # Base trait for tokenizers\n\npub trait Tokenizer<T: Vocab> {\n\n /// returns a reference to the tokenizer vocabulary\n\n fn vocab(&self) -> &T;\n\n\n\n /// Tokenize a string, returns a vector of tokens as strings.\n\n /// Use `tokenize_with_offsets` or `tokenize_to_tokens` to return offset information.\n\n ///\n\n /// # Parameters\n\n /// - text : text (string-like) to tokenize\n\n ///\n\n /// # Returns\n\n /// `Vec<String>` containing the tokens string representation\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_tokenizers::tokenizer::{BaseTokenizer, Tokenizer};\n\n /// use rust_tokenizers::vocab::BaseVocab;\n\n /// let 
strip_accents = false;\n\n /// let lower_case = false;\n", "file_path": "main/src/tokenizer/base_tokenizer.rs", "rank": 42, "score": 113021.08282468189 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyPegasusTokenizer {\n\n tokenizer: PegasusTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<PegasusTokenizer, PegasusVocab> for PyPegasusTokenizer {\n\n fn tokenizer(&self) -> &PegasusTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<PegasusTokenizer, PegasusVocab> for PyPegasusTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyPegasusTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PyPegasusTokenizer {\n\n tokenizer: PegasusTokenizer::from_file(path.as_str(), do_lower_case).unwrap(),\n\n }\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 43, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyCtrlTokenizer {\n\n tokenizer: CtrlTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<CtrlTokenizer, OpenAiGptVocab> for PyCtrlTokenizer {\n\n fn tokenizer(&self) -> &CtrlTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<CtrlTokenizer, OpenAiGptVocab> for PyCtrlTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyCtrlTokenizer {\n\n #[new]\n\n fn new(vocab_path: String, merges_path: String, do_lower_case: bool) -> Self {\n\n PyCtrlTokenizer {\n\n tokenizer: CtrlTokenizer::from_file(\n\n vocab_path.as_str(),\n\n merges_path.as_str(),\n", "file_path": "python-bindings/src/lib.rs", "rank": 44, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyAlbertTokenizer {\n\n tokenizer: AlbertTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<AlbertTokenizer, AlbertVocab> for PyAlbertTokenizer {\n\n fn tokenizer(&self) -> &AlbertTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<AlbertTokenizer, AlbertVocab> for PyAlbertTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyAlbertTokenizer {\n\n #[new]\n\n fn 
new(path: String, do_lower_case: bool, strip_accents: bool) -> Self {\n\n PyAlbertTokenizer {\n\n tokenizer: AlbertTokenizer::from_file(path.as_str(), do_lower_case, strip_accents)\n\n .unwrap(),\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 45, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyGpt2Tokenizer {\n\n tokenizer: Gpt2Tokenizer,\n\n}\n\n\n\nimpl PyTokenizer<Gpt2Tokenizer, Gpt2Vocab> for PyGpt2Tokenizer {\n\n fn tokenizer(&self) -> &Gpt2Tokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<Gpt2Tokenizer, Gpt2Vocab> for PyGpt2Tokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyGpt2Tokenizer {\n\n #[new]\n\n fn new(vocab_path: String, merges_path: String, do_lower_case: bool) -> Self {\n\n PyGpt2Tokenizer {\n\n tokenizer: Gpt2Tokenizer::from_file(\n\n vocab_path.as_str(),\n\n &merges_path.as_str(),\n", "file_path": "python-bindings/src/lib.rs", "rank": 46, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyReformerTokenizer {\n\n tokenizer: ReformerTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<ReformerTokenizer, ReformerVocab> for PyReformerTokenizer {\n\n fn tokenizer(&self) -> &ReformerTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<ReformerTokenizer, ReformerVocab> for PyReformerTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyReformerTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PyReformerTokenizer {\n\n tokenizer: ReformerTokenizer::from_file(path.as_str(), do_lower_case).unwrap(),\n\n }\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 47, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyT5Tokenizer {\n\n tokenizer: T5Tokenizer,\n\n}\n\n\n\nimpl PyTokenizer<T5Tokenizer, T5Vocab> for PyT5Tokenizer {\n\n fn tokenizer(&self) -> &T5Tokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl 
PyMultiThreadTokenizer<T5Tokenizer, T5Vocab> for PyT5Tokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyT5Tokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PyT5Tokenizer {\n\n tokenizer: T5Tokenizer::from_file(path.as_str(), do_lower_case).unwrap(),\n\n }\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 48, "score": 110832.52065944235 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyRobertaTokenizer {\n\n tokenizer: RobertaTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<RobertaTokenizer, RobertaVocab> for PyRobertaTokenizer {\n\n fn tokenizer(&self) -> &RobertaTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<RobertaTokenizer, RobertaVocab> for PyRobertaTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyRobertaTokenizer {\n\n #[new]\n\n fn new(\n\n vocab_path: String,\n\n merges_path: String,\n\n do_lower_case: bool,\n\n add_prefix_space: bool,\n", "file_path": "python-bindings/src/lib.rs", "rank": 49, "score": 110832.52065944235 }, { "content": "#[pyclass(dict, module = \"rust_tokenizers\")]\n\nstruct PyBertTokenizer {\n\n tokenizer: BertTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<BertTokenizer, BertVocab> for PyBertTokenizer {\n\n fn tokenizer(&self) -> &BertTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<BertTokenizer, BertVocab> for PyBertTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyBertTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool, strip_accents: bool) -> Self {\n\n PyBertTokenizer {\n\n tokenizer: BertTokenizer::from_file(path.as_str(), do_lower_case, strip_accents)\n\n .unwrap(),\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 50, "score": 110832.47249320592 }, { "content": "/// # Extension for multithreaded tokenizers\n\npub trait MultiThreadedTokenizer<T: Vocab>\n\nwhere\n\n Self: std::marker::Sync + Send + Tokenizer<T>,\n\n{\n\n /// returns a reference to the tokenizer vocabulary\n\n fn vocab(&self) -> &T {\n\n 
Tokenizer::<T>::vocab(self)\n\n }\n\n\n\n /// Tokenize a list of strings (with multithreading), where each corresponds to for example a sentence, returns a\n\n /// vector of TokensWithOffsets containing the tokens and their offset information. This calls\n\n /// `tokenize_with_offsets` on the list provided.\n\n ///\n\n /// # Parameters\n\n /// - text_list: list of strings to tokenize\n\n ///\n\n /// # Returns\n\n /// `Vec<TokensWithOffsets>` with the token strings representation and offsets\n\n ///\n\n /// # Example\n", "file_path": "main/src/tokenizer/base_tokenizer.rs", "rank": 51, "score": 109682.82641746673 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyXLNetTokenizer {\n\n tokenizer: XLNetTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<XLNetTokenizer, XLNetVocab> for PyXLNetTokenizer {\n\n fn tokenizer(&self) -> &XLNetTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<XLNetTokenizer, XLNetVocab> for PyXLNetTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyXLNetTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool, strip_accents: bool) -> Self {\n\n PyXLNetTokenizer {\n\n tokenizer: XLNetTokenizer::from_file(path.as_str(), do_lower_case, strip_accents)\n\n .unwrap(),\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 52, "score": 108618.29900349333 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyMBart50Tokenizer {\n\n tokenizer: MBart50Tokenizer,\n\n}\n\n\n\nimpl PyTokenizer<MBart50Tokenizer, MBart50Vocab> for PyMBart50Tokenizer {\n\n fn tokenizer(&self) -> &MBart50Tokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<MBart50Tokenizer, MBart50Vocab> for PyMBart50Tokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyMBart50Tokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PyMBart50Tokenizer {\n\n tokenizer: MBart50Tokenizer::from_file(path.as_str(), do_lower_case).unwrap(),\n\n }\n\n }\n", "file_path": "python-bindings/src/lib.rs", 
"rank": 53, "score": 108618.29900349333 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyXLMRobertaTokenizer {\n\n tokenizer: XLMRobertaTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<XLMRobertaTokenizer, XLMRobertaVocab> for PyXLMRobertaTokenizer {\n\n fn tokenizer(&self) -> &XLMRobertaTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<XLMRobertaTokenizer, XLMRobertaVocab> for PyXLMRobertaTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyXLMRobertaTokenizer {\n\n #[new]\n\n fn new(path: String, do_lower_case: bool) -> Self {\n\n PyXLMRobertaTokenizer {\n\n tokenizer: XLMRobertaTokenizer::from_file(path.as_str(), do_lower_case).unwrap(),\n\n }\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 54, "score": 108618.29900349333 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyM2M100Tokenizer {\n\n tokenizer: M2M100Tokenizer,\n\n}\n\n\n\nimpl PyTokenizer<M2M100Tokenizer, M2M100Vocab> for PyM2M100Tokenizer {\n\n fn tokenizer(&self) -> &M2M100Tokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<M2M100Tokenizer, M2M100Vocab> for PyM2M100Tokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyM2M100Tokenizer {\n\n #[new]\n\n fn new(vocab_path: String, merges_path: String, do_lower_case: bool) -> Self {\n\n PyM2M100Tokenizer {\n\n tokenizer: M2M100Tokenizer::from_files(\n\n vocab_path.as_str(),\n\n merges_path.as_str(),\n", "file_path": "python-bindings/src/lib.rs", "rank": 55, "score": 108618.29900349333 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyProphetNetTokenizer {\n\n tokenizer: ProphetNetTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<ProphetNetTokenizer, ProphetNetVocab> for PyProphetNetTokenizer {\n\n fn tokenizer(&self) -> &ProphetNetTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<ProphetNetTokenizer, ProphetNetVocab> for PyProphetNetTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyProphetNetTokenizer {\n\n #[new]\n\n fn new(path: String, 
do_lower_case: bool, strip_accents: bool) -> Self {\n\n PyProphetNetTokenizer {\n\n tokenizer: ProphetNetTokenizer::from_file(path.as_str(), do_lower_case, strip_accents)\n\n .unwrap(),\n\n }\n", "file_path": "python-bindings/src/lib.rs", "rank": 56, "score": 108618.29900349333 }, { "content": "#[pyclass(module = \"rust_tokenizers\")]\n\nstruct PyOpenAiGptTokenizer {\n\n tokenizer: OpenAiGptTokenizer,\n\n}\n\n\n\nimpl PyTokenizer<OpenAiGptTokenizer, OpenAiGptVocab> for PyOpenAiGptTokenizer {\n\n fn tokenizer(&self) -> &OpenAiGptTokenizer {\n\n &self.tokenizer\n\n }\n\n}\n\n\n\nimpl PyMultiThreadTokenizer<OpenAiGptTokenizer, OpenAiGptVocab> for PyOpenAiGptTokenizer {}\n\n\n\n#[pymethods]\n\nimpl PyOpenAiGptTokenizer {\n\n #[new]\n\n fn new(vocab_path: String, merges_path: String, do_lower_case: bool) -> Self {\n\n PyOpenAiGptTokenizer {\n\n tokenizer: OpenAiGptTokenizer::from_file(\n\n vocab_path.as_str(),\n\n merges_path.as_str(),\n", "file_path": "python-bindings/src/lib.rs", "rank": 57, "score": 106524.84414249138 }, { "content": "#[test]\n\nfn test_t5_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/t5-spiece.model\",\n\n \"t5-spiece.model\",\n\n )\n\n .unwrap();\n\n\n\n let t5_tokenizer = T5Tokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![100, 19, 3, 9, 3106, 7142, 12, 36, 12, 157, 154, 29, 1601, 1],\n", "file_path": "main/tests/test_t5_cased.rs", "rank": 58, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_mbart50_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n 
\"https://huggingface.co/facebook/mbart-large-50-many-to-many-mmt/resolve/main/sentencepiece.bpe.model\",\n\n \"mbart50_spiece.model\",\n\n )?;\n\n\n\n let mbart_tokenizer = MBart50Tokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \">>en<< This is a sample sentence to be tokénized\",\n\n \">>en<< Wondering how this will get tokenized 🤔 ?\",\n\n \">>fr<< İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \">>hi<< İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \">>lt<< � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![\n\n 250004, 3293, 83, 10, 121413, 149357, 47, 186, 25636, 2746, 29367, 2,\n", "file_path": "main/tests/test_mbart50_cased.rs", "rank": 59, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_albert_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/albert-base-v2-spiece.model\",\n\n \"albert-base-v2-spiece.model\",\n\n )?;\n\n\n\n let albert_tokenizer = AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![2, 48, 25, 21, 5717, 5123, 20, 44, 20, 2853, 1333, 3],\n\n segment_ids: vec![],\n", "file_path": "main/tests/test_albert_uncased.rs", "rank": 60, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_ctrl_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://raw.githubusercontent.com/salesforce/ctrl/master/ctrl-vocab.json\",\n\n \"ctrl_vocab.json\",\n\n )?;\n\n\n\n let merges_path = download_file_to_cache(\n\n 
\"https://raw.githubusercontent.com/salesforce/ctrl/master/ctrl-merges.txt\",\n\n \"ctrl_merges.txt\",\n\n )?;\n\n\n\n let ctrl_tokenizer = CtrlTokenizer::from_file(\n\n vocab_path.to_str().unwrap(),\n\n merges_path.to_str().unwrap(),\n\n false,\n\n )?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n", "file_path": "main/tests/test_ctrl_cased.rs", "rank": 61, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_m2m100_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://huggingface.co/facebook/m2m100_418M/resolve/main/vocab.json\",\n\n \"m2m100_419M_vocab.json\",\n\n )\n\n .unwrap();\n\n\n\n let merges_path = download_file_to_cache(\n\n \"https://huggingface.co/facebook/m2m100_418M/resolve/main/sentencepiece.bpe.model\",\n\n \"m2m100_419M_spiece.model\",\n\n )\n\n .unwrap();\n\n\n\n let mbart_tokenizer = M2M100Tokenizer::from_files(\n\n vocab_path.to_str().unwrap(),\n\n merges_path.to_str().unwrap(),\n\n false,\n\n )?;\n\n\n\n let original_strings = [\n", "file_path": "main/tests/test_m2m100_cased.rs", "rank": 62, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_xlnet_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://cdn.huggingface.co/xlnet-base-cased-spiece.model\",\n\n \"xlnet-base-cased-spiece.model\",\n\n )?;\n\n\n\n let xlnet_tokenizer = XLNetTokenizer::from_file(vocab_path.to_str().unwrap(), false, true)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![122, 27, 24, 4561, 3833, 22, 39, 17366, 1227, 4, 3],\n\n segment_ids: vec![],\n", "file_path": 
"main/tests/test_xlnet_cased.rs", "rank": 63, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_pegasus_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://cdn.huggingface.co/google/pegasus-cnn_dailymail/spiece.model\",\n\n \"pegasus-cnn_dailymail-spiece.model\",\n\n )?;\n\n\n\n let pegasus_tokenizer = PegasusTokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![\n\n 182, 117, 114, 2630, 5577, 112, 129, 112, 1052, 32600, 3792, 1,\n", "file_path": "main/tests/test_pegasus_cased.rs", "rank": 64, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_bert_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt\",\n\n \"bert-base-uncased_vocab.txt\",\n\n )\n\n .unwrap();\n\n\n\n let vocab = BertVocab::from_file(vocab_path.to_str().unwrap())?;\n\n let bert_tokenizer: BertTokenizer = BertTokenizer::from_existing_vocab(vocab, true, true);\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \"İs th!s 𩸽 [SEP] Ϻ Šœ Uglj[SEP]šić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" �� İs th!s ���� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n", "file_path": "main/tests/test_bert_uncased.rs", "rank": 65, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_roberta_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n 
\"https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-vocab.json\",\n\n \"roberta_vocab.json\",\n\n )\n\n .unwrap();\n\n\n\n let merges_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-merges.txt\",\n\n \"roberta_merges.txt\",\n\n )\n\n .unwrap();\n\n\n\n let roberta_tokenizer = RobertaTokenizer::from_file(\n\n vocab_path.to_str().unwrap(),\n\n merges_path.to_str().unwrap(),\n\n false,\n\n true,\n\n )?;\n\n\n", "file_path": "main/tests/test_roberta_cased.rs", "rank": 66, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_bert_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://cdn.huggingface.co/google/reformer-crime-and-punishment/spiece.model\",\n\n \"reformer.spiece\",\n\n )\n\n .unwrap();\n\n\n\n let bert_tokenizer: ReformerTokenizer =\n\n ReformerTokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \"İs th!s 𩸽 [SEP] Ϻ Šœ Uglj[SEP]šić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" �� İs th!s ���� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n", "file_path": "main/tests/test_reformer_cased.rs", "rank": 67, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_prophetnet_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://huggingface.co/microsoft/prophetnet-large-uncased/resolve/main/prophetnet.tokenizer\",\n\n \"prophetnet.tokenizer\",\n\n )?;\n\n\n\n let vocab = ProphetNetVocab::from_file(vocab_path.to_str().unwrap())?;\n\n let bert_tokenizer: ProphetNetTokenizer =\n\n ProphetNetTokenizer::from_existing_vocab(vocab, true, true);\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n 
\"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \"İs th!s 𩸽 [SEP] Ϻ Šœ Uglj[SEP]šić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" �� İs th!s ���� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n", "file_path": "main/tests/test_prophetnet_uncased.rs", "rank": 68, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_gpt2_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json\",\n\n \"gpt2_vocab.json\",\n\n )\n\n .unwrap();\n\n\n\n let merges_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt\",\n\n \"gpt2_merges.txt\",\n\n )\n\n .unwrap();\n\n\n\n let vocab = Gpt2Vocab::from_file(vocab_path.to_str().unwrap())?;\n\n let merges = BpePairVocab::from_file(merges_path.to_str().unwrap())?;\n\n\n\n let gpt2_tokenizer = Gpt2Tokenizer::from_existing_vocab_and_merges(vocab, merges, false);\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n", "file_path": "main/tests/test_gpt2_cased.rs", "rank": 69, "score": 97302.05003424789 }, { "content": "#[test]\n\nfn test_xlm_roberta_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\"https://cdn.huggingface.co/xlm-roberta-large-finetuned-conll03-english-sentencepiece.bpe.model\",\n\n \"xlm-roberta-spiece.model\")?;\n\n\n\n let xlm_roberta_tokenizer =\n\n XLMRobertaTokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n\n\n let original_strings = [\n\n \"This is a sample sentence to be tokénized\",\n\n \"Wondering how this will get tokenized 🤔 ?\",\n\n \"İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng\",\n\n \" İs th!s 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n \" � İs th!s �� 𩸽 Ϻ Šœ Ugljšić dấu nặng \",\n\n ];\n\n\n\n let expected_results = [\n\n TokenizedInput {\n\n token_ids: vec![\n\n 0, 3293, 83, 10, 121413, 149357, 47, 186, 25636, 2746, 29367, 2,\n\n ],\n", "file_path": 
"main/tests/test_xlm_roberta_uncased.rs", "rank": 70, "score": 93642.97156610762 }, { "content": "#[test]\n\nfn test_openai_gpt_tokenization() -> anyhow::Result<()> {\n\n let vocab_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-vocab.json\",\n\n \"openai-gpt_vocab.json\",\n\n )\n\n .unwrap();\n\n\n\n let merges_path = download_file_to_cache(\n\n \"https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-merges.txt\",\n\n \"openai-gpt_merges.txt\",\n\n )\n\n .unwrap();\n\n\n\n let openai_gpt_tokenizer = OpenAiGptTokenizer::from_file(\n\n vocab_path.to_str().unwrap(),\n\n merges_path.to_str().unwrap(),\n\n true,\n\n )?;\n\n\n\n let original_strings = [\n", "file_path": "main/tests/test_openai_gpt_uncased.rs", "rank": 71, "score": 93642.97156610762 }, { "content": " decompose_nfkc(&mut token);\n\n if self.lower_case {\n\n lowercase(&mut token);\n\n }\n\n token.text = token.text.replace(|c: char| is_whitespace(&c), \"\\u{2581}\");\n\n if !token.text.starts_with('\\u{2581}') {\n\n token.text.insert(0, '\\u{2581}');\n\n token.reference_offsets.insert(0, 0);\n\n };\n\n let output = self.model.decode_forward_token_ref(token.as_ref());\n\n let decoded = self.model.decode_backward(&output);\n\n self.model.parse_nodes_to_tokens(decoded)\n\n }\n\n\n\n fn convert_tokens_to_string(&self, tokens: Vec<String>) -> String {\n\n tokens\n\n .into_iter()\n\n .map(|v| v.replace('\\u{2581}', \" \"))\n\n .collect::<Vec<String>>()\n\n .join(\"\")\n\n }\n\n}\n\n\n\nimpl MultiThreadedTokenizer<SentencePieceVocab> for SentencePieceTokenizer {}\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 72, "score": 91047.84991896762 }, { "content": " /// Create a new instance of a `SentencePieceTokenizer` from an existing vocabulary and model\n\n ///\n\n /// # Parameters\n\n /// - vocab (`SentencePieceVocab`): vocabulary\n\n /// - model (`SentencePieceModel`): SentencePiece model\n\n /// - lower_case (`bool`): flag 
indicating if the text should be lower-cased as part of the tokenization\n\n /// - strip_accents (`bool`): flag indicating if accents should be stripped from the text\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_tokenizers::tokenizer::{SentencePieceTokenizer, Tokenizer};\n\n /// use rust_tokenizers::vocab::{SentencePieceModel, SentencePieceVocab, Vocab};\n\n /// let lower_case = false;\n\n /// let vocab = SentencePieceVocab::from_file(\"path/to/vocab/file\").unwrap();\n\n /// let model = SentencePieceModel::from_file(\"path/to/model/file\").unwrap();\n\n ///\n\n /// let tokenizer = SentencePieceTokenizer::from_existing_vocab_and_model(vocab, model, lower_case);\n\n /// ```\n\n pub fn from_existing_vocab_and_model(\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 73, "score": 91044.92680600195 }, { "content": " /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_tokenizers::tokenizer::{SentencePieceTokenizer, Tokenizer};\n\n /// let lower_case = false;\n\n /// let tokenizer = SentencePieceTokenizer::from_file(\"path/to/vocab/file\", lower_case).unwrap();\n\n /// ```\n\n pub fn from_file(\n\n path: &str,\n\n lower_case: bool,\n\n ) -> Result<SentencePieceTokenizer, TokenizerError> {\n\n let model = SentencePieceModel::from_file(path)?;\n\n let vocab = SentencePieceVocab::from_file(path)?;\n\n Ok(SentencePieceTokenizer {\n\n model,\n\n vocab,\n\n lower_case,\n\n })\n\n }\n\n\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 74, "score": 91044.52627281388 }, { "content": "/// SentencePiece tokenizer performing:\n\n/// - text cleaning\n\n/// - NFKC decomposition\n\n/// - (optional) lower casing\n\n/// - SentencePiece decomposition\n\npub struct SentencePieceTokenizer {\n\n model: SentencePieceModel,\n\n vocab: SentencePieceVocab,\n\n lower_case: bool,\n\n}\n\n\n\nimpl SentencePieceTokenizer {\n\n /// Create a new instance of a `SentencePieceTokenizer`\n\n /// Expects a SentencePiece protobuf 
file as an input.\n\n ///\n\n /// # Parameters\n\n /// - path (`&str`): path to the SentencePiece model file\n\n /// - lower_case (`bool`): flag indicating if the text should be lower-cased as part of the tokenization\n\n /// - strip_accents (`bool`): flag indicating if accents should be stripped from the text\n\n ///\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 75, "score": 91041.30775314262 }, { "content": " vocab: SentencePieceVocab,\n\n model: SentencePieceModel,\n\n lower_case: bool,\n\n ) -> SentencePieceTokenizer {\n\n SentencePieceTokenizer {\n\n model,\n\n vocab,\n\n lower_case,\n\n }\n\n }\n\n}\n\n\n\nimpl Tokenizer<SentencePieceVocab> for SentencePieceTokenizer {\n\n fn vocab(&self) -> &SentencePieceVocab {\n\n &self.vocab\n\n }\n\n\n\n fn tokenize_to_tokens(&self, text: TokenRef) -> Vec<Token> {\n\n let mut token = text.to_owned();\n\n clean_text(&mut token, true);\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 76, "score": 91037.31335227408 }, { "content": "// Copyright 2019 Google LLC. 
All Rights Reserved.\n\n// Copyright 2019-2020 Guillaume Becquin\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::error::TokenizerError;\n\nuse crate::tokenizer::base_tokenizer::{Token, TokenRef};\n\nuse crate::tokenizer::tokenization_utils::{clean_text, lowercase};\n\nuse crate::tokenizer::tokenization_utils::{decompose_nfkc, is_whitespace};\n\nuse crate::tokenizer::{MultiThreadedTokenizer, Tokenizer};\n\nuse crate::vocab::{SentencePieceModel, SentencePieceVocab, Vocab};\n\n\n\n/// # SentencePiece tokenizer\n", "file_path": "main/src/tokenizer/sentence_piece_tokenizer.rs", "rank": 77, "score": 91035.21353694826 }, { "content": " decompose_nfkc(&mut token);\n\n if self.lower_case {\n\n lowercase(&mut token);\n\n }\n\n token.text = token.text.replace(|c: char| is_whitespace(&c), \"\\u{2581}\");\n\n if !token.text.starts_with('\\u{2581}') {\n\n token.text.insert(0, '\\u{2581}');\n\n token.reference_offsets.insert(0, 0);\n\n };\n\n self.model.tokenize_to_tokens(token.as_ref())\n\n }\n\n\n\n fn convert_tokens_to_string(&self, tokens: Vec<String>) -> String {\n\n tokens\n\n .into_iter()\n\n .map(|v| v.replace('\\u{2581}', \" \"))\n\n .collect::<Vec<String>>()\n\n .join(\"\")\n\n }\n\n}\n\n\n\nimpl MultiThreadedTokenizer<SentencePieceVocab> for SentencePieceBpeTokenizer {}\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 78, "score": 88927.40719548814 }, { "content": " /// Create a new instance of a 
`SentencePieceBpeTokenizer` from an existing vocabulary and model\n\n ///\n\n /// # Parameters\n\n /// - vocab (`SentencePieceVocab`): vocabulary\n\n /// - model (`SentencePieceModel`): SentencePiece model\n\n /// - lower_case (`bool`): flag indicating if the text should be lower-cased as part of the tokenization\n\n /// - strip_accents (`bool`): flag indicating if accents should be stripped from the text\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_tokenizers::tokenizer::{Tokenizer, SentencePieceBpeTokenizer};\n\n /// use rust_tokenizers::vocab::{SentencePieceVocab, Vocab, SentencePieceBpeModel};\n\n /// let lower_case = false;\n\n /// let vocab = SentencePieceVocab::from_file(\"path/to/vocab/file\").unwrap();\n\n /// let model = SentencePieceBpeModel::from_file(\"path/to/model/file\").unwrap();\n\n ///\n\n /// let tokenizer = SentencePieceBpeTokenizer::from_existing_vocab_and_model(vocab, model, lower_case);\n\n /// ```\n\n pub fn from_existing_vocab_and_model(\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 79, "score": 88926.53683091605 }, { "content": " /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_tokenizers::tokenizer::{SentencePieceTokenizer, Tokenizer};\n\n /// let lower_case = false;\n\n /// let tokenizer = SentencePieceTokenizer::from_file(\"path/to/vocab/file\", lower_case).unwrap();\n\n /// ```\n\n pub fn from_file(\n\n path: &str,\n\n lower_case: bool,\n\n ) -> Result<SentencePieceBpeTokenizer, TokenizerError> {\n\n let model = SentencePieceBpeModel::from_file(path)?;\n\n let vocab = SentencePieceVocab::from_file(path)?;\n\n Ok(SentencePieceBpeTokenizer {\n\n model,\n\n vocab,\n\n lower_case,\n\n })\n\n }\n\n\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 80, "score": 88926.28904811355 }, { "content": "/// SentencePiece BPE tokenizer performing:\n\n/// - text cleaning\n\n/// - NFKC decomposition\n\n/// - (optional) lower casing\n\n/// - SentencePiece 
decomposition\n\npub struct SentencePieceBpeTokenizer {\n\n model: SentencePieceBpeModel,\n\n vocab: SentencePieceVocab,\n\n lower_case: bool,\n\n}\n\n\n\nimpl SentencePieceBpeTokenizer {\n\n /// Create a new instance of a `SentencePieceBpeTokenizer`\n\n /// Expects a SentencePiece protobuf file as an input.\n\n ///\n\n /// # Parameters\n\n /// - path (`&str`): path to the SentencePiece model file\n\n /// - lower_case (`bool`): flag indicating if the text should be lower-cased as part of the tokenization\n\n /// - strip_accents (`bool`): flag indicating if accents should be stripped from the text\n\n ///\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 81, "score": 88922.88087684789 }, { "content": " vocab: SentencePieceVocab,\n\n model: SentencePieceBpeModel,\n\n lower_case: bool,\n\n ) -> SentencePieceBpeTokenizer {\n\n SentencePieceBpeTokenizer {\n\n model,\n\n vocab,\n\n lower_case,\n\n }\n\n }\n\n}\n\n\n\nimpl Tokenizer<SentencePieceVocab> for SentencePieceBpeTokenizer {\n\n fn vocab(&self) -> &SentencePieceVocab {\n\n &self.vocab\n\n }\n\n\n\n fn tokenize_to_tokens(&self, text: TokenRef) -> Vec<Token> {\n\n let mut token = text.to_owned();\n\n clean_text(&mut token, true);\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 82, "score": 88919.09452825703 }, { "content": "// Copyright 2016 Google Inc.\n\n// Adapted from https://github.com/google/sentencepiece/blob/master/src/bpe_model.cc\n\n// Copyright 2019-2021 Guillaume Becquin\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the 
License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::error::TokenizerError;\n\nuse crate::tokenizer::tokenization_utils::{clean_text, decompose_nfkc, is_whitespace, lowercase};\n\nuse crate::tokenizer::{MultiThreadedTokenizer, Tokenizer};\n\nuse crate::vocab::{SentencePieceBpeModel, SentencePieceVocab, Vocab};\n\nuse crate::{Token, TokenRef};\n\n\n\n/// # SentencePiece tokenizer\n", "file_path": "main/src/tokenizer/sentence_piece_bpe_tokenizer.rs", "rank": 83, "score": 88914.18273292921 }, { "content": "#[pymodule]\n\nfn rust_tokenizers(_py: Python<'_>, m: &PyModule) -> PyResult<()> {\n\n m.add_class::<PyBertTokenizer>()?;\n\n m.add_class::<PyCtrlTokenizer>()?;\n\n m.add_class::<PyGpt2Tokenizer>()?;\n\n m.add_class::<PyRobertaTokenizer>()?;\n\n m.add_class::<PyOpenAiGptTokenizer>()?;\n\n m.add_class::<PySentencePieceTokenizer>()?;\n\n m.add_class::<PySentencePieceBpeTokenizer>()?;\n\n m.add_class::<PyAlbertTokenizer>()?;\n\n m.add_class::<PyT5Tokenizer>()?;\n\n m.add_class::<PyXLMRobertaTokenizer>()?;\n\n m.add_class::<PyXLNetTokenizer>()?;\n\n m.add_class::<PyReformerTokenizer>()?;\n\n m.add_class::<PyProphetNetTokenizer>()?;\n\n m.add_class::<PyPegasusTokenizer>()?;\n\n m.add_class::<PyMBart50Tokenizer>()?;\n\n m.add_class::<PyM2M100Tokenizer>()?;\n\n Ok(())\n\n}\n", "file_path": "python-bindings/src/lib.rs", "rank": 84, "score": 86443.37772260292 }, { "content": " def test_tokenization_sentence_piece(self):\n\n # Given\n\n self.base_tokenizer = sentencepiece.SentencePieceProcessor()\n\n self.base_tokenizer.Load(str(self.test_dir / 'spiece.model'))\n\n self.rust_tokenizer = PySentencePieceTokenizer(str(self.test_dir / 'spiece.model'), do_lower_case=False)\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.base_tokenizer.EncodeAsIds(example.text_a))\n\n\n\n # When\n\n # Note: the original sentence piece tokenizer strips trailing spaces and deletes consecutive 
spaces\n\n output_rust = self.rust_tokenizer.encode_list(\n\n [re.sub(' +', ' ', example.text_a.strip()) for example in self.examples],\n\n max_len=256,\n\n truncation_strategy='longest_first',\n\n stride=0)\n\n\n\n # Then\n\n for idx, (rust, baseline) in enumerate(zip(output_rust, output_baseline)):\n\n if rust.token_ids != baseline:\n\n assert sum(self.base_tokenizer.get_score(baseline)) == \\\n\n sum(self.base_tokenizer.get_score(rust.token_ids)), \\\n\n f'Difference in tokenization for {self.rust_tokenizer.__class__}: \\n ' \\\n\n f'Sentence a: {self.examples[idx].text_a} \\n' \\\n\n f'Sentence b: {self.examples[idx].text_b} \\n' \\\n\n f'Token mismatch: {self.get_token_diff_sentence_piece(rust.token_ids, baseline)} \\n' \\\n\n f'Rust: {rust.token_ids} \\n' \\\n", "file_path": "python-bindings/tests/test_tokenization_qnli.py", "rank": 85, "score": 82224.19143328209 }, { "content": " def test_tokenization_sentence_piece(self):\n\n # Given\n\n self.base_tokenizer = sentencepiece.SentencePieceProcessor()\n\n self.base_tokenizer.Load(str(self.test_dir / 'spiece.bpe.model'))\n\n self.rust_tokenizer = PySentencePieceBpeTokenizer(str(self.test_dir / 'spiece.bpe.model'), do_lower_case=False)\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.base_tokenizer.EncodeAsIds(example.text_a))\n\n\n\n # When\n\n # Note: the original sentence piece tokenizer strips trailing spaces\n\n output_rust = self.rust_tokenizer.encode_list([example.text_a.strip() for example in self.examples],\n\n max_len=256,\n\n truncation_strategy='longest_first',\n\n stride=0)\n\n\n\n # Then\n\n for idx, (rust, baseline) in enumerate(zip(output_rust, output_baseline)):\n\n if rust.token_ids != baseline:\n\n assert sum(self.base_tokenizer.get_score(baseline)) == \\\n\n sum(self.base_tokenizer.get_score(rust.token_ids)), \\\n\n f'Difference in tokenization for {self.rust_tokenizer.__class__}: \\n ' \\\n\n f'Sentence a: {self.examples[idx].text_a} \\n' \\\n\n 
f'Sentence b: {self.examples[idx].text_b} \\n' \\\n\n f'Token mismatch: {self.get_token_diff_sentence_piece(rust.token_ids, baseline)} \\n' \\\n\n f'Rust: {rust.token_ids} \\n' \\\n", "file_path": "python-bindings/tests/test_tokenization_sst2.py", "rank": 86, "score": 82224.19143328209 }, { "content": " def python_sentence_piece_tokenizer(self):\n\n output_baseline = []\n\n for example in self.examples:\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 87, "score": 80836.15505434103 }, { "content": " def get_token_diff_sentence_piece(self, rust_tokens, python_tokens):\n\n last_index = 1\n\n first_index = 0\n\n max_index = min(len(rust_tokens), len(python_tokens))\n\n while rust_tokens[first_index] == python_tokens[first_index] and first_index < max_index - 1:\n\n first_index += 1\n\n first_index -= 1\n\n while rust_tokens[-last_index] == python_tokens[-last_index] and last_index < max_index - 1:\n\n last_index += 1\n\n last_index += 1\n\n python_last_index = len(python_tokens) + last_index\n\n rust_last_index = len(rust_tokens) + last_index\n\n rust_tokens_diff = rust_tokens[first_index:rust_last_index]\n\n python_token_diff = python_tokens[first_index:python_last_index]\n\n rust_decoded_tokens = self.base_tokenizer.DecodeIds(rust_tokens_diff)\n\n python_decoded_tokens = self.base_tokenizer.DecodeIds(python_token_diff)\n", "file_path": "python-bindings/tests/test_tokenization_sst2.py", "rank": 88, "score": 80836.15505434103 }, { "content": " def test_tokenization_sentence_piece_bpe(self):\n\n # Given\n\n self.base_tokenizer = sentencepiece.SentencePieceProcessor()\n\n self.base_tokenizer.Load(str(self.test_dir / 'spiece.model'))\n\n self.rust_tokenizer = PySentencePieceTokenizer(str(self.test_dir / 'spiece.model'), do_lower_case=False)\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.base_tokenizer.EncodeAsIds(example.text_a))\n\n\n\n # When\n\n # Note: the original sentence 
piece tokenizer strips trailing spaces\n\n output_rust = self.rust_tokenizer.encode_list([example.text_a.strip() for example in self.examples],\n\n max_len=256,\n\n truncation_strategy='longest_first',\n\n stride=0)\n\n\n\n # Then\n\n for idx, (rust, baseline) in enumerate(zip(output_rust, output_baseline)):\n\n if rust.token_ids != baseline:\n\n assert sum(self.base_tokenizer.get_score(baseline)) == \\\n\n sum(self.base_tokenizer.get_score(rust.token_ids)), \\\n\n f'Difference in tokenization for {self.rust_tokenizer.__class__}: \\n ' \\\n\n f'Sentence a: {self.examples[idx].text_a} \\n' \\\n\n f'Sentence b: {self.examples[idx].text_b} \\n' \\\n\n f'Token mismatch: {self.get_token_diff_sentence_piece(rust.token_ids, baseline)} \\n' \\\n\n f'Rust: {rust.token_ids} \\n' \\\n", "file_path": "python-bindings/tests/test_tokenization_sst2.py", "rank": 89, "score": 80836.15505434103 }, { "content": " def get_token_diff_sentence_piece(self, rust_tokens, python_tokens):\n\n last_index = 1\n\n first_index = 0\n\n max_index = min(len(rust_tokens), len(python_tokens))\n\n while rust_tokens[first_index] == python_tokens[first_index] and first_index < max_index - 1:\n\n first_index += 1\n\n first_index -= 1\n\n while rust_tokens[-last_index] == python_tokens[-last_index] and last_index < max_index - 1:\n\n last_index += 1\n\n last_index += 1\n\n python_last_index = len(python_tokens) + last_index\n\n rust_last_index = len(rust_tokens) + last_index\n\n rust_tokens_diff = rust_tokens[first_index:rust_last_index]\n\n python_token_diff = python_tokens[first_index:python_last_index]\n\n rust_decoded_tokens = self.base_tokenizer.DecodeIds(rust_tokens_diff)\n\n python_decoded_tokens = self.base_tokenizer.DecodeIds(python_token_diff)\n", "file_path": "python-bindings/tests/test_tokenization_qnli.py", "rank": 90, "score": 80836.15505434103 }, { "content": " def rust_sentence_piece_tokenizer_single_threaded(self):\n\n output_baseline = []\n\n for example in self.examples:\n\n 
output_baseline.append(self.rust_tokenizer.encode(example.text_a,\n\n max_len=128,\n\n truncation_strategy='longest_first',\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 91, "score": 78209.81963625074 }, { "content": " def rust_sentence_piece_tokenizer_multi_threaded(self):\n\n self.rust_tokenizer.encode_list([example.text_a for example in self.examples],\n\n max_len=128,\n\n truncation_strategy='longest_first',\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 92, "score": 78209.81963625074 }, { "content": "fn main() {\n\n #[cfg(feature = \"proto-compile\")]\n\n {\n\n let out_path = \"src/vocab/sentencepiece_proto\";\n\n let out_file_name = \"src/vocab/sentencepiece_proto/sentencepiece_model.proto\";\n\n let proto_path = \"protos/sentencepiece_model.proto\";\n\n\n\n let metadata = fs::metadata(out_file_name);\n\n\n\n if metadata.is_err() {\n\n protobuf_codegen_pure::Codegen::new()\n\n .out_dir(out_path)\n\n .inputs(&[proto_path])\n\n .include(\"protos\")\n\n .run()\n\n .expect(\"Codegen failed.\");\n\n }\n\n }\n\n}\n", "file_path": "main/build.rs", "rank": 93, "score": 78053.43592508997 }, { "content": " def test_rust_sentence_piece_tokenizer_single_threaded(self, benchmark):\n\n benchmark.pedantic(self.rust_sentence_piece_tokenizer_single_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 94, "score": 76965.80626544634 }, { "content": " def test_python_sentence_piece_tokenizer_single_threaded(self, benchmark):\n\n benchmark.pedantic(self.python_sentence_piece_tokenizer, setup=self.setup_python_tokenizer, iterations=1,\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 95, "score": 76965.80626544634 }, { "content": " def test_rust_sentence_piece_tokenizer_multi_threaded(self, benchmark):\n\n 
benchmark.pedantic(self.rust_sentence_piece_tokenizer_multi_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 96, "score": 76965.80626544634 }, { "content": "class TestBenchmarkSentencePiece:\n\n def setup_class(self):\n\n self.processor = Sst2Processor()\n\n self.test_dir = Path(tempfile.mkdtemp())\n\n sst2_url = 'https://firebasestorage.googleapis.com/v0/b/mtl-sentence-representations.appspot.com/o/data%2FSST-2.zip?alt=media&token=aabc5f6b-e466-44a2-b9b4-cf6337f84ac8'\n\n contents = requests.get(sst2_url)\n\n (self.test_dir / 'SST-2.zip').open('wb').write(contents.content)\n\n with ZipFile(self.test_dir / 'SST-2.zip', 'r') as zipObj:\n\n zipObj.extractall(self.test_dir)\n\n self.examples = self.processor.get_train_examples(self.test_dir / 'SST-2')\n\n sentence_piece_url = 'https://s3.amazonaws.com/models.huggingface.co/bert/xlnet-base-cased-spiece.model'\n\n contents = requests.get(sentence_piece_url)\n\n (self.test_dir / 'spiece.model').open('wb').write(contents.content)\n\n self.base_tokenizer = sentencepiece.SentencePieceProcessor()\n\n self.base_tokenizer.Load(str(self.test_dir / 'spiece.model'))\n\n self.rust_tokenizer = PySentencePieceTokenizer(str(self.test_dir / 'spiece.model'), do_lower_case=False)\n\n\n\n def setup_python_tokenizer(self):\n\n self.base_tokenizer = sentencepiece.SentencePieceProcessor()\n\n self.base_tokenizer.Load(str(self.test_dir / 'spiece.model'))\n\n\n\n def setup_rust_tokenizer(self):\n\n self.rust_tokenizer = PySentencePieceTokenizer(str(self.test_dir / 'spiece.model'), do_lower_case=False)\n\n\n\n def python_sentence_piece_tokenizer(self):\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.base_tokenizer.EncodeAsIds(example.text_a))\n\n\n\n def rust_sentence_piece_tokenizer_single_threaded(self):\n\n output_baseline = []\n\n for example in self.examples:\n\n 
output_baseline.append(self.rust_tokenizer.encode(example.text_a,\n\n max_len=128,\n\n truncation_strategy='longest_first',\n\n stride=0))\n\n\n\n def rust_sentence_piece_tokenizer_multi_threaded(self):\n\n self.rust_tokenizer.encode_list([example.text_a for example in self.examples],\n\n max_len=128,\n\n truncation_strategy='longest_first',\n\n stride=0)\n\n\n\n def rust_sentence_piece_encoding_single_threaded(self):\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.rust_tokenizer.encode(example.text_a,\n\n max_len=128,\n\n truncation_strategy='longest_first',\n\n stride=0))\n\n\n\n def rust_sentence_piece_encoding_multi_threaded(self):\n\n self.rust_tokenizer.encode_list([example.text_a for example in self.examples],\n\n max_len=128,\n\n truncation_strategy='longest_first',\n\n stride=0)\n\n\n\n def test_python_sentence_piece_tokenizer_single_threaded(self, benchmark):\n\n benchmark.pedantic(self.python_sentence_piece_tokenizer, setup=self.setup_python_tokenizer, iterations=1,\n\n rounds=3)\n\n\n\n def test_rust_sentence_piece_tokenizer_single_threaded(self, benchmark):\n\n benchmark.pedantic(self.rust_sentence_piece_tokenizer_single_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n\n rounds=3)\n\n\n\n def test_rust_sentence_piece_tokenizer_multi_threaded(self, benchmark):\n\n benchmark.pedantic(self.rust_sentence_piece_tokenizer_multi_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n\n rounds=3)\n\n\n\n def test_rust_sentence_piece_encoding_single_threaded(self, benchmark):\n\n benchmark.pedantic(self.rust_sentence_piece_encoding_single_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n\n rounds=3)\n\n\n\n def test_rust_sentence_piece_encoding_multi_threaded(self, benchmark):\n\n benchmark.pedantic(self.rust_sentence_piece_encoding_multi_threaded, setup=self.setup_rust_tokenizer,\n\n iterations=1,\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", 
"rank": 97, "score": 75049.8739777959 }, { "content": " def rust_sentence_piece_encoding_single_threaded(self):\n\n output_baseline = []\n\n for example in self.examples:\n\n output_baseline.append(self.rust_tokenizer.encode(example.text_a,\n\n max_len=128,\n\n truncation_strategy='longest_first',\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 98, "score": 72258.89991083514 }, { "content": " def rust_sentence_piece_encoding_multi_threaded(self):\n\n self.rust_tokenizer.encode_list([example.text_a for example in self.examples],\n\n max_len=128,\n\n truncation_strategy='longest_first',\n", "file_path": "python-bindings/tests/test_benchmark_tokenization_sentencepiece.py", "rank": 99, "score": 72258.89991083514 } ]
Rust
storage/libradb/src/metrics.rs
chouette254/libra
1eaefa60d29e1df72ba6c4f9cf1867964821b586
use libra_metrics::{ register_histogram_vec, register_int_counter, register_int_gauge, register_int_gauge_vec, HistogramVec, IntCounter, IntGauge, IntGaugeVec, }; use once_cell::sync::Lazy; pub static LIBRA_STORAGE_LEDGER: Lazy<IntGaugeVec> = Lazy::new(|| { register_int_gauge_vec!( "libra_storage_ledger", "Libra storage ledger counters", &["type"] ) .unwrap() }); pub static LIBRA_STORAGE_CF_SIZE_BYTES: Lazy<IntGaugeVec> = Lazy::new(|| { register_int_gauge_vec!( "libra_storage_cf_size_bytes", "Libra storage Column Family size in bytes", &["cf_name"] ) .unwrap() }); pub static LIBRA_STORAGE_COMMITTED_TXNS: Lazy<IntCounter> = Lazy::new(|| { register_int_counter!( "libra_storage_committed_txns", "Libra storage committed transactions" ) .unwrap() }); pub static LIBRA_STORAGE_LATEST_TXN_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_latest_transaction_version", "Libra storage latest transaction version" ) .unwrap() }); pub static LIBRA_STORAGE_LEDGER_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_ledger_version", "Version in the latest saved ledger info." ) .unwrap() }); pub static LIBRA_STORAGE_NEXT_BLOCK_EPOCH: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_next_block_epoch", "ledger_info.next_block_epoch() for the latest saved ledger info." 
) .unwrap() }); pub static LIBRA_STORAGE_PRUNE_WINDOW: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!("libra_storage_prune_window", "Libra storage prune window").unwrap() }); pub static LIBRA_STORAGE_PRUNER_LEAST_READABLE_STATE_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_pruner_least_readable_state_version", "Libra storage pruner least readable state version" ) .unwrap() }); pub static LIBRA_STORAGE_API_LATENCY_SECONDS: Lazy<HistogramVec> = Lazy::new(|| { register_histogram_vec!( "libra_storage_api_latency_seconds", "Libra storage api latency in seconds", &["api_name"] ) .unwrap() }); pub static LIBRA_STORAGE_OTHER_TIMERS_SECONDS: Lazy<HistogramVec> = Lazy::new(|| { register_histogram_vec!( "libra_storage_other_timers_seconds", "Various timers below public API level.", &["name"] ) .unwrap() }); pub(crate) static BACKUP_EPOCH_ENDING_EPOCH: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_epoch_ending_epoch", "Current epoch returned in an epoch ending backup." ) .unwrap() }); pub(crate) static BACKUP_TXN_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_transaction_version", "Current version returned in a transaction backup." ) .unwrap() }); pub(crate) static BACKUP_STATE_SNAPSHOT_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_state_snapshot_version", "Version of requested state snapshot backup." ) .unwrap() }); pub(crate) static BACKUP_STATE_SNAPSHOT_LEAF_IDX: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_state_snapshot_leaf_index", "Index of current leaf index returned in a state snapshot backup." ) .unwrap() });
use libra_metrics::{ register_histogram_vec, register_int_counter, register_int_gauge,
in bytes", &["cf_name"] ) .unwrap() }); pub static LIBRA_STORAGE_COMMITTED_TXNS: Lazy<IntCounter> = Lazy::new(|| { register_int_counter!( "libra_storage_committed_txns", "Libra storage committed transactions" ) .unwrap() }); pub static LIBRA_STORAGE_LATEST_TXN_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_latest_transaction_version", "Libra storage latest transaction version" ) .unwrap() }); pub static LIBRA_STORAGE_LEDGER_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_ledger_version", "Version in the latest saved ledger info." ) .unwrap() }); pub static LIBRA_STORAGE_NEXT_BLOCK_EPOCH: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_next_block_epoch", "ledger_info.next_block_epoch() for the latest saved ledger info." ) .unwrap() }); pub static LIBRA_STORAGE_PRUNE_WINDOW: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!("libra_storage_prune_window", "Libra storage prune window").unwrap() }); pub static LIBRA_STORAGE_PRUNER_LEAST_READABLE_STATE_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_storage_pruner_least_readable_state_version", "Libra storage pruner least readable state version" ) .unwrap() }); pub static LIBRA_STORAGE_API_LATENCY_SECONDS: Lazy<HistogramVec> = Lazy::new(|| { register_histogram_vec!( "libra_storage_api_latency_seconds", "Libra storage api latency in seconds", &["api_name"] ) .unwrap() }); pub static LIBRA_STORAGE_OTHER_TIMERS_SECONDS: Lazy<HistogramVec> = Lazy::new(|| { register_histogram_vec!( "libra_storage_other_timers_seconds", "Various timers below public API level.", &["name"] ) .unwrap() }); pub(crate) static BACKUP_EPOCH_ENDING_EPOCH: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_epoch_ending_epoch", "Current epoch returned in an epoch ending backup." 
) .unwrap() }); pub(crate) static BACKUP_TXN_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_transaction_version", "Current version returned in a transaction backup." ) .unwrap() }); pub(crate) static BACKUP_STATE_SNAPSHOT_VERSION: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_state_snapshot_version", "Version of requested state snapshot backup." ) .unwrap() }); pub(crate) static BACKUP_STATE_SNAPSHOT_LEAF_IDX: Lazy<IntGauge> = Lazy::new(|| { register_int_gauge!( "libra_backup_handler_state_snapshot_leaf_index", "Index of current leaf index returned in a state snapshot backup." ) .unwrap() });
register_int_gauge_vec, HistogramVec, IntCounter, IntGauge, IntGaugeVec, }; use once_cell::sync::Lazy; pub static LIBRA_STORAGE_LEDGER: Lazy<IntGaugeVec> = Lazy::new(|| { register_int_gauge_vec!( "libra_storage_ledger", "Libra storage ledger counters", &["type"] ) .unwrap() }); pub static LIBRA_STORAGE_CF_SIZE_BYTES: Lazy<IntGaugeVec> = Lazy::new(|| { register_int_gauge_vec!( "libra_storage_cf_size_bytes", "Libra storage Column Family size
random
[ { "content": "// Parse a use declaration:\n\n// UseDecl =\n\n// \"use\" <ModuleIdent> <UseAlias> \";\" |\n\n// \"use\" <ModuleIdent> :: <UseMember> \";\" |\n\n// \"use\" <ModuleIdent> :: \"{\" Comma<UseMember> \"}\" \";\"\n\nfn parse_use_decl<'input>(tokens: &mut Lexer<'input>) -> Result<Use, Error> {\n\n consume_token(tokens, Tok::Use)?;\n\n let ident = parse_module_ident(tokens)?;\n\n let alias_opt = parse_use_alias(tokens)?;\n\n let use_ = match (&alias_opt, tokens.peek()) {\n\n (None, Tok::ColonColon) => {\n\n consume_token(tokens, Tok::ColonColon)?;\n\n let sub_uses = match tokens.peek() {\n\n Tok::LBrace => parse_comma_list(\n\n tokens,\n\n Tok::LBrace,\n\n Tok::RBrace,\n\n parse_use_member,\n\n \"a module member alias\",\n\n )?,\n\n _ => vec![parse_use_member(tokens)?],\n\n };\n\n Use::Members(ident, sub_uses)\n\n }\n\n _ => Use::Module(ident, alias_opt.map(ModuleName)),\n\n };\n\n consume_token(tokens, Tok::Semicolon)?;\n\n Ok(use_)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 0, "score": 103055.85809772374 }, { "content": "fn use_(context: &mut Context, acc: &mut AliasMap, u: P::Use) {\n\n let unbound_module = |mident: &ModuleIdent| -> Error {\n\n vec![(\n\n mident.loc(),\n\n format!(\"Invalid 'use'. Unbound module: '{}'\", mident),\n\n )]\n\n };\n\n macro_rules! 
add_module_alias {\n\n ($ident:expr, $alias_opt:expr) => {{\n\n let alias: Name = $alias_opt.unwrap_or_else(|| $ident.0.value.name.0.clone());\n\n if let Err(()) = check_restricted_self_name(context, \"module alias\", &alias) {\n\n return;\n\n }\n\n\n\n if let Err(old_loc) = acc.add_module_alias(alias.clone(), $ident) {\n\n duplicate_module_alias(context, old_loc, alias)\n\n }\n\n }};\n\n };\n\n match u {\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 1, "score": 102324.34338305294 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{\n\n errors::*,\n\n naming::ast as N,\n\n parser::ast::{ModuleIdent, StructName},\n\n shared::{unique_map::UniqueMap, *},\n\n};\n\nuse move_ir_types::location::*;\n\nuse petgraph::{algo::toposort as petgraph_toposort, graphmap::DiGraphMap};\n\nuse std::collections::BTreeMap;\n\n\n\n//**************************************************************************************************\n\n// Entry\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 2, "score": 61832.156576666144 }, { "content": " let m = self\n\n .neighbors\n\n .entry(self.current_module.clone().unwrap())\n\n .or_insert_with(BTreeMap::new);\n\n if m.contains_key(uses) {\n\n return;\n\n }\n\n\n\n m.insert(uses.clone(), loc);\n\n }\n\n\n\n fn dependency_graph(&self) -> DiGraphMap<&ModuleIdent, ()> {\n\n let edges = self\n\n .neighbors\n\n .iter()\n\n .flat_map(|(parent, children)| children.iter().map(move |(child, _)| (parent, child)));\n\n DiGraphMap::from_edges(edges)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 3, "score": 61831.12282910881 }, { "content": " exp(context, e)\n\n }\n\n E::FieldMutate(edotted, e) => {\n\n exp_dotted(context, edotted);\n\n exp(context, e);\n\n }\n\n\n\n E::Loop(e) | E::Return(e) | E::Abort(e) 
| E::Dereference(e) | E::UnaryExp(_, e) => {\n\n exp(context, e)\n\n }\n\n\n\n E::Pack(m, _, bs_opt, fes) => {\n\n context.add_usage(m, *loc);\n\n types_opt(context, bs_opt);\n\n fes.iter().for_each(|(_, (_, e))| exp(context, e))\n\n }\n\n\n\n E::ExpList(es) => es.iter().for_each(|e| exp(context, e)),\n\n\n\n E::DerefBorrow(edotted) | E::Borrow(_, edotted) => exp_dotted(context, edotted),\n\n\n\n E::Cast(e, ty) | E::Annotate(e, ty) => {\n\n exp(context, e);\n\n type_(context, ty)\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 4, "score": 61824.35952819172 }, { "content": " }\n\n\n\n E::Builtin(bf, sp!(_, es_)) => {\n\n builtin_function(context, bf);\n\n es_.iter().for_each(|e| exp(context, e))\n\n }\n\n\n\n E::IfElse(ec, et, ef) => {\n\n exp(context, ec);\n\n exp(context, et);\n\n exp(context, ef)\n\n }\n\n\n\n E::BinopExp(e1, _, e2) | E::Mutate(e1, e2) | E::While(e1, e2) => {\n\n exp(context, e1);\n\n exp(context, e2)\n\n }\n\n E::Block(seq) => sequence(context, seq),\n\n E::Assign(al, e) => {\n\n lvalues(context, &al.value);\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 5, "score": 61824.35952819172 }, { "content": "struct Context<'a> {\n\n modules: &'a UniqueMap<ModuleIdent, N::ModuleDefinition>,\n\n neighbors: BTreeMap<ModuleIdent, BTreeMap<ModuleIdent, Loc>>,\n\n current_module: Option<ModuleIdent>,\n\n}\n\n\n\nimpl<'a> Context<'a> {\n\n fn new(modules: &'a UniqueMap<ModuleIdent, N::ModuleDefinition>) -> Self {\n\n Context {\n\n modules,\n\n neighbors: BTreeMap::new(),\n\n current_module: None,\n\n }\n\n }\n\n\n\n fn add_usage(&mut self, uses: &ModuleIdent, loc: Loc) {\n\n if self.current_module.as_ref().unwrap() == uses || !self.modules.contains_key(uses) {\n\n return;\n\n }\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 6, "score": 60532.6479892482 }, { "content": "/// A predicate detailing whether the compiled versions of scripts and the stdlib should be used or\n\n/// not. 
The default is that the compiled versions of the stdlib and transaction scripts should be\n\n/// used.\n\npub fn use_compiled() -> bool {\n\n std::env::var(NO_USE_COMPILED).is_err()\n\n}\n", "file_path": "language/stdlib/compiled/src/lib.rs", "rank": 7, "score": 58112.264289400504 }, { "content": "fn sign_transaction_using_ed25519(\n\n request: SignTransactionUsingEd25519Request,\n\n) -> SignTransactionUsingEd25519Response {\n\n let raw_txn: RawTransaction = lcs::from_bytes(\n\n &hex::decode(request.raw_txn.clone())\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\"hex decode of raw_txn failed : {}\", err))\n\n })\n\n .unwrap(),\n\n )\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\"lcs deserialization failure of raw_txn : {}\", err))\n\n })\n\n .unwrap();\n\n let private_key = Ed25519PrivateKey::from_encoded_string(&request.private_key)\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\n\n \"Failed to hex decode private_key {} : {}\",\n\n request.private_key, err\n\n ))\n", "file_path": "client/swiss-knife/src/main.rs", "rank": 8, "score": 58104.65818035393 }, { "content": "fn used_local_info(\n\n local_map: &BTreeMap<&String, F::LocalIndex>,\n\n used_local_types: &BTreeMap<Var, H::SingleType>,\n\n) -> UniqueMap<Var, VarInfo> {\n\n UniqueMap::maybe_from_iter(used_local_types.iter().map(|(v, ty)| {\n\n let (v, info) = var_info(&local_map, v.clone(), ty.clone());\n\n let v_orig_ = match display_var(&v.0.value) {\n\n DisplayVar::Tmp => panic!(\"ICE spec block captured a tmp\"),\n\n DisplayVar::Orig(s) => s,\n\n };\n\n let v_orig = Var(sp(v.0.loc, v_orig_));\n\n (v_orig, info)\n\n }))\n\n .unwrap()\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/translate.rs", "rank": 9, "score": 58104.65818035393 }, { "content": "fn verify_signature_using_ed25519(\n\n request: VerifyEd25519SignatureRequest,\n\n) -> VerifyEd25519SignatureResponse {\n\n let message = helpers::hex_decode(&request.payload);\n\n let signature = 
Ed25519Signature::from_encoded_string(&request.signature)\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\n\n \"Failed to hex decode signature {} : {}\",\n\n request.signature, err\n\n ))\n\n })\n\n .unwrap();\n\n let public_key = Ed25519PublicKey::from_encoded_string(&request.public_key)\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\n\n \"Failed to hex decode public_key {} : {}\",\n\n request.public_key, err\n\n ))\n\n })\n\n .unwrap();\n\n let valid_signature = signature\n\n .verify_arbitrary_msg(&message, &public_key)\n\n .is_ok();\n\n VerifyEd25519SignatureResponse { valid_signature }\n\n}\n\n\n\n//////////////////////////////////////////\n\n// verify signature of a RawTransaction //\n\n//////////////////////////////////////////\n\n\n", "file_path": "client/swiss-knife/src/main.rs", "rank": 10, "score": 58104.65818035393 }, { "content": "fn best_cycle_loc<'a>(\n\n context: &'a Context,\n\n cycle: Vec<&'a ModuleIdent>,\n\n) -> (Loc, &'a ModuleIdent, &'a ModuleIdent) {\n\n let len = cycle.len();\n\n assert!(len >= 3);\n\n let first = cycle[0];\n\n let user = cycle[len - 2];\n\n let used = cycle[len - 1];\n\n assert!(first == used);\n\n let used_loc = context.neighbors.get(user).unwrap().get(used).unwrap();\n\n (*used_loc, user, used)\n\n}\n\n\n\n//**************************************************************************************************\n\n// Modules\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 11, "score": 58104.65818035393 }, { "content": "fn verify_transaction_signature_using_ed25519(\n\n request: VerifyTransactionEd25519SignatureRequest,\n\n) -> VerifyTransactionEd25519SignatureResponse {\n\n let raw_txn: RawTransaction = lcs::from_bytes(\n\n &hex::decode(request.raw_txn.clone())\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\"hex decode of raw_txn failed : {}\", err))\n\n })\n\n 
.unwrap(),\n\n )\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\"lcs deserialization failure of raw_txn : {}\", err))\n\n })\n\n .unwrap();\n\n let signature = Ed25519Signature::from_encoded_string(&request.signature)\n\n .map_err(|err| {\n\n helpers::exit_with_error(format!(\n\n \"Failed to hex decode signature {} : {}\",\n\n request.signature, err\n\n ))\n", "file_path": "client/swiss-knife/src/main.rs", "rank": 12, "score": 56962.267992809095 }, { "content": "#[derive(Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\nstruct SignTransactionUsingEd25519Response {\n\n pub signature: String,\n\n}\n\n\n", "file_path": "client/swiss-knife/src/main.rs", "rank": 13, "score": 56962.267992809095 }, { "content": "#[derive(Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\nstruct SignTransactionUsingEd25519Request {\n\n pub raw_txn: String,\n\n pub private_key: String,\n\n}\n\n\n", "file_path": "client/swiss-knife/src/main.rs", "rank": 14, "score": 56962.267992809095 }, { "content": "pub fn get_used_memory<'env>(\n\n target: &'env FunctionTarget,\n\n) -> &'env BTreeSet<QualifiedId<StructId>> {\n\n &target\n\n .get_annotations()\n\n .get::<UsageAnnotation>()\n\n .expect(\"Invariant violation: target not analyzed\")\n\n .used_memory\n\n}\n\n\n", "file_path": "language/move-prover/bytecode/src/usage_analysis.rs", "rank": 15, "score": 54807.151549540955 }, { "content": "fn function(context: &mut Context, fdef: &N::Function) {\n\n function_signature(context, &fdef.signature);\n\n function_acquires(context, &fdef.acquires);\n\n if let N::FunctionBody_::Defined(seq) = &fdef.body.value {\n\n sequence(context, seq)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 16, "score": 53789.61065377142 }, { "content": "fn sequence(context: &mut Context, sequence: &N::Sequence) {\n\n use N::SequenceItem_ as SI;\n\n for sp!(_, item_) in sequence {\n\n match item_ {\n\n SI::Seq(e) => exp(context, e),\n\n 
SI::Declare(bl, ty_opt) => {\n\n lvalues(context, &bl.value);\n\n type_opt(context, ty_opt);\n\n }\n\n SI::Bind(bl, e) => {\n\n lvalues(context, &bl.value);\n\n exp(context, e)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 17, "score": 53789.61065377142 }, { "content": "fn use_tmp(var: Var) -> H::UnannotatedExp_ {\n\n use H::UnannotatedExp_ as E;\n\n E::Move {\n\n from_user: false,\n\n var,\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 18, "score": 53789.61065377142 }, { "content": "// Creates and returns a Node using the MockLibraInterface implementation.\n\n// This setup is useful for testing and verifying new development features quickly.\n\nfn setup_node_using_test_mocks() -> Node<MockLibraInterface> {\n\n let (node_config, key_manager_config) = get_test_configs();\n\n let (storage, db_rw) = setup_libra_db(&node_config);\n\n let libra = MockLibraInterface { storage };\n\n let executor = Executor::new(db_rw);\n\n\n\n setup_node(&node_config, &key_manager_config, executor, libra)\n\n}\n\n\n", "file_path": "secure/key-manager/src/tests.rs", "rank": 19, "score": 52816.73764193208 }, { "content": "fn type_(context: &mut Context, sp!(_, ty_): &N::Type) {\n\n use N::Type_ as T;\n\n match ty_ {\n\n T::Apply(_, tn, tys) => {\n\n type_name(context, tn);\n\n types(context, tys);\n\n }\n\n T::Ref(_, t) => type_(context, t),\n\n T::Param(_) | T::Unit | T::Anything | T::UnresolvedError | T::Var(_) => (),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 20, "score": 52809.164076992645 }, { "content": "fn exp(context: &mut Context, sp!(loc, e_): &N::Exp) {\n\n use N::Exp_ as E;\n\n match e_ {\n\n E::Unit { .. 
}\n\n | E::UnresolvedError\n\n | E::Break\n\n | E::Continue\n\n | E::Spec(_, _)\n\n | E::InferredNum(_)\n\n | E::Value(_)\n\n | E::Constant(None, _)\n\n | E::Move(_)\n\n | E::Copy(_)\n\n | E::Use(_) => (),\n\n\n\n E::Constant(Some(m), _c) => context.add_usage(m, *loc),\n\n E::ModuleCall(m, _, bs_opt, sp!(_, es_)) => {\n\n context.add_usage(m, *loc);\n\n types_opt(context, bs_opt);\n\n es_.iter().for_each(|e| exp(context, e))\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 21, "score": 52809.164076992645 }, { "content": "fn cycle_error(context: &Context, cycle_ident: ModuleIdent) -> Error {\n\n let cycle = shortest_cycle(&context.dependency_graph(), &cycle_ident);\n\n\n\n // For printing uses, sort the cycle by location (earliest first)\n\n let cycle_strings = cycle\n\n .iter()\n\n .map(|m| format!(\"'{}'\", m))\n\n .collect::<Vec<_>>()\n\n .join(\" uses \");\n\n\n\n let (used_loc, user, used) = best_cycle_loc(context, cycle);\n\n\n\n let use_msg = format!(\"Invalid use of module '{}' in module '{}'.\", used, user);\n\n let cycle_msg = format!(\n\n \"Using this module creates a dependency cycle: {}\",\n\n cycle_strings\n\n );\n\n vec![(used_loc, use_msg), (used_loc, cycle_msg)]\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 22, "score": 51863.81972717961 }, { "content": "fn type_opt(context: &mut Context, t_opt: &Option<N::Type>) {\n\n t_opt.iter().for_each(|t| type_(context, t))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Expressions\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 23, "score": 51863.81972717961 }, { "content": "fn struct_def(context: &mut Context, sdef: &N::StructDefinition) {\n\n if let N::StructFields::Defined(fields) = &sdef.fields {\n\n fields.iter().for_each(|(_, (_, bt))| type_(context, bt));\n\n 
}\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 24, "score": 51863.81972717961 }, { "content": "fn function_signature(context: &mut Context, sig: &N::FunctionSignature) {\n\n types(context, sig.parameters.iter().map(|(_, st)| st));\n\n type_(context, &sig.return_type)\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 25, "score": 51863.81972717961 }, { "content": "fn lvalue(context: &mut Context, sp!(loc, a_): &N::LValue) {\n\n use N::LValue_ as L;\n\n if let L::Unpack(m, _, bs_opt, f) = a_ {\n\n context.add_usage(m, *loc);\n\n types_opt(context, bs_opt);\n\n lvalues(context, f.iter().map(|(_, (_, b))| b));\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 26, "score": 51863.81972717961 }, { "content": "// Creates and returns a test node that uses the JsonRpcLibraInterface.\n\n// This setup is useful for testing nodes as they operate in a production environment.\n\nfn setup_node_using_json_rpc() -> (Node<JsonRpcLibraInterface>, Runtime) {\n\n let (node_config, key_manager_config) = get_test_configs();\n\n\n\n let (_storage, db_rw) = setup_libra_db(&node_config);\n\n let (libra, server) = setup_libra_interface_and_json_server(db_rw.clone());\n\n let executor = Executor::new(db_rw);\n\n\n\n (\n\n setup_node(&node_config, &key_manager_config, executor, libra),\n\n server,\n\n )\n\n}\n\n\n", "file_path": "secure/key-manager/src/tests.rs", "rank": 27, "score": 50959.29921157192 }, { "content": "fn use_local(context: &mut Context, loc: &Loc, local: &Var) {\n\n use LocalState as L;\n\n let state = context.get_state(local);\n\n match state {\n\n L::Available(_) => (),\n\n L::Unavailable(unavailable) | L::MaybeUnavailable { unavailable, .. } => {\n\n let verb = match state {\n\n LocalState::Available(_) => unreachable!(),\n\n LocalState::Unavailable(_) => \"does\",\n\n LocalState::MaybeUnavailable { .. 
} => \"might\",\n\n };\n\n let unavailable = *unavailable;\n\n let vstr = match display_var(local.value()) {\n\n DisplayVar::Tmp => panic!(\"ICE invalid use tmp local {}\", local.value()),\n\n DisplayVar::Orig(s) => s,\n\n };\n\n let msg = format!(\n\n \"The local {} not have a value due to this position. The local must be assigned a \\\n\n value before being used\",\n\n verb\n\n );\n\n context.error(vec![\n\n (*loc, format!(\"Invalid usage of local '{}'\", vstr)),\n\n (unavailable, msg),\n\n ])\n\n }\n\n }\n\n}\n", "file_path": "language/move-lang/src/cfgir/locals/mod.rs", "rank": 28, "score": 50951.72564663248 }, { "content": "fn builtin_function(context: &mut Context, sp!(_, bf_): &N::BuiltinFunction) {\n\n use N::BuiltinFunction_ as B;\n\n match bf_ {\n\n B::MoveTo(bt_opt)\n\n | B::MoveFrom(bt_opt)\n\n | B::BorrowGlobal(_, bt_opt)\n\n | B::Exists(bt_opt)\n\n | B::Freeze(bt_opt) => type_opt(context, bt_opt),\n\n B::Assert => (),\n\n }\n\n}\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 29, "score": 50951.72564663248 }, { "content": "fn types<'a>(context: &mut Context, tys: impl IntoIterator<Item = &'a N::Type>) {\n\n tys.into_iter().for_each(|ty| type_(context, ty))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 30, "score": 50951.72564663248 }, { "content": "fn exp_dotted(context: &mut Context, sp!(_, ed_): &N::ExpDotted) {\n\n use N::ExpDotted_ as D;\n\n match ed_ {\n\n D::Exp(e) => exp(context, e),\n\n D::Dot(edotted, _) => exp_dotted(context, edotted),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 31, "score": 50951.72564663248 }, { "content": "fn types_opt(context: &mut Context, tys_opt: &Option<Vec<N::Type>>) {\n\n tys_opt.iter().for_each(|tys| types(context, tys))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 32, "score": 50071.15790269413 }, { "content": "fn module(context: &mut Context, mident: ModuleIdent, mdef: &N::ModuleDefinition) {\n\n 
context.current_module = Some(mident);\n\n mdef.structs\n\n .iter()\n\n .for_each(|(_, sdef)| struct_def(context, sdef));\n\n mdef.functions\n\n .iter()\n\n .for_each(|(_, fdef)| function(context, fdef));\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 33, "score": 50071.15790269413 }, { "content": "fn lvalues<'a>(context: &mut Context, al: impl IntoIterator<Item = &'a N::LValue>) {\n\n al.into_iter().for_each(|a| lvalue(context, a))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 34, "score": 50071.15790269413 }, { "content": "fn type_name(context: &mut Context, sp!(loc, tn_): &N::TypeName) {\n\n use N::TypeName_ as TN;\n\n if let TN::ModuleType(m, _) = tn_ {\n\n context.add_usage(m, *loc)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 35, "score": 50071.15790269413 }, { "content": "// Parse an 'as' use alias:\n\n// UseAlias = (\"as\" <Identifier>)?\n\nfn parse_use_alias<'input>(tokens: &mut Lexer<'input>) -> Result<Option<Name>, Error> {\n\n Ok(if tokens.peek() == Tok::As {\n\n tokens.advance()?;\n\n Some(parse_identifier(tokens)?)\n\n } else {\n\n None\n\n })\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 36, "score": 49228.42181111243 }, { "content": "fn function_acquires(_context: &mut Context, _acqs: &BTreeMap<StructName, Loc>) {}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 37, "score": 49220.50971279989 }, { "content": "// Parse an alias for a module member:\n\n// UseMember = <Identifier> <UseAlias>\n\nfn parse_use_member<'input>(tokens: &mut Lexer<'input>) -> Result<(Name, Option<Name>), Error> {\n\n let member = parse_identifier(tokens)?;\n\n let alias_opt = parse_use_alias(tokens)?;\n\n 
Ok((member, alias_opt))\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 38, "score": 48406.089062106665 }, { "content": "fn module_defs(context: &mut Context, modules: &UniqueMap<ModuleIdent, N::ModuleDefinition>) {\n\n modules\n\n .iter()\n\n .for_each(|(mident, mdef)| module(context, mident, mdef))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 39, "score": 48398.28165952274 }, { "content": "fn compute_gas_used(txn: SignedTransaction, executor: &mut FakeExecutor) -> u64 {\n\n let output = &executor.execute_transaction(txn);\n\n output.gas_used()\n\n}\n", "file_path": "language/testing-infra/e2e-tests/src/gas_costs.rs", "rank": 40, "score": 48398.28165952274 }, { "content": "pub fn verify(errors: &mut Errors, modules: &mut UniqueMap<ModuleIdent, N::ModuleDefinition>) {\n\n let imm_modules = &modules;\n\n let context = &mut Context::new(imm_modules);\n\n module_defs(context, modules);\n\n let graph = &context.dependency_graph();\n\n match petgraph_toposort(graph, None) {\n\n Err(cycle_node) => {\n\n let cycle_ident = cycle_node.node_id().clone();\n\n let error = cycle_error(context, cycle_ident);\n\n errors.push(error)\n\n }\n\n Ok(ordered_ids) => {\n\n let ordered_ids = ordered_ids.into_iter().cloned().collect::<Vec<_>>();\n\n for (order, mident) in ordered_ids.into_iter().rev().enumerate() {\n\n modules.get_mut(&mident).unwrap().dependency_order = order\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 41, "score": 47603.07287024974 }, { "content": "fn good_module_uses_bad(address: AccountAddress, bad_dep: CompiledModule) -> CompiledModule {\n\n let good_module_code = \"\n\n module Test2 {\n\n import 0x1.Test;\n\n struct S { b: bool }\n\n\n\n foo(): Test.S1 {\n\n return Test.new_S1();\n\n }\n\n public bar() {\n\n return;\n\n }\n\n }\n\n \";\n\n\n\n let compiler = Compiler {\n\n address,\n\n extra_deps: vec![bad_dep],\n\n ..Compiler::default()\n\n };\n\n 
compiler\n\n .into_compiled_module(\"file_name\", good_module_code)\n\n .expect(\"Failed to compile\")\n\n}\n\n\n", "file_path": "language/e2e-testsuite/src/tests/verify_txn.rs", "rank": 42, "score": 47603.07287024974 }, { "content": "pub use compliance_key_rotation::*;\n\npub use exchange_rate_update::*;\n\npub use mint::*;\n\npub use new_block::*;\n\npub use new_epoch::*;\n\npub use preburn::*;\n\npub use received_mint::*;\n\npub use received_payment::*;\n\npub use sent_payment::*;\n\npub use upgrade::*;\n", "file_path": "types/src/account_config/events/mod.rs", "rank": 43, "score": 8.728215981168765 }, { "content": "pub use currency_info::*;\n\npub use designated_dealer::*;\n\npub use dual_attestation::*;\n\npub use freezing_bit::*;\n\npub use key_rotation_capability::*;\n\npub use preburn_balance::*;\n\npub use role::*;\n\npub use role_id::*;\n\npub use vasp::*;\n\npub use withdraw_capability::*;\n", "file_path": "types/src/account_config/resources/mod.rs", "rank": 44, "score": 8.728215981168765 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::state_replication::TxnManager;\n\nuse anyhow::{format_err, Result};\n\nuse consensus_types::{block::Block, common::Payload};\n\nuse executor_types::StateComputeResult;\n\nuse fail::fail_point;\n\nuse futures::channel::{mpsc, oneshot};\n\nuse itertools::Itertools;\n\nuse libra_mempool::{\n\n CommittedTransaction, ConsensusRequest, ConsensusResponse, TransactionExclusion,\n\n};\n\nuse libra_metrics::monitor;\n\nuse libra_trace::prelude::*;\n\nuse libra_types::transaction::TransactionStatus;\n\nuse std::time::Duration;\n\nuse tokio::time::timeout;\n\n\n\n/// Proxy interface to mempool\n", "file_path": "consensus/src/txn_manager.rs", "rank": 45, "score": 8.599177607599378 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::state_replication::StateComputer;\n\nuse anyhow::Result;\n\nuse 
consensus_types::block::Block;\n\nuse execution_correctness::ExecutionCorrectness;\n\nuse executor_types::{Error, StateComputeResult};\n\nuse fail::fail_point;\n\nuse libra_crypto::HashValue;\n\nuse libra_logger::prelude::*;\n\nuse libra_metrics::monitor;\n\nuse libra_types::ledger_info::LedgerInfoWithSignatures;\n\nuse state_synchronizer::StateSyncClient;\n\nuse std::{\n\n boxed::Box,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\n/// Basic communication with the Execution module;\n", "file_path": "consensus/src/state_computer.rs", "rank": 46, "score": 8.569609345445695 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse super::*;\n\nuse crate::{\n\n peer::DisconnectReason,\n\n peer_manager::{conn_notifs_channel, ConnectionRequest},\n\n};\n\nuse channel::{libra_channel, message_queues::QueueStyle};\n\nuse core::str::FromStr;\n\nuse futures::SinkExt;\n\nuse libra_config::{config::RoleType, network_id::NetworkId};\n\nuse libra_crypto::{test_utils::TEST_SEED, x25519, Uniform};\n\nuse libra_logger::info;\n\nuse libra_network_address::NetworkAddress;\n\nuse netcore::transport::ConnectionOrigin;\n\nuse rand::rngs::StdRng;\n\nuse std::{io, num::NonZeroUsize};\n\nuse tokio::runtime::Runtime;\n\nuse tokio_retry::strategy::FixedInterval;\n\n\n\nconst MAX_TEST_CONNECTIONS: usize = 3;\n\n\n", "file_path": "network/src/connectivity_manager/test.rs", "rank": 47, "score": 8.535564108251656 }, { "content": "use libra_crypto::x25519;\n\nuse libra_logger::prelude::*;\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::PeerId;\n\nuse num_variants::NumVariants;\n\nuse rand::{\n\n prelude::{SeedableRng, SmallRng},\n\n seq::SliceRandom,\n\n};\n\nuse serde::{export::Formatter, Serialize};\n\nuse std::{\n\n cmp::min,\n\n collections::{HashMap, HashSet},\n\n fmt, mem,\n\n sync::{Arc, RwLock},\n\n time::Duration,\n\n};\n\nuse tokio::{time, time::Instant};\n\nuse tokio_retry::strategy::jitter;\n\n\n", "file_path": 
"network/src/connectivity_manager/mod.rs", "rank": 48, "score": 8.510009163462762 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse libra_types::transaction::ScriptABI;\n\nuse serde_generate as serdegen;\n\nuse serde_generate::SourceInstaller as _;\n\nuse serde_reflection::Registry;\n\nuse std::{io::Write, process::Command};\n\nuse tempfile::tempdir;\n\nuse transaction_builder_generator as buildgen;\n\nuse transaction_builder_generator::SourceInstaller as _;\n\n\n", "file_path": "language/transaction-builder/generator/tests/generation.rs", "rank": 49, "score": 8.503063870160904 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::hash::*;\n\nuse bitvec::prelude::*;\n\nuse libra_nibble::Nibble;\n\nuse proptest::{collection::vec, prelude::*};\n\nuse rand::{rngs::StdRng, SeedableRng};\n\nuse serde::Serialize;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Serialize)]\n", "file_path": "crypto/crypto/src/unit_tests/hash_test.rs", "rank": 50, "score": 8.486468869914088 }, { "content": "};\n\nuse executor_test_helpers::integration_test_impl::test_execution_with_storage_impl;\n\nuse libra_temppath::TempPath;\n\nuse libra_types::transaction::Version;\n\nuse libradb::{GetRestoreHandler, LibraDB};\n\nuse proptest::prelude::*;\n\nuse std::{path::PathBuf, sync::Arc};\n\nuse storage_interface::DbReader;\n\nuse tokio::time::Duration;\n\n\n\n#[derive(Debug)]\n", "file_path": "storage/backup/backup-cli/src/backup_types/tests.rs", "rank": 51, "score": 8.482689299513869 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#![forbid(unsafe_code)]\n\n\n\n//! 
Functionality related to the command line interface of the Move prover.\n\n\n\nuse abigen::AbigenOptions;\n\nuse anyhow::anyhow;\n\nuse clap::{App, Arg};\n\nuse docgen::DocgenOptions;\n\nuse errmapgen::ErrmapOptions;\n\nuse log::LevelFilter;\n\nuse serde::{Deserialize, Serialize};\n\nuse simplelog::{\n\n CombinedLogger, Config, ConfigBuilder, LevelPadding, SimpleLogger, TermLogger, TerminalMode,\n\n};\n\nuse spec_lang::env::VerificationScope;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\n\n", "file_path": "language/move-prover/src/cli.rs", "rank": 52, "score": 8.453112841860143 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#![forbid(unsafe_code)]\n\n\n\nuse crate::coverage_map::CoverageMap;\n\nuse bytecode_source_map::source_map::SourceMap;\n\nuse codespan::{Files, Span};\n\nuse colored::*;\n\nuse move_core_types::identifier::Identifier;\n\nuse move_ir_types::location::Loc;\n\nuse serde::Serialize;\n\nuse std::{\n\n collections::BTreeMap,\n\n fs,\n\n io::{self, Write},\n\n path::Path,\n\n};\n\nuse vm::{\n\n access::ModuleAccess,\n", "file_path": "language/tools/move-coverage/src/source_coverage.rs", "rank": 53, "score": 8.452755944176092 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::serializer::{\n\n ExecutionCorrectnessInput, SerializerClient, SerializerService, TSerializerClient,\n\n};\n\nuse executor::Executor;\n\nuse executor_types::Error;\n\nuse libra_crypto::ed25519::Ed25519PrivateKey;\n\nuse libra_logger::warn;\n\nuse libra_secure_net::{NetworkClient, NetworkServer};\n\nuse libra_vm::LibraVM;\n\nuse std::net::SocketAddr;\n\nuse storage_client::StorageClient;\n\n\n", "file_path": "execution/execution-correctness/src/remote_service.rs", "rank": 54, "score": 8.452755944176092 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#[macro_use]\n\nextern crate 
rental;\n\n\n\nuse guppy::graph::PackageGraph;\n\nuse once_cell::sync::OnceCell;\n\nuse std::path::Path;\n\n\n\nmod debug_ignore;\n\nmod errors;\n\nmod graph;\n\nmod workspace_subset;\n\n\n\npub use debug_ignore::*;\n\npub use errors::*;\n\nuse graph::PackageGraphPlus;\n\npub use workspace_subset::*;\n\n\n", "file_path": "devtools/x-core/src/lib.rs", "rank": 55, "score": 8.452309864449024 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::ConfigurationChangeListener;\n\nuse channel::libra_channel;\n\nuse libra_config::network_id::NetworkContext;\n\nuse libra_network_address_encryption::Encryptor;\n\nuse libra_types::on_chain_config::OnChainConfigPayload;\n\nuse network::connectivity_manager::ConnectivityRequest;\n\nuse std::sync::Arc;\n\nuse tokio::runtime::Handle;\n\n\n", "file_path": "network/simple-onchain-discovery/src/builder.rs", "rank": 56, "score": 8.452309864449024 }, { "content": "\n\nmod hasher;\n\nmod unions;\n\n\n\nuse hasher::camel_to_snake;\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::Span;\n\nuse quote::quote;\n\nuse std::iter::FromIterator;\n\nuse syn::{parse_macro_input, parse_quote, Data, DeriveInput, Ident};\n\nuse unions::*;\n\n\n\n#[proc_macro_derive(SilentDisplay)]\n", "file_path": "crypto/crypto-derive/src/lib.rs", "rank": 57, "score": 8.451736402548663 }, { "content": "use rand::{distributions::Alphanumeric, thread_rng, Rng};\n\n\n\nuse crate::instance::{\n\n ApplicationConfig::{Fullnode, Validator, Vault, LSR},\n\n InstanceConfig,\n\n};\n\nuse k8s_openapi::api::batch::v1::Job;\n\nuse kube::api::ListParams;\n\nuse libra_config::config::DEFAULT_JSON_RPC_PORT;\n\nuse reqwest::Client as HttpClient;\n\nuse rusoto_core::Region;\n\nuse rusoto_s3::{PutObjectRequest, S3Client, S3};\n\nuse rusoto_sts::WebIdentityProvider;\n\nuse serde::de::DeserializeOwned;\n\nuse std::{collections::HashSet, convert::TryFrom, process::Command, time::Duration};\n\nuse 
tokio::sync::Semaphore;\n\n\n\npub const CFG_SEED: &str = \"1337133713371337133713371337133713371337133713371337133713371337\";\n\nconst DEFAULT_NAMESPACE: &str = \"default\";\n\nconst ERROR_NOT_FOUND: u16 = 404;\n", "file_path": "testsuite/cluster-test/src/cluster_swarm/cluster_swarm_kube.rs", "rank": 58, "score": 8.446924658298807 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse super::*;\n\nuse crate::LibraDB;\n\nuse itertools::Itertools;\n\nuse libra_crypto::hash::ACCUMULATOR_PLACEHOLDER_HASH;\n\nuse libra_proptest_helpers::Index;\n\nuse libra_temppath::TempPath;\n\nuse libra_types::{\n\n account_address::AccountAddress,\n\n contract_event::ContractEvent,\n\n event::EventKey,\n\n proptest_types::{AccountInfoUniverse, ContractEventGen},\n\n};\n\nuse proptest::{\n\n collection::{hash_set, vec},\n\n prelude::*,\n\n strategy::Union,\n\n};\n\nuse rand::Rng;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "storage/libradb/src/event_store/test.rs", "rank": 59, "score": 8.445046842469326 }, { "content": " logging::NetworkSchema,\n\n peer_manager::{ConnectionRequestSender, PeerManagerRequestSender},\n\n protocols::network::{Event, NetworkEvents, NetworkSender, NewNetworkSender},\n\n ProtocolId,\n\n};\n\nuse bytes::Bytes;\n\nuse channel::message_queues::QueueStyle;\n\nuse futures::{\n\n sink::SinkExt,\n\n stream::{FusedStream, Stream, StreamExt},\n\n};\n\nuse libra_config::network_id::NetworkContext;\n\nuse libra_crypto_derive::{CryptoHasher, LCSCryptoHash};\n\nuse libra_logger::prelude::*;\n\nuse libra_metrics::IntCounterVec;\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::PeerId;\n\nuse rand::{rngs::SmallRng, Rng, SeedableRng};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n", "file_path": "network/src/protocols/gossip_discovery/mod.rs", "rank": 60, "score": 8.433509519660987 }, { "content": "\n\nuse anyhow::{ensure, Error, Result};\n\nuse bytes::Bytes;\n\nuse 
libra_nibble::Nibble;\n\nuse mirai_annotations::*;\n\nuse once_cell::sync::{Lazy, OnceCell};\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nuse proptest_derive::Arbitrary;\n\nuse rand::{rngs::OsRng, Rng};\n\nuse serde::{de, ser};\n\nuse std::{self, convert::AsRef, fmt, str::FromStr};\n\nuse tiny_keccak::{Hasher, Sha3};\n\n\n\n/// A prefix used to begin the salt of every libra hashable structure. The salt\n\n/// consists in this global prefix, concatenated with the specified\n\n/// serialization name of the struct.\n\npub(crate) const LIBRA_HASH_PREFIX: &[u8] = b\"LIBRA::\";\n\nconst SHORT_STRING_LENGTH: usize = 4;\n\n\n\n/// Output value of our hash function. Intentionally opaque for safety and modularity.\n", "file_path": "crypto/crypto/src/hash.rs", "rank": 61, "score": 8.431441372938782 }, { "content": "pub use error::*;\n\nmod execution_config;\n\npub use execution_config::*;\n\nmod key_manager_config;\n\npub use key_manager_config::*;\n\nmod logger_config;\n\npub use logger_config::*;\n\nmod metrics_config;\n\npub use metrics_config::*;\n\nmod mempool_config;\n\npub use mempool_config::*;\n\nmod network_config;\n\npub use network_config::*;\n\nmod rpc_config;\n\npub use rpc_config::*;\n\nmod secure_backend_config;\n\npub use secure_backend_config::*;\n\nmod state_sync_config;\n\npub use state_sync_config::*;\n\nmod storage_config;\n", "file_path": "config/src/config/mod.rs", "rank": 62, "score": 8.428960935409492 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#![forbid(unsafe_code)]\n\n\n\nuse anyhow::Result;\n\nuse libra_logger::info;\n\nuse libradb::LibraDB;\n\nuse std::path::PathBuf;\n\nuse storage_interface::DbReader;\n\nuse transaction_builder::get_transaction_name;\n\n\n\nuse libra_types::{\n\n account_address::AccountAddress, account_config::AccountResource, account_state::AccountState,\n\n};\n\nuse std::convert::TryFrom;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, StructOpt)]\n", 
"file_path": "storage/inspector/src/main.rs", "rank": 63, "score": 8.425931270776749 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse channel::libra_channel::{self, Receiver};\n\nuse futures::{sink::SinkExt, StreamExt};\n\nuse libra_config::{config::RoleType, network_id::NetworkContext};\n\nuse libra_crypto::x25519;\n\nuse libra_logger::prelude::*;\n\nuse libra_metrics::{register_histogram, DurationHistogram};\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_network_address_encryption::{Encryptor, Error as EncryptorError};\n\nuse libra_types::on_chain_config::{OnChainConfigPayload, ValidatorSet, ON_CHAIN_CONFIG_REGISTRY};\n\nuse network::{\n\n connectivity_manager::{ConnectivityRequest, DiscoverySource},\n\n logging::NetworkSchema,\n\n};\n\nuse once_cell::sync::Lazy;\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n sync::Arc,\n", "file_path": "network/simple-onchain-discovery/src/lib.rs", "rank": 64, "score": 8.42013692445499 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse std::path::Path;\n\n\n\nuse codespan_reporting::term::termcolor::Buffer;\n\n\n\nuse libra_temppath::TempPath;\n\nuse move_prover::{cli::Options, run_move_prover};\n\nuse std::path::PathBuf;\n\nuse test_utils::baseline_test::verify_or_update_baseline;\n\n\n\nuse itertools::Itertools;\n\n#[allow(unused_imports)]\n\nuse log::debug;\n\nuse std::{fs::File, io::Read};\n\n\n\nconst FLAGS: &[&str] = &[\n\n \"--verbose=warn\",\n\n \"--dependency=../../stdlib/modules\",\n\n \"--docgen\",\n\n];\n\n\n", "file_path": "language/move-prover/docgen/tests/testsuite.rs", "rank": 65, "score": 8.417027507500284 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::storage::{BackupHandle, BackupStorage, FileHandleRef};\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse rand::random;\n\nuse 
serde::de::DeserializeOwned;\n\nuse std::{convert::TryInto, sync::Arc};\n\nuse tokio::io::AsyncReadExt;\n\n\n\n#[async_trait]\n", "file_path": "storage/backup/backup-cli/src/utils/storage_ext.rs", "rank": 66, "score": 8.383117438100077 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse anyhow::Result;\n\nuse compiled_stdlib::{stdlib_modules, StdLibOptions};\n\nuse functional_tests::{\n\n compiler::{Compiler, ScriptOrModule},\n\n testsuite,\n\n};\n\nuse ir_to_bytecode::{\n\n compiler::{compile_module, compile_script},\n\n parser::parse_script_or_module,\n\n};\n\nuse libra_types::account_address::AccountAddress;\n\nuse move_ir_types::ast;\n\nuse std::path::Path;\n\nuse vm::CompiledModule;\n\n\n", "file_path": "language/ir-testsuite/tests/testsuite.rs", "rank": 67, "score": 8.38226371942803 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse super::*;\n\nuse itertools::zip_eq;\n\nuse libra_config::{config::NodeConfig, utils};\n\nuse libra_crypto::hash::CryptoHash;\n\n#[cfg(test)]\n\nuse libradb::test_helper::arb_blocks_to_commit;\n\nuse proptest::prelude::*;\n\nuse std::{\n\n collections::{BTreeMap, HashMap},\n\n net::{IpAddr, Ipv4Addr, SocketAddr},\n\n};\n\nuse storage_client::StorageClient;\n\n\n", "file_path": "storage/storage-service/src/storage_service_test.rs", "rank": 68, "score": 8.38226371942803 }, { "content": "pub use peer_to_peer::*;\n\npub use rotate_key::*;\n\npub use universe::*;\n\n\n\nuse crate::{\n\n account::{self, lbr_currency_code, Account, AccountData},\n\n executor::FakeExecutor,\n\n gas_costs, transaction_status_eq,\n\n};\n\nuse libra_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey};\n\nuse libra_types::{\n\n transaction::{SignedTransaction, TransactionStatus},\n\n vm_status::{known_locations, KeptVMStatus, StatusCode},\n\n};\n\nuse once_cell::sync::Lazy;\n\nuse proptest::{prelude::*, strategy::Union};\n\nuse 
std::{fmt, sync::Arc};\n\n\n\nstatic UNIVERSE_SIZE: Lazy<usize> = Lazy::new(|| {\n\n use std::{env, process::abort};\n", "file_path": "language/testing-infra/e2e-tests/src/account_universe.rs", "rank": 69, "score": 8.381068805366864 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! Integration tests for validator_network.\n\n\n\nuse crate::builder::NetworkBuilder;\n\nuse channel::message_queues::QueueStyle;\n\nuse futures::{executor::block_on, StreamExt};\n\nuse libra_config::{\n\n config::RoleType,\n\n network_id::{NetworkContext, NetworkId},\n\n};\n\nuse libra_crypto::{test_utils::TEST_SEED, x25519, Uniform};\n\nuse libra_metrics::IntCounterVec;\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::{chain_id::ChainId, PeerId};\n\nuse netcore::transport::ConnectionOrigin;\n\nuse network::{\n\n constants,\n\n error::NetworkError,\n", "file_path": "network/builder/src/dummy.rs", "rank": 70, "score": 8.373139645101812 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{BuildSwarm, Error};\n\nuse anyhow::{ensure, format_err, Result};\n\nuse executor::db_bootstrapper;\n\nuse libra_config::{\n\n config::{\n\n DiscoveryMethod, NodeConfig, OnDiskStorageConfig, RemoteService, SafetyRulesService,\n\n SecureBackend, Token, VaultConfig, WaypointConfig,\n\n },\n\n generator,\n\n};\n\nuse libra_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_temppath::TempPath;\n\nuse libra_types::{chain_id::ChainId, waypoint::Waypoint};\n\nuse libra_vm::LibraVM;\n\nuse libradb::LibraDB;\n\nuse rand::{rngs::StdRng, Rng, SeedableRng};\n", "file_path": "config/config-builder/src/validator_config.rs", "rank": 71, "score": 8.366230363484046 }, { "content": " memory_instrumentation::MemoryInstrumentationProcessor,\n\n reaching_def_analysis::ReachingDefProcessor,\n\n 
stackless_bytecode::{Bytecode, Operation},\n\n test_instrumenter::TestInstrumenter,\n\n usage_analysis::{self, UsageProcessor},\n\n};\n\nuse codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};\n\nuse docgen::Docgen;\n\nuse errmapgen::ErrmapGen;\n\nuse handlebars::Handlebars;\n\nuse itertools::Itertools;\n\n#[allow(unused_imports)]\n\nuse log::{debug, info, warn};\n\nuse move_lang::find_move_filenames;\n\nuse once_cell::sync::Lazy;\n\nuse regex::Regex;\n\nuse spec_lang::{code_writer::CodeWriter, emit, emitln, env::GlobalEnv, run_spec_lang_compiler};\n\nuse std::{\n\n collections::{BTreeMap, BTreeSet},\n\n fs,\n", "file_path": "language/move-prover/src/lib.rs", "rank": 72, "score": 8.366230363484046 }, { "content": " util::time_service::TimeService,\n\n};\n\nuse anyhow::{bail, ensure, Context};\n\nuse channel::libra_channel;\n\nuse consensus_types::{\n\n common::{Author, Round},\n\n epoch_retrieval::EpochRetrievalRequest,\n\n};\n\nuse futures::{select, StreamExt};\n\nuse libra_config::config::{ConsensusConfig, ConsensusProposerType, NodeConfig};\n\nuse libra_logger::prelude::*;\n\nuse libra_metrics::monitor;\n\nuse libra_types::{\n\n account_address::AccountAddress,\n\n epoch_change::EpochChangeProof,\n\n epoch_state::EpochState,\n\n on_chain_config::{OnChainConfigPayload, ValidatorSet},\n\n};\n\nuse network::protocols::network::Event;\n\nuse safety_rules::SafetyRulesManager;\n", "file_path": "consensus/src/epoch_manager.rs", "rank": 73, "score": 8.365225473893325 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse anyhow::Result;\n\nuse bytes::Bytes;\n\nuse hyper::Body;\n\nuse libra_logger::prelude::*;\n\nuse libra_metrics::{\n\n register_histogram_vec, register_int_counter_vec, HistogramVec, IntCounterVec,\n\n};\n\nuse libradb::backup::backup_handler::BackupHandler;\n\nuse once_cell::sync::Lazy;\n\nuse serde::Serialize;\n\nuse std::{convert::Infallible, 
future::Future};\n\nuse warp::{reply::Response, Rejection, Reply};\n\n\n\npub(super) static LATENCY_HISTOGRAM: Lazy<HistogramVec> = Lazy::new(|| {\n\n register_histogram_vec!(\n\n \"libra_backup_service_latency_s\",\n\n \"Backup service endpoint latency.\",\n", "file_path": "storage/backup/backup-service/src/handlers/utils.rs", "rank": 74, "score": 8.365225473893325 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse backup_service::start_backup_service;\n\nuse libra_config::utils::get_available_port;\n\nuse libra_proptest_helpers::ValueGenerator;\n\nuse libra_temppath::TempPath;\n\nuse libra_types::{ledger_info::LedgerInfoWithSignatures, transaction::TransactionToCommit};\n\nuse libradb::{test_helper::arb_blocks_to_commit, LibraDB};\n\nuse std::{\n\n net::{IpAddr, Ipv4Addr, SocketAddr},\n\n sync::Arc,\n\n};\n\nuse storage_interface::DbWriter;\n\nuse tokio::runtime::Runtime;\n\n\n", "file_path": "storage/backup/backup-cli/src/utils/test_utils.rs", "rank": 75, "score": 8.36399760328341 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse std::path::Path;\n\n\n\nuse codespan_reporting::term::termcolor::Buffer;\n\nuse libra_temppath::TempPath;\n\nuse move_prover::{cli::Options, run_move_prover};\n\nuse std::path::PathBuf;\n\nuse test_utils::baseline_test::verify_or_update_baseline;\n\n\n\n#[allow(unused_imports)]\n\nuse log::debug;\n\nuse std::{fs::File, io::Read};\n\n\n\nconst FLAGS: &[&str] = &[\"--verbose=warn\", \"--abigen\"];\n\n\n", "file_path": "language/move-prover/abigen/tests/testsuite.rs", "rank": 76, "score": 8.362463271825458 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{methods, runtime, tests};\n\nuse futures::{channel::mpsc::channel, StreamExt};\n\nuse libra_config::config;\n\nuse libra_proptest_helpers::ValueGenerator;\n\nuse std::sync::Arc;\n\nuse 
warp::reply::Reply;\n\n\n\n#[test]\n", "file_path": "json-rpc/src/fuzzing.rs", "rank": 77, "score": 8.357863653174453 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::identifier::{IdentStr, Identifier, ALLOWED_IDENTIFIERS};\n\nuse lcs::test_helpers::assert_canonical_encode_decode;\n\nuse once_cell::sync::Lazy;\n\nuse proptest::prelude::*;\n\nuse regex::Regex;\n\nuse std::borrow::Borrow;\n\n\n\n#[test]\n", "file_path": "language/move-core/types/src/unit_tests/identifier_test.rs", "rank": 78, "score": 8.357863653174453 }, { "content": "use crossbeam_channel::{bounded, unbounded, Receiver, Sender};\n\nuse getrandom::getrandom;\n\nuse language_e2e_tests::executor::FakeExecutor;\n\nuse libra_logger::{debug, error, info};\n\nuse libra_state_view::StateView;\n\nuse libra_types::{account_address::AccountAddress, vm_status::StatusCode};\n\nuse libra_vm::LibraVM;\n\nuse module_generation::generate_module;\n\nuse move_core_types::{\n\n gas_schedule::{GasAlgebra, GasUnits},\n\n language_storage::TypeTag,\n\n vm_status::VMStatus,\n\n};\n\nuse move_vm_types::{gas_schedule::CostStrategy, values::Value};\n\nuse rand::{rngs::StdRng, Rng, SeedableRng};\n\nuse std::{fs, io::Write, panic, thread};\n\nuse vm::{\n\n access::ModuleAccess,\n\n file_format::{\n\n CompiledModule, CompiledModuleMut, FunctionDefinitionIndex, Kind, SignatureToken,\n\n StructHandleIndex,\n\n },\n\n};\n\n\n\n/// This function calls the Bytecode verifier to test it\n", "file_path": "language/testing-infra/test-generation/src/lib.rs", "rank": 79, "score": 8.355622936840964 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! Wrapper around the boogie program. 
Allows to call boogie and analyze the output.\n\n\n\nuse anyhow::anyhow;\n\n\n\nuse std::{\n\n collections::{BTreeMap, BTreeSet},\n\n fs,\n\n option::Option::None,\n\n};\n\n\n\nuse codespan::{ByteIndex, ByteOffset, ColumnIndex, FileId, LineIndex, Location, Span};\n\nuse codespan_reporting::diagnostic::{Diagnostic, Label, Severity};\n\nuse itertools::Itertools;\n\nuse log::{debug, info, warn};\n\nuse num::BigInt;\n\nuse pretty::RcDoc;\n\nuse regex::Regex;\n", "file_path": "language/move-prover/src/boogie_wrapper.rs", "rank": 80, "score": 8.355224182624706 }, { "content": " },\n\n transport,\n\n transport::{Connection, ConnectionId, ConnectionMetadata},\n\n ProtocolId,\n\n};\n\nuse bytes::Bytes;\n\nuse channel::{self, libra_channel};\n\nuse futures::{\n\n channel::oneshot,\n\n future::{BoxFuture, FutureExt},\n\n io::{AsyncRead, AsyncWrite, AsyncWriteExt},\n\n sink::SinkExt,\n\n stream::{Fuse, FuturesUnordered, StreamExt},\n\n};\n\nuse libra_config::network_id::NetworkContext;\n\nuse libra_logger::prelude::*;\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::PeerId;\n\nuse netcore::transport::{ConnectionOrigin, Transport};\n\nuse serde::Serialize;\n", "file_path": "network/src/peer_manager/mod.rs", "rank": 81, "score": 8.355224182624706 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{options::ModuleGeneratorOptions, padding::Pad, utils::random_string};\n\nuse bytecode_verifier::verify_module;\n\nuse ir_to_bytecode::compiler::compile_module;\n\nuse libra_types::account_address::AccountAddress;\n\nuse move_ir_types::{ast::*, location::*};\n\nuse rand::{rngs::StdRng, Rng};\n\nuse std::collections::{BTreeSet, VecDeque};\n\nuse vm::file_format::CompiledModule;\n\n\n", "file_path": "language/testing-infra/module-generation/src/generator.rs", "rank": 82, "score": 8.3427561483674 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: 
Apache-2.0\n\n\n\nuse crate::{error::Error, storage::StorageWrapper};\n\nuse libra_config::config;\n\nuse libra_types::chain_id::{self, ChainId};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n/// A config file for working with management tooling.\n\n///\n\n/// # Example:\n\n///\n\n/// ```\n\n/// use libra_management::config::ConfigPath;\n\n/// use structopt::StructOpt;\n\n///\n\n/// #[derive(Clone, Debug, StructOpt)]\n\n/// struct TestCommandLine {\n", "file_path": "config/management/src/config.rs", "rank": 83, "score": 8.3427561483674 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{\n\n counters,\n\n epoch_manager::EpochManager,\n\n network::NetworkTask,\n\n network_interface::{ConsensusNetworkEvents, ConsensusNetworkSender},\n\n persistent_liveness_storage::StorageWriteProxy,\n\n state_computer::ExecutionProxy,\n\n txn_manager::MempoolProxy,\n\n util::time_service::ClockTimeService,\n\n};\n\nuse channel::libra_channel;\n\nuse execution_correctness::ExecutionCorrectnessManager;\n\nuse futures::channel::mpsc;\n\nuse libra_config::config::NodeConfig;\n\nuse libra_logger::prelude::*;\n\nuse libra_mempool::ConsensusRequest;\n\nuse libra_types::on_chain_config::OnChainConfigPayload;\n\nuse state_synchronizer::StateSyncClient;\n\nuse std::sync::Arc;\n\nuse storage_interface::DbReader;\n\nuse tokio::runtime::{self, Runtime};\n\n\n\n/// Helper function to start consensus based on configuration and return the runtime\n", "file_path": "consensus/src/consensus_provider.rs", "rank": 84, "score": 8.340783538981755 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{\n\n config::{Error, RoleType, SecureBackend},\n\n keys::ConfigKey,\n\n network_id::NetworkId,\n\n utils,\n\n};\n\nuse libra_crypto::{x25519, Uniform};\n\nuse libra_network_address::NetworkAddress;\n\nuse 
libra_network_address_encryption::Encryptor;\n\nuse libra_secure_storage::{CryptoStorage, KVStorage, Storage};\n\nuse libra_types::{transaction::authenticator::AuthenticationKey, PeerId};\n\nuse rand::{\n\n rngs::{OsRng, StdRng},\n\n Rng, SeedableRng,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n", "file_path": "config/src/config/network_config.rs", "rank": 85, "score": 8.337732443020757 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{\n\n block_storage::{block_store::BlockStore, BlockReader},\n\n persistent_liveness_storage::{LedgerRecoveryData, RecoveryData, RootMetadata},\n\n state_computer::ExecutionProxy,\n\n test_utils::{EmptyStorage, TreeInserter},\n\n util::mock_time_service::SimulatedTimeService,\n\n};\n\nuse consensus_types::{block::Block, quorum_cert::QuorumCert};\n\nuse execution_correctness::{ExecutionCorrectness, ExecutionCorrectnessManager};\n\nuse executor_test_helpers::start_storage_service;\n\nuse executor_types::ExecutedTrees;\n\nuse futures::channel::mpsc;\n\nuse libra_config::config::{NodeConfig, PersistableConfig};\n\nuse libra_crypto::{ed25519::Ed25519PrivateKey, Uniform};\n\nuse libra_temppath::TempPath;\n\nuse libra_types::validator_signer::ValidatorSigner;\n\nuse state_synchronizer::StateSyncClient;\n\nuse std::sync::Arc;\n\nuse storage_interface::DbReader;\n\n\n", "file_path": "consensus/src/block_storage/block_store_and_lec_recovery_test.rs", "rank": 86, "score": 8.33745816867809 }, { "content": "pub mod typing;\n\n\n\nuse anyhow::anyhow;\n\nuse codespan::{ByteIndex, Span};\n\nuse compiled_unit::CompiledUnit;\n\nuse errors::*;\n\nuse move_ir_types::location::*;\n\nuse parser::syntax::parse_file_string;\n\nuse shared::Address;\n\nuse std::{\n\n collections::{BTreeMap, HashMap},\n\n fs::File,\n\n io::{Read, Write},\n\n iter::Peekable,\n\n path::{Path, PathBuf},\n\n str::Chars,\n\n};\n\n\n\npub const MOVE_EXTENSION: &str = \"move\";\n\npub const 
MOVE_COMPILED_EXTENSION: &str = \"mv\";\n", "file_path": "language/move-lang/src/lib.rs", "rank": 87, "score": 8.332937377251532 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse std::{\n\n collections::HashSet,\n\n env, fmt, process,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse libra_logger::{info, warn};\n\nuse libra_types::chain_id::ChainId;\n\nuse reqwest::Url;\n\nuse structopt::{clap::ArgGroup, StructOpt};\n\nuse termion::{color, style};\n\n\n\nuse anyhow::{bail, format_err, Result};\n\nuse cluster_test::{\n\n aws,\n\n cluster::Cluster,\n\n cluster_builder::{ClusterBuilder, ClusterBuilderParams},\n", "file_path": "testsuite/cluster-test/src/main.rs", "rank": 88, "score": 8.332937377251532 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! Convenience Network API for Libra\n\n\n\npub use crate::protocols::rpc::error::RpcError;\n\nuse crate::{\n\n error::NetworkError,\n\n peer_manager::{\n\n ConnectionNotification, ConnectionRequestSender, PeerManagerNotification,\n\n PeerManagerRequestSender,\n\n },\n\n ProtocolId,\n\n};\n\nuse bytes::Bytes;\n\nuse channel::libra_channel;\n\nuse futures::{\n\n channel::oneshot,\n\n stream::{FusedStream, Map, Select, Stream, StreamExt},\n\n task::{Context, Poll},\n\n};\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::PeerId;\n\nuse netcore::transport::ConnectionOrigin;\n\nuse pin_project::pin_project;\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse std::{marker::PhantomData, pin::Pin, time::Duration};\n\n\n", "file_path": "network/src/protocols/network/mod.rs", "rank": 89, "score": 8.330436026125334 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse super::*;\n\nuse crate::{pruner, LibraDB};\n\nuse libra_crypto::hash::CryptoHash;\n\nuse libra_jellyfish_merkle::restore::JellyfishMerkleRestore;\n\nuse 
libra_temppath::TempPath;\n\nuse libra_types::{account_address::AccountAddress, account_state_blob::AccountStateBlob};\n\nuse proptest::{collection::hash_map, prelude::*};\n\n\n", "file_path": "storage/libradb/src/state_store/state_store_test.rs", "rank": 90, "score": 8.326780390855463 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse super::*;\n\nuse crate::storage::{\n\n command_adapter::config::Commands,\n\n test_util::{\n\n arb_backups, arb_metadata_files, test_save_and_list_metadata_files_impl,\n\n test_write_and_read_impl,\n\n },\n\n};\n\nuse futures::Future;\n\nuse libra_temppath::TempPath;\n\nuse proptest::prelude::*;\n\nuse std::str::FromStr;\n\nuse tokio::runtime::Runtime;\n\n\n", "file_path": "storage/backup/backup-cli/src/storage/command_adapter/tests.rs", "rank": 91, "score": 8.326780390855463 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#![forbid(unsafe_code)]\n\n\n\nuse crate::{\n\n cluster::Cluster,\n\n experiments::{Context, Experiment, ExperimentParam},\n\n tx_emitter::EmitJobRequest,\n\n};\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse futures::{sink::SinkExt, StreamExt};\n\nuse libra_config::{config::NodeConfig, network_id::NetworkId};\n\nuse libra_crypto::x25519;\n\nuse libra_mempool::network::{MempoolNetworkEvents, MempoolNetworkSender};\n\nuse libra_network_address::NetworkAddress;\n\nuse libra_types::chain_id::ChainId;\n\nuse network::{\n\n connectivity_manager::DiscoverySource, protocols::network::Event, ConnectivityRequest,\n", "file_path": "testsuite/cluster-test/src/experiments/load_test.rs", "rank": 92, "score": 8.325877973115194 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{\n\n counters,\n\n errors::JsonRpcError,\n\n methods::{build_registry, JsonRpcRequest, JsonRpcService, RpcRegistry},\n\n 
response::JsonRpcResponse,\n\n};\n\nuse futures::future::join_all;\n\nuse libra_config::config::{NodeConfig, RoleType};\n\nuse libra_mempool::MempoolClientSender;\n\nuse libra_types::{chain_id::ChainId, ledger_info::LedgerInfoWithSignatures};\n\nuse serde_json::{map::Map, Value};\n\nuse std::{net::SocketAddr, sync::Arc};\n\nuse storage_interface::DbReader;\n\nuse tokio::runtime::{Builder, Runtime};\n\nuse warp::{\n\n reject::{self, Reject},\n\n Filter,\n", "file_path": "json-rpc/src/runtime.rs", "rank": 93, "score": 8.325877973115194 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse backup_service::start_backup_service;\n\nuse consensus::{consensus_provider::start_consensus, gen_consensus_reconfig_subscription};\n\nuse debug_interface::node_debug_service::NodeDebugService;\n\nuse executor::{db_bootstrapper::maybe_bootstrap, Executor};\n\nuse executor_types::ChunkExecutor;\n\nuse futures::{channel::mpsc::channel, executor::block_on};\n\nuse libra_config::{\n\n config::{NetworkConfig, NodeConfig, RoleType},\n\n network_id::NodeNetworkId,\n\n utils::get_genesis_txn,\n\n};\n\nuse libra_json_rpc::bootstrap_from_config as bootstrap_rpc;\n\nuse libra_logger::{prelude::*, Logger};\n\nuse libra_mempool::gen_mempool_reconfig_subscription;\n\nuse libra_metrics::metric_server;\n\nuse libra_types::{\n\n account_config::libra_root_address, account_state::AccountState, chain_id::ChainId,\n", "file_path": "libra-node/src/main_node.rs", "rank": 94, "score": 8.321178344947429 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#![forbid(unsafe_code)]\n\n\n\nuse crate::cluster_swarm::cluster_swarm_kube::ClusterSwarmKube;\n\nuse anyhow::{format_err, Result};\n\nuse debug_interface::AsyncNodeDebugClient;\n\nuse libra_config::config::NodeConfig;\n\nuse libra_json_rpc_client::{JsonRpcAsyncClient, JsonRpcBatch};\n\nuse reqwest::{Client, Url};\n\nuse 
serde_json::Value;\n\nuse std::{\n\n collections::HashSet,\n\n fmt,\n\n process::Stdio,\n\n str::FromStr,\n\n time::{Duration, Instant},\n\n};\n\nuse tokio::{process::Command, time};\n", "file_path": "testsuite/cluster-test/src/instance.rs", "rank": 95, "score": 8.320313788651566 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse crate::{libra_interface::JsonRpcLibraInterface, Action, Error, KeyManager, LibraInterface};\n\nuse anyhow::Result;\n\nuse executor::Executor;\n\nuse executor_types::BlockExecutor;\n\nuse futures::{channel::mpsc::channel, StreamExt};\n\nuse libra_config::{\n\n config::{KeyManagerConfig, NodeConfig},\n\n utils,\n\n utils::get_genesis_txn,\n\n};\n\nuse libra_crypto::{ed25519::Ed25519PrivateKey, HashValue, PrivateKey, Uniform};\n\nuse libra_global_constants::{\n\n CONSENSUS_KEY, OPERATOR_ACCOUNT, OPERATOR_KEY, OWNER_ACCOUNT, OWNER_KEY,\n\n};\n\nuse libra_secure_storage::{InMemoryStorageInternal, KVStorage};\n\nuse libra_secure_time::{MockTimeService, TimeService};\n\nuse libra_types::{\n", "file_path": "secure/key-manager/src/tests.rs", "rank": 96, "score": 8.316203905077295 }, { "content": "/// the Vault service between runs.\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::storage_helper::StorageHelper;\n\n use libra_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};\n\n use libra_global_constants::{OPERATOR_KEY, OWNER_KEY};\n\n use libra_management::constants;\n\n use libra_secure_storage::KVStorage;\n\n use libra_types::{\n\n account_address,\n\n chain_id::ChainId,\n\n transaction::{TransactionArgument, TransactionPayload},\n\n };\n\n use std::{\n\n fs::File,\n\n io::{Read, Write},\n\n };\n\n\n\n #[test]\n", "file_path": "config/management/genesis/src/command.rs", "rank": 97, "score": 8.313369007749376 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse anyhow::{Context, Result};\n\nuse 
config_builder::SwarmConfig;\n\nuse debug_interface::NodeDebugClient;\n\nuse libra_config::config::{NodeConfig, RoleType};\n\nuse libra_genesis_tool::config_builder::{FullnodeBuilder, FullnodeType, ValidatorBuilder};\n\nuse libra_logger::prelude::*;\n\nuse libra_temppath::TempPath;\n\nuse libra_types::account_address::AccountAddress;\n\nuse std::{\n\n collections::HashMap,\n\n env,\n\n fs::File,\n\n io::{self, Read},\n\n path::{Path, PathBuf},\n\n process::{Child, Command},\n\n str::FromStr,\n\n};\n", "file_path": "testsuite/libra-swarm/src/swarm.rs", "rank": 98, "score": 8.31163172543721 }, { "content": "// Copyright (c) The Libra Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse anyhow::Context;\n\nuse bytecode_source_map::source_map::SourceMap;\n\nuse ir_to_bytecode::{compiler::compile_module, parser::parse_module};\n\nuse libra_types::account_address::AccountAddress;\n\nuse move_ir_types::location::Loc;\n\nuse std::{fs, path::Path};\n\nuse vm::{access::ModuleAccess, file_format::CompiledModule};\n\n\n", "file_path": "language/compiler/src/util.rs", "rank": 99, "score": 8.304457020535944 } ]
Rust
src/bin/delete-reaction.rs
netguru/commentable-rs
be95c83cfc2ff65d6175cc0a979461cc235512c5
use lambda_http::{lambda, Request, Response, Body, RequestExt}; use rusoto_core::Region; use rusoto_dynamodb::{DynamoDbClient}; use serde::Deserialize; use commentable_rs::utils::db::{DynamoDbModel, CommentableId}; use commentable_rs::utils::http::{ok, bad_request, internal_server_error, HttpError}; use commentable_rs::utils::current_user::CurrentUser; use commentable_rs::utils::current_comment::CurrentComment; use commentable_rs::models::{ user::{AuthToken, User, UserId}, comment::{CommentId, Comment}, reaction::{reaction_id, Reaction, ReactionType}, }; #[derive(Deserialize)] struct Params { auth_token: AuthToken, comment_id: CommentId, reaction_type: ReactionType, } struct DeleteReaction { db: DynamoDbClient, commentable_id: CommentableId, params: Params, current_user: Option<User>, current_comment: Option<Comment>, reaction: Option<Reaction>, } impl CurrentUser for DeleteReaction { fn db(&self) -> &DynamoDbClient { &self.db } fn auth_token(&self) -> Option<AuthToken> { Some(self.params.auth_token.clone()) } fn set_current_user(&mut self, user: Option<User>) { self.current_user = user; } } impl CurrentComment for DeleteReaction { fn db(&self) -> &DynamoDbClient { &self.db } fn commentable_id(&self) -> CommentableId { self.commentable_id.clone() } fn comment_id(&self) -> CommentId { self.params.comment_id.clone() } fn set_current_comment(&mut self, comment: Comment) { self.current_comment = Some(comment); } } impl DeleteReaction { pub fn respond_to(request: Request) -> Result<Response<Body>, HttpError> { if let Some(commentable_id) = request.path_parameters().get("id") { Self::new(request, commentable_id.to_string())? .validate_params()? .fetch_current_user()? .fetch_current_comment()? .fetch_reaction()? .delete()? 
.serialize() } else { Err(bad_request("Invalid path parameters: 'id' is required.")) } } pub fn new(request: Request, commentable_id: CommentableId) -> Result<Self, HttpError> { if let Ok(Some(params)) = request.payload::<Params>() { Ok(Self { db: DynamoDbClient::new(Region::default()), commentable_id, current_comment: None, current_user: None, reaction: None, params, }) } else { Err(bad_request("Invalid parameters")) } } fn current_user_id(&self) -> &UserId { &self.current_user.as_ref().unwrap().id } fn current_comment_id(&self) -> &CommentId { &self.current_comment.as_ref().unwrap().id } pub fn validate_params(&mut self) -> Result<&mut Self, HttpError> { if self.params.auth_token.trim().len() == 0 { Err(bad_request("Invalid request parameters: auth_token is required")) } else if self.params.comment_id.trim().len() == 0 { Err(bad_request("Invalid request parameters: comment_id is required")) } else if self.params.reaction_type.trim().len() == 0 { Err(bad_request("Invalid request parameters: reaction_type is required")) } else { Ok(self) } } pub fn fetch_reaction(&mut self) -> Result<&mut Self, HttpError> { let id = reaction_id(self.current_comment_id(), self.current_user_id(), &self.params.reaction_type); match Reaction::find(&self.db, self.commentable_id.clone(), id) { Ok(Some(reaction)) => self.reaction = Some(reaction), Ok(None) => return Err(bad_request("Could not delete reaction.")), Err(err) => return Err(internal_server_error(err)), } Ok(self) } pub fn delete(&mut self) -> Result<&mut Self, HttpError> { let id = reaction_id(self.current_comment_id(), self.current_user_id(), &self.params.reaction_type); Reaction::delete(&self.db, self.commentable_id.clone(), id) .map_err(|err| internal_server_error(err))?; Ok(self) } pub fn serialize(&self) -> Result<Response<Body>, HttpError> { Ok(ok("")) } } fn main() { lambda!(|request, _| DeleteReaction::respond_to(request) .or_else(|error_response| Ok(error_response)) ); }
use lambda_http::{lambda, Request, Response, Body, RequestExt}; use rusoto_core::Region; use rusoto_dynamodb::{DynamoDbClient}; use serde::Deserialize; use commentable_rs::utils::db::{DynamoDbModel, CommentableId}; use commentable_rs::utils::http::{ok, bad_request, internal_server_error, HttpError}; use commentable_rs::utils::current_user::CurrentUser; use commentable_rs::utils::current_comment::CurrentComment; use commentable_rs::models::{ user::{AuthToken, User, UserId}, comment::{CommentId, Comment}, reaction::{reaction_id, Reaction, ReactionType}, }; #[derive(Deserialize)] struct Params { auth_token: AuthToken, comment_id: CommentId, reaction_type: ReactionType, } struct DeleteReaction { db: DynamoDbClient, commentable_id: CommentableId, params: Params, current_user: Option<User>, current_comment: Option<Comment>, reaction: Option<Reaction>, } impl CurrentUser for DeleteReaction { fn db(&self) -> &DynamoDbClient { &self.db } fn auth_token(&self) -> Option<AuthToken> { Some(self.params.auth_token.clone()) } fn set_current_user(&mut self, user: Option<User>) { self.current_user = user; } } impl CurrentComment for DeleteReaction { fn db(&self) -> &DynamoDbClient { &self.db } fn commentable_id(&self) -> CommentableId { self.commentable_id.clone() } fn comment_id(&self) -> CommentId { self.params.comment_id.clone() } fn set_current_comment(&mut self, comment: Comment) { self.current_comment = Some(comment); } } impl DeleteReaction { pub fn respond_to(request: Request) -> Result<Response<Body>, HttpError> { if let Some(commentable_id) = request.path_parameters().get("id") { Self::new(request, commentable_id.to_string())? .validate_params()? .fetch_current_user()? .fetch_current_comment()? .fetch_reaction()? .delete()? .serialize() } else { Err(bad_request("Invalid path parameters: 'id' is required.")) } } pub fn new(request: Request, commentable_id: CommentableId) -> Result<Self, HttpError> { if let Ok(Some(params)) = request.payload::<Params>() {
} else { Err(bad_request("Invalid parameters")) } } fn current_user_id(&self) -> &UserId { &self.current_user.as_ref().unwrap().id } fn current_comment_id(&self) -> &CommentId { &self.current_comment.as_ref().unwrap().id } pub fn validate_params(&mut self) -> Result<&mut Self, HttpError> { if self.params.auth_token.trim().len() == 0 { Err(bad_request("Invalid request parameters: auth_token is required")) } else if self.params.comment_id.trim().len() == 0 { Err(bad_request("Invalid request parameters: comment_id is required")) } else if self.params.reaction_type.trim().len() == 0 { Err(bad_request("Invalid request parameters: reaction_type is required")) } else { Ok(self) } } pub fn fetch_reaction(&mut self) -> Result<&mut Self, HttpError> { let id = reaction_id(self.current_comment_id(), self.current_user_id(), &self.params.reaction_type); match Reaction::find(&self.db, self.commentable_id.clone(), id) { Ok(Some(reaction)) => self.reaction = Some(reaction), Ok(None) => return Err(bad_request("Could not delete reaction.")), Err(err) => return Err(internal_server_error(err)), } Ok(self) } pub fn delete(&mut self) -> Result<&mut Self, HttpError> { let id = reaction_id(self.current_comment_id(), self.current_user_id(), &self.params.reaction_type); Reaction::delete(&self.db, self.commentable_id.clone(), id) .map_err(|err| internal_server_error(err))?; Ok(self) } pub fn serialize(&self) -> Result<Response<Body>, HttpError> { Ok(ok("")) } } fn main() { lambda!(|request, _| DeleteReaction::respond_to(request) .or_else(|error_response| Ok(error_response)) ); }
Ok(Self { db: DynamoDbClient::new(Region::default()), commentable_id, current_comment: None, current_user: None, reaction: None, params, })
call_expression
[ { "content": "pub fn comment_id(commentable_id: &CommentableId, user_id: &UserId) -> String {\n\n let id = hash(&format!(\"{}{}{}\", commentable_id, user_id, Utc::now().to_string()));\n\n format!(\"{}{}{}\", COMMENT_ID_PREFIX, Utc::now().timestamp_millis(), id)\n\n}\n", "file_path": "src/models/comment.rs", "rank": 0, "score": 170128.81332175698 }, { "content": "pub fn reaction_id(comment_id: &CommentId, user_id: &UserId, reaction_type: &ReactionType) -> ReactionId {\n\n let id = hash(&format!(\"{}{}{}\", comment_id, user_id, reaction_type));\n\n format!(\"{}{}\", REACTION_ID_PREFIX, id)\n\n}\n", "file_path": "src/models/reaction.rs", "rank": 1, "score": 167650.11228669522 }, { "content": "pub fn missing_path_param(param: &str) -> String {\n\n format!(\"Invalid path parameters: {} is required\", param)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 2, "score": 111725.15662110811 }, { "content": "pub fn missing_request_param(param: &str) -> String {\n\n format!(\"Invalid request parameters: {} is required\", param)\n\n}\n", "file_path": "src/utils/http.rs", "rank": 3, "score": 111637.69669809757 }, { "content": "pub fn bad_request<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::BAD_REQUEST)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 4, "score": 103990.28106334957 }, { "content": "// Main trait for handling DynamoDB records\n\npub trait DynamoDbModel where Self: Sized + Serialize {\n\n // #new is used internally to create structs from DynamoDB records\n\n fn new(attributes: DynamoDbAttributes) -> Result<Self, DbError>;\n\n\n\n fn find(db: &DynamoDbClient, key: PrimaryKey, id: SortKey) -> Result<Option<Self>, DbError> {\n\n db.get_item(GetItemInput {\n\n key: hashmap!{\n\n String::from(\"primary_key\") => attribute_value(key),\n\n String::from(\"id\") => attribute_value(id),\n\n },\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| 
DbError::Error(err.to_string()))\n\n .and_then(|output| {\n\n Ok(output.item.and_then(|attributes| {\n\n // The unwrapping below should be safe, as we're restoring the struct from an existing record\n\n Some(Self::new(attributes).unwrap())\n\n }))\n\n })\n", "file_path": "src/utils/db.rs", "rank": 5, "score": 98355.97984140893 }, { "content": "pub fn internal_server_error<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::INTERNAL_SERVER_ERROR)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 6, "score": 84557.5438836195 }, { "content": "// Trait for models that can implement #list (Comment & Reaction)\n\npub trait DynamoDbListableModel where Self: DynamoDbModel {\n\n fn id_prefix() -> String;\n\n\n\n fn list(db: &DynamoDbClient, commentable_id: CommentableId) -> Result<Vec<Self>, DbError> {\n\n Self::query(&db, QueryInput {\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n key_condition_expression: String::from(\"primary_key = :v1 and begins_with(id, :v2)\").into(),\n\n expression_attribute_values: hashmap!{\n\n String::from(\":v1\") => attribute_value(commentable_id),\n\n String::from(\":v2\") => attribute_value(Self::id_prefix()),\n\n }.into(),\n\n ..Default::default()\n\n })?\n\n .drain(..)\n\n .map(|attributes| Self::new(attributes))\n\n .collect::<Result<Vec<Self>, DbError>>()\n\n }\n\n}\n\n\n\nimpl From<String> for IntoAttributeValue {\n", "file_path": "src/utils/db.rs", "rank": 7, "score": 81793.2159635615 }, { "content": "pub fn not_found<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::NOT_FOUND)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 8, "score": 76982.08438523156 }, { "content": "pub fn unauthorized<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::UNAUTHORIZED)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 9, "score": 76982.08438523156 }, { "content": "pub fn 
forbidden<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::FORBIDDEN)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 10, "score": 76982.08438523156 }, { "content": "pub fn ok<T>(body: T) -> Response<Body>\n\nwhere T: ToString {\n\n http_response(body.to_string(), StatusCode::OK)\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 11, "score": 76982.08438523156 }, { "content": "pub fn hash(text: &str) -> String {\n\n let mut hasher = Sha3::sha3_256();\n\n hasher.input_str(text);\n\n hasher.result_str()\n\n}\n\n\n", "file_path": "src/utils/db.rs", "rank": 12, "score": 76185.5598176519 }, { "content": "pub fn attribute_value<T: Into<IntoAttributeValue>>(value: T) -> AttributeValue {\n\n value // = Into<IntoAttributeValue>\n\n .into() // -> IntoAttributeValue\n\n .into() // -> AttributeValue\n\n}\n", "file_path": "src/utils/db.rs", "rank": 13, "score": 63349.312833849996 }, { "content": "pub trait CurrentUser {\n\n fn db(&self) -> &DynamoDbClient;\n\n fn auth_token(&self) -> Option<String>;\n\n fn set_current_user(&mut self, user: Option<User>);\n\n\n\n fn user_id(&self) -> Result<String, HttpError> {\n\n Ok(format!(\"USER_{}\",\n\n self.auth_token()\n\n .ok_or(unauthorized(\"Invalid access token.\"))?\n\n .split(TOKEN_DELIMITER)\n\n .next()\n\n .ok_or(unauthorized(\"Invalid access token.\"))?\n\n ))\n\n }\n\n\n\n fn fetch_current_user(&mut self) -> Result<&mut Self, HttpError> {\n\n match User::find(self.db(), self.user_id()?.clone(), self.user_id()?) 
{\n\n Ok(Some(user)) => {\n\n // The unwrap is safe, because self.user_id() already checks for token presence\n\n if user.auth_token == self.auth_token().unwrap() {\n", "file_path": "src/utils/current_user.rs", "rank": 14, "score": 58711.45321963345 }, { "content": "// A helper trait that allows for easier access to AttributeValue contents with validations\n\npub trait DynamoDbRecord {\n\n fn string(&mut self, field_name: &str) -> Result<String, DbError>;\n\n fn timestamp(&mut self, field_name: &str) -> Result<DateTime<Utc>, DbError>;\n\n fn optional_string(&mut self, field_name: &str) -> Option<String>;\n\n}\n\n\n\nimpl DynamoDbRecord for DynamoDbAttributes {\n\n fn string(&mut self, field_name: &str) -> Result<String, DbError> {\n\n self.remove(field_name)\n\n .and_then(|value| value.s)\n\n .ok_or(DbError::RecordInvalid(format!(\"Missing field '{}'.\", field_name)))\n\n }\n\n\n\n fn timestamp(&mut self, field_name: &str) -> Result<DateTime<Utc>, DbError> {\n\n self.remove(field_name) // -> Option<AttributeValue>\n\n .and_then(|value| value.s) // -> Option<String>\n\n .ok_or(DbError::RecordInvalid(format!(\"Missing field '{}'.\", field_name))) // -> Result<String, DbError>\n\n .and_then(|string|\n\n DateTime::parse_from_rfc3339(&string).map_err(|_|\n\n DbError::Error(format!(\"Error parsing timestamps in field '{}'\", field_name))\n", "file_path": "src/utils/db.rs", "rank": 15, "score": 57717.91729745577 }, { "content": "pub trait CurrentComment {\n\n fn db(&self) -> &DynamoDbClient;\n\n fn comment_id(&self) -> CommentId;\n\n fn commentable_id(&self) -> CommentableId;\n\n fn set_current_comment(&mut self, comment: Comment);\n\n\n\n fn fetch_current_comment(&mut self) -> Result<&mut Self, HttpError> {\n\n match Comment::find(self.db(), self.commentable_id(), self.comment_id()) {\n\n Ok(Some(comment)) => self.set_current_comment(comment),\n\n Ok(None) => return Err(not_found(\"Comment not found\")),\n\n Err(err) => return Err(internal_server_error(err)),\n\n }\n\n 
Ok(self)\n\n }\n\n}\n", "file_path": "src/utils/current_comment.rs", "rank": 16, "score": 57405.27704324927 }, { "content": "fn http_response(body: String, status: StatusCode) -> Response<Body> {\n\n let mut builder = Response::builder();\n\n // Setup CORS\n\n builder.header(\"Access-Control-Allow-Origin\", \"*\");\n\n builder.header(\"Access-Control-Allow-Headers\", \"Content-Type,Authorization\");\n\n\n\n if body.is_empty() {\n\n builder.status(status).body(Body::Empty).unwrap()\n\n } else {\n\n builder.status(status).body(Body::Text(body)).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 17, "score": 49253.533048945734 }, { "content": " hash,\n\n};\n\n\n\npub type ReactionId = String;\n\npub type ReactionType = String;\n\n\n\npub static REACTION_ID_PREFIX: &str = \"REACTION_\";\n\n\n\n#[derive(Serialize, Debug)]\n\npub struct Reaction {\n\n pub primary_key: CommentableId,\n\n pub id: ReactionId,\n\n pub user_id: UserId,\n\n pub comment_id: CommentId,\n\n pub reaction_type: ReactionType,\n\n pub created_at: DateTime<Utc>,\n\n}\n\n\n\nimpl DynamoDbModel for Reaction {\n\n fn new(mut attributes: DynamoDbAttributes) -> Result<Self, DbError> {\n", "file_path": "src/models/reaction.rs", "rank": 18, "score": 23923.527080657037 }, { "content": " Ok(Self {\n\n primary_key: attributes.string(\"primary_key\")?,\n\n id: attributes.string(\"id\")?,\n\n user_id: attributes.string(\"user_id\")?,\n\n comment_id: attributes.string(\"comment_id\")?,\n\n reaction_type: attributes.string(\"type\")?,\n\n created_at: attributes.timestamp(\"created_at\")?,\n\n })\n\n }\n\n}\n\n\n\nimpl DynamoDbListableModel for Reaction {\n\n fn id_prefix() -> String {\n\n REACTION_ID_PREFIX.to_string()\n\n }\n\n}\n\n\n\nimpl Reaction {\n\n pub fn remove_all_for_comment(db: &DynamoDbClient, commentable_id: CommentableId, comment_id: CommentId) -> Result<(), DbError> {\n\n let reactions =\n", "file_path": "src/models/reaction.rs", "rank": 19, "score": 23921.40803569916 }, { 
"content": "use chrono::{DateTime, Utc};\n\nuse maplit::hashmap;\n\nuse rusoto_dynamodb::{\n\n DynamoDbClient,\n\n QueryInput,\n\n};\n\nuse serde::Serialize;\n\n\n\nuse crate::models::comment::CommentId;\n\nuse crate::models::user::UserId;\n\nuse crate::utils::db::{\n\n COMMENTABLE_RS_TABLE_NAME,\n\n REACTIONS_INDEX_NAME,\n\n CommentableId,\n\n DynamoDbModel,\n\n DynamoDbListableModel,\n\n DynamoDbAttributes,\n\n DynamoDbRecord,\n\n DbError,\n\n attribute_value,\n", "file_path": "src/models/reaction.rs", "rank": 20, "score": 23918.99898660812 }, { "content": " Self::query(&db, QueryInput {\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n index_name: Some(REACTIONS_INDEX_NAME.to_string()),\n\n key_condition_expression: String::from(\"primary_key = :v1 and comment_id = :v2\").into(),\n\n expression_attribute_values: hashmap!{\n\n String::from(\":v1\") => attribute_value(commentable_id),\n\n String::from(\":v2\") => attribute_value(comment_id),\n\n }.into(),\n\n ..Default::default()\n\n })?.drain(..)\n\n .map(|mut key: DynamoDbAttributes| (key.string(\"primary_key\").unwrap(), key.string(\"id\").unwrap()))\n\n .collect::<Vec<(CommentableId, ReactionId)>>();\n\n\n\n if reactions.len() > 0 {\n\n Reaction::batch_delete(&db, reactions)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/models/reaction.rs", "rank": 21, "score": 23916.270583125086 }, { "content": "};\n\n\n\npub type UserId = String;\n\npub type AuthToken = String;\n\n\n\npub static TOKEN_DELIMITER: &str = \"-=#=-\";\n\n\n\n#[derive(Serialize, Debug)]\n\npub struct User {\n\n pub primary_key: UserId,\n\n pub id: UserId,\n\n pub email: String,\n\n pub name: String,\n\n pub picture_url: String,\n\n pub auth_token: String,\n\n pub created_at: DateTime<Utc>,\n\n}\n\n\n\nimpl DynamoDbModel for User {\n\n fn new(mut attributes: DynamoDbAttributes) -> Result<User, DbError> {\n", "file_path": "src/models/user.rs", "rank": 22, "score": 23463.460793687635 }, { "content": " request_items: 
hashmap! {\n\n String::from(COMMENTABLE_RS_TABLE_NAME) => KeysAndAttributes {\n\n keys: slice.iter().map(|id| hashmap! {\n\n String::from(\"primary_key\") => attribute_value(id.to_string()),\n\n String::from(\"id\") => attribute_value(id.to_string()),\n\n }).collect(),\n\n ..Default::default()\n\n }\n\n },\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))?\n\n .responses.unwrap()\n\n .remove(COMMENTABLE_RS_TABLE_NAME).unwrap()\n\n .drain(..)\n\n .map(|user_attributes| User::new(user_attributes))\n\n .collect::<Result<Vec<Self>, DbError>>()?\n\n );\n\n }\n\n Ok(users)\n\n }\n\n}\n\n\n\nimpl fmt::Display for User {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{} ({})\", self.name, self.id)\n\n }\n\n}\n", "file_path": "src/models/user.rs", "rank": 23, "score": 23462.2971555021 }, { "content": " Ok(User {\n\n primary_key: attributes.string(\"primary_key\")?,\n\n id: attributes.string(\"id\")?,\n\n email: attributes.string(\"email\")?,\n\n name: attributes.string(\"name\")?,\n\n auth_token: attributes.string(\"auth_token\")?,\n\n picture_url: attributes.string(\"picture_url\")?,\n\n created_at: attributes.timestamp(\"created_at\")?\n\n })\n\n }\n\n}\n\n\n\nimpl User {\n\n pub fn batch_get(db: &DynamoDbClient, mut ids: HashSet<&UserId>) -> Result<Vec<Self>, DbError> {\n\n let mut users: Vec<Self> = vec![];\n\n /* 100 is the maximum amount of records allowed\n\n per single BatchGetItem operation in DynamoDB */\n\n for slice in ids.drain().collect::<Vec<_>>().chunks(100) {\n\n users.append(\n\n &mut db.batch_get_item(BatchGetItemInput {\n", "file_path": "src/models/user.rs", "rank": 24, "score": 23461.280917953114 }, { "content": "use std::fmt;\n\nuse std::collections::HashSet;\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse maplit::hashmap;\n\nuse rusoto_dynamodb::{\n\n DynamoDb,\n\n DynamoDbClient,\n\n BatchGetItemInput,\n\n KeysAndAttributes,\n\n};\n\nuse serde::Serialize;\n\n\n\nuse 
crate::utils::db::{\n\n attribute_value,\n\n DynamoDbModel,\n\n DynamoDbAttributes,\n\n DynamoDbRecord,\n\n DbError,\n\n COMMENTABLE_RS_TABLE_NAME,\n", "file_path": "src/models/user.rs", "rank": 25, "score": 23458.39513429048 }, { "content": "use serde::Serialize;\n\n\n\npub type CommentableId = String;\n\npub type PrimaryKey = String;\n\npub type SortKey = String;\n\n\n\npub static COMMENTABLE_RS_TABLE_NAME: &str = \"CommentableRsTable\";\n\npub static REPLIES_INDEX_NAME: &str = \"replies-index\";\n\npub static REACTIONS_INDEX_NAME: &str = \"reactions-index\";\n\n\n\n#[derive(Debug)]\n\npub enum DbError {\n\n Error(String),\n\n RecordInvalid(String),\n\n}\n\n\n\nimpl fmt::Display for DbError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"\\\"{}\\\"\", match self {\n\n DbError::Error(msg) => format!(\"DbError::Error -> {}\", msg),\n\n DbError::RecordInvalid(msg) => format!(\"DbError::RecordInvalid -> {}\", msg),\n\n })\n\n }\n\n}\n\n\n\n\n\npub type DynamoDbAttributes = HashMap<String, AttributeValue>;\n\n\n\n// A helper trait that allows for easier access to AttributeValue contents with validations\n", "file_path": "src/utils/db.rs", "rank": 26, "score": 22580.81328239058 }, { "content": " String::from(\"id\") => attribute_value(id),\n\n },\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn batch_delete(db: &DynamoDbClient, mut keys: Vec<(PrimaryKey, SortKey)>) -> Result<(), DbError> {\n\n let mut request_items: HashMap<String, Vec<WriteRequest>> = hashmap!{\n\n String::from(COMMENTABLE_RS_TABLE_NAME) =>\n\n keys\n\n .drain(..)\n\n .map(|(primary_key, sort_key)| WriteRequest {\n\n delete_request: Some(DeleteRequest {\n\n key: hashmap!{\n\n String::from(\"primary_key\") => attribute_value(primary_key),\n\n String::from(\"id\") => attribute_value(sort_key),\n", "file_path": "src/utils/db.rs", "rank": 27, 
"score": 22578.515492255945 }, { "content": "use std::fmt;\n\nuse std::collections::HashMap;\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse crypto::digest::Digest;\n\nuse crypto::sha3::Sha3;\n\nuse maplit::hashmap;\n\nuse rusoto_dynamodb::{\n\n DynamoDb,\n\n DynamoDbClient,\n\n GetItemInput,\n\n QueryInput,\n\n PutItemInput,\n\n UpdateItemInput,\n\n DeleteItemInput,\n\n BatchWriteItemInput,\n\n WriteRequest,\n\n DeleteRequest,\n\n AttributeValue\n\n};\n", "file_path": "src/utils/db.rs", "rank": 28, "score": 22576.759051604327 }, { "content": " key: hashmap!{\n\n String::from(\"primary_key\") => attribute_value(key),\n\n String::from(\"id\") => attribute_value(id),\n\n },\n\n update_expression: Some(expression),\n\n expression_attribute_values: Some(values),\n\n return_values: Some(String::from(\"ALL_NEW\")),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))\n\n .and_then(|output| Ok(\n\n // The unwrapping below should be safe, as we're restoring the struct from an existing record\n\n Self::new(output.attributes.unwrap()).unwrap()\n\n ))\n\n }\n\n\n\n fn delete(db: &DynamoDbClient, key: PrimaryKey, id: SortKey) -> Result<(), DbError> {\n\n db.delete_item(DeleteItemInput {\n\n key: hashmap!{\n\n String::from(\"primary_key\") => attribute_value(key),\n", "file_path": "src/utils/db.rs", "rank": 29, "score": 22575.68688960563 }, { "content": " }\n\n }),\n\n ..Default::default()\n\n }).collect(),\n\n };\n\n // Each request can delete max 25 items, we add 2 to account for any unexpected DB or Network errors\n\n let max_iterations = request_items.get(COMMENTABLE_RS_TABLE_NAME).unwrap().len() as f32 / 25.0 + 2.0;\n\n let mut current_iteration = 0.0;\n\n\n\n 'pagination: loop {\n\n current_iteration += 1.0;\n\n\n\n db.batch_write_item(BatchWriteItemInput {\n\n request_items: request_items.clone(),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))\n\n .and_then(|output| {\n\n request_items = 
output.unprocessed_items.unwrap();\n\n Ok(())\n", "file_path": "src/utils/db.rs", "rank": 30, "score": 22575.264670240187 }, { "content": " }\n\n }\n\n\n\n return Ok(results);\n\n }\n\n\n\n fn create(db: &DynamoDbClient, attributes: IntoDynamoDbAttributes) -> Result<Self, DbError> {\n\n let attributes: DynamoDbAttributes = attributes.into();\n\n db.put_item(PutItemInput {\n\n item: attributes.clone(),\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))\n\n .and_then(|_| Self::new(attributes))\n\n }\n\n\n\n fn update(db: &DynamoDbClient, key: PrimaryKey, id: SortKey, expression: String, values: HashMap<String, AttributeValue>) -> Result<Self, DbError> {\n\n db.update_item(UpdateItemInput {\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n", "file_path": "src/utils/db.rs", "rank": 31, "score": 22575.19255216183 }, { "content": " )\n\n ) // -> Result<DateTime<FixedOffset>, DbError>\n\n .and_then(|datetime| Ok(datetime.with_timezone(&Utc))) // -> Result<DateTime<Utc>, DbError>\n\n }\n\n\n\n fn optional_string(&mut self, field_name: &str) -> Option<String> {\n\n self.remove(field_name)\n\n .and_then(|value| value.s)\n\n }\n\n}\n\n\n\n// This struct allows us to easily create DynamoDbAttributes\n\n// by implementing Into<IntoDynamoDbAttributes> for custom structs.\n\npub struct IntoDynamoDbAttributes {\n\n pub attributes: HashMap<String, IntoAttributeValue>\n\n}\n\n\n\n// This trait implementation allows us to create DynamoDbAttributes\n\n// from any HashMap<String, IntoAttributeValue>\n\nimpl From<IntoDynamoDbAttributes> for DynamoDbAttributes {\n", "file_path": "src/utils/db.rs", "rank": 32, "score": 22573.38081419492 }, { "content": " fn from(mut attributes: IntoDynamoDbAttributes) -> Self {\n\n attributes.attributes.drain().map(|(key, value)| {\n\n (key, value.into())\n\n }).collect()\n\n }\n\n}\n\n\n\n// A wrapper struct that allows for implementation of 
Into<AttributeValue>\n\n// for arbitraty types like Strings, u32s etc\n\npub struct IntoAttributeValue {\n\n attribute_value: AttributeValue,\n\n}\n\n\n", "file_path": "src/utils/db.rs", "rank": 33, "score": 22572.93521147535 }, { "content": " fn from(string: String) -> Self {\n\n let attribute_value = AttributeValue {\n\n s: Some(string),\n\n ..Default::default()\n\n };\n\n IntoAttributeValue { attribute_value }\n\n }\n\n}\n\n\n\nimpl From<bool> for IntoAttributeValue {\n\n fn from(value: bool) -> Self {\n\n let attribute_value = AttributeValue {\n\n bool: Some(value),\n\n ..Default::default()\n\n };\n\n IntoAttributeValue { attribute_value }\n\n }\n\n}\n\n\n\nimpl From<IntoAttributeValue> for AttributeValue {\n\n fn from(wrapper: IntoAttributeValue) -> Self {\n\n wrapper.attribute_value\n\n }\n\n}\n\n\n", "file_path": "src/utils/db.rs", "rank": 34, "score": 22572.47591649945 }, { "content": " }\n\n\n\n fn query(db: &DynamoDbClient, query_input: QueryInput) -> Result<Vec<DynamoDbAttributes>, DbError> {\n\n let mut results: Vec<DynamoDbAttributes> = vec![];\n\n let mut last_evaluated_key = None;\n\n\n\n 'pagination: loop {\n\n db.query(QueryInput {\n\n exclusive_start_key: last_evaluated_key.clone(),\n\n ..query_input.clone()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))\n\n .and_then(|query_output| {\n\n results.append(query_output.items.unwrap_or(vec![]).as_mut());\n\n last_evaluated_key = query_output.last_evaluated_key;\n\n Ok(())\n\n })?;\n\n\n\n if last_evaluated_key == None {\n\n break 'pagination;\n", "file_path": "src/utils/db.rs", "rank": 35, "score": 22569.989417806053 }, { "content": " })?;\n\n\n\n if request_items.is_empty() || current_iteration > max_iterations {\n\n break 'pagination;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn json(&self) -> String {\n\n serde_json::to_string(&self).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/utils/db.rs", "rank": 36, "score": 22569.934426415966 }, { "content": "use 
rusoto_dynamodb::DynamoDbClient;\n\n\n\nuse crate::{\n\n utils::{\n\n db::DynamoDbModel,\n\n http::{unauthorized, internal_server_error, HttpError},\n\n },\n\n models::user::{TOKEN_DELIMITER, User},\n\n};\n\n\n", "file_path": "src/utils/current_user.rs", "rank": 37, "score": 22448.097192690097 }, { "content": " self.set_current_user(Some(user));\n\n } else {\n\n return Err(unauthorized(\"Invalid access token.\"));\n\n }\n\n },\n\n Ok(None) => return Err(unauthorized(\"Invalid access token.\")),\n\n Err(err) => return Err(internal_server_error(err)),\n\n };\n\n Ok(self)\n\n }\n\n\n\n fn try_fetch_current_user(&mut self) -> &mut Self {\n\n let _ = self.fetch_current_user();\n\n self\n\n }\n\n}\n", "file_path": "src/utils/current_user.rs", "rank": 38, "score": 22445.37666951814 }, { "content": " attribute_value,\n\n hash,\n\n};\n\n\n\npub type CommentId = String;\n\n\n\npub static COMMENT_ID_PREFIX: &str = \"COMMENT_\";\n\n\n\n#[derive(Serialize, Debug)]\n\npub struct Comment {\n\n pub primary_key: CommentableId,\n\n pub id: CommentId,\n\n pub user_id: Option<UserId>,\n\n pub replies_to: Option<CommentId>,\n\n pub body: String,\n\n pub is_deleted: Option<bool>,\n\n pub created_at: DateTime<Utc>,\n\n}\n\n\n\nimpl DynamoDbModel for Comment {\n", "file_path": "src/models/comment.rs", "rank": 39, "score": 22308.51163952817 }, { "content": " fn new(mut attributes: DynamoDbAttributes) -> Result<Self, DbError> {\n\n Ok(Self {\n\n primary_key: attributes.string(\"primary_key\")?,\n\n id: attributes.string(\"id\")?,\n\n user_id: attributes.optional_string(\"user_id\"),\n\n replies_to: attributes.optional_string(\"replies_to\"),\n\n body: attributes.string(\"body\")?,\n\n is_deleted: None,\n\n created_at: attributes.timestamp(\"created_at\")?,\n\n })\n\n }\n\n}\n\n\n\nimpl DynamoDbListableModel for Comment {\n\n fn id_prefix() -> String {\n\n COMMENT_ID_PREFIX.to_string()\n\n }\n\n}\n\n\n\nimpl Comment {\n", "file_path": "src/models/comment.rs", "rank": 40, "score": 
22307.072122096666 }, { "content": " String::from(\"id\") => attribute_value(self.id.clone()),\n\n },\n\n update_expression: Some(String::from(\"SET is_deleted = :is_deleted, body = :body REMOVE user_id\")),\n\n expression_attribute_values: Some(hashmap!{\n\n String::from(\":is_deleted\") => attribute_value(true),\n\n String::from(\":body\") => attribute_value(\"This comment has been deleted.\".to_string()),\n\n }),\n\n ..Default::default()\n\n }).sync()\n\n .map_err(|err| DbError::Error(err.to_string()))\n\n .and_then(|_| {\n\n self.body = \"This comment has been deleted.\".to_string();\n\n self.is_deleted = Some(true);\n\n self.user_id = None;\n\n Ok(())\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/models/comment.rs", "rank": 41, "score": 22303.793621276076 }, { "content": "use chrono::{DateTime, Utc};\n\nuse maplit::hashmap;\n\nuse rusoto_dynamodb::{\n\n DynamoDb,\n\n DynamoDbClient,\n\n QueryInput,\n\n UpdateItemInput,\n\n};\n\nuse serde::Serialize;\n\n\n\nuse crate::models::user::UserId;\n\nuse crate::utils::db::{\n\n COMMENTABLE_RS_TABLE_NAME,\n\n REPLIES_INDEX_NAME,\n\n CommentableId,\n\n DynamoDbModel,\n\n DynamoDbListableModel,\n\n DynamoDbAttributes,\n\n DynamoDbRecord,\n\n DbError,\n", "file_path": "src/models/comment.rs", "rank": 42, "score": 22302.906775024883 }, { "content": " pub fn has_replies(&self, db: &DynamoDbClient) -> Result<bool, DbError> {\n\n let replies = Self::query(&db, QueryInput {\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n index_name: Some(REPLIES_INDEX_NAME.to_string()),\n\n key_condition_expression: String::from(\"primary_key = :v1 and replies_to = :v2\").into(),\n\n expression_attribute_values: hashmap!{\n\n String::from(\":v1\") => attribute_value(self.primary_key.clone()),\n\n String::from(\":v2\") => attribute_value(self.id.clone()),\n\n }.into(),\n\n ..Default::default()\n\n })?;\n\n\n\n if replies.len() > 0 { Ok(true) } else { Ok(false) }\n\n }\n\n\n\n pub fn erase(&mut self, db: &DynamoDbClient) -> Result<(), 
DbError> {\n\n db.update_item(UpdateItemInput {\n\n table_name: COMMENTABLE_RS_TABLE_NAME.to_string(),\n\n key: hashmap!{\n\n String::from(\"primary_key\") => attribute_value(self.primary_key.clone()),\n", "file_path": "src/models/comment.rs", "rank": 43, "score": 22297.955948209 }, { "content": "use rusoto_dynamodb::DynamoDbClient;\n\n\n\nuse crate::{\n\n utils::{\n\n db::{CommentableId, DynamoDbModel},\n\n http::{not_found, internal_server_error, HttpError},\n\n },\n\n models::comment::{Comment, CommentId},\n\n};\n\n\n", "file_path": "src/utils/current_comment.rs", "rank": 44, "score": 21340.594395302993 }, { "content": "## Commentable.rs - A privacy oriented, serverless comment hosting solution written in Rust.\n\n\n\n#### This repository contains the backend part of Commentable.rs, if you're looking for client-side libraries, see [https://github.com/netguru/commentable-js](https://github.com/netguru/commentable-js)\n\n\n\n### Why?\n\nThe biggest selling point of Commentable.rs is the fact that it can be installed in less than a minute, is completely self-hosted and scales with your traffic thanks to the Serverless architecture.\n\nIt supports regular comments, nested replies (with unlimited depth) and custom reactions.\n\nIt uses Single Sign On with Google Auth and if your website already supports it, the authentication process is 100% automatic (that means users don't have to log in twice to post a comment like in other similar solutions)\n\n\n\n### How?\n\nCommentable.rs is written in the [Rust programming language](https://www.rust-lang.org/) and running on [AWS Lambda](https://aws.amazon.com/lambda/) to achieve high scalability.\n\nIt uses [AWS DynamoDB](https://aws.amazon.com/dynamodb/) for storage, which is a NoSQL database with a very flexible billing system. This choice of architecture allows users to install the whole app with just one press of a button. 
It also makes it free-tier eligible, which is very handy for small websites and during development (we were never billed more than $0, even during heavy testing).\n\n\n", "file_path": "README.md", "rank": 45, "score": 15383.06574401457 }, { "content": "### Installation\n\nThe easiest way to install Commentable.rs is by using AWS Serverless Application Repository. Log in to you AWS account then visit [this link](https://serverlessrepo.aws.amazon.com/applications/arn:aws:serverlessrepo:us-east-2:403528576742:applications~commentable-rs) and press `Deploy`.\n\nFollow these screenshots to get to the Url of your new application:\n\n\n\n![](docs/step1.png)\n\n\n\n![](docs/step2.png)\n\n\n\n![](docs/step3.png)\n\n\n\nAlternatively, if you want, you can easily install Commentable.rs using the command line. You will need Docker for compilation as well as AWS CLI and AWS SAM CLI for deployment. The whole process is abstracted into a single make command, which makes it very straightforward.\n\n\n\n[How to install AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html)\n\n\n\n[How to configure AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html#cli-quick-configuration)\n\n\n\n[How to install AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)\n\n\n\n[How to install Docker](https://docs.docker.com/install/)\n\n\n\n```shell\n\n$> git clone git@github.com:netguru/commentable-rs.git\n\n$> cd commentable-rs\n\n# <your_bucket_name> is the name of the AWS S3 bucket that will contain your binaries - it has to be globally unique so you need to provide your own name\n\n$> BUCKET_NAME=<your_bucket_name> make install\n\n```\n\n\n\n### Configuration\n\nNo additional configuration is required on this part, but you will need to pass the URL of your application to the client library, so keep it handy. 
Follow the steps in [https://github.com/netguru/commentable-js](https://github.com/netguru/commentable-js) to implement and connect the UI on your website.\n", "file_path": "README.md", "rank": 46, "score": 15379.192790010817 }, { "content": " the conditions stated in this License.\n\n\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n\n with Licensor regarding such Contributions.\n\n\n\n 6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n", "file_path": "LICENSE.md", "rank": 47, "score": 15378.946367955772 }, { "content": " APPENDIX: How to apply the Apache License to your work.\n\n\n\n To apply the Apache License to your work, attach the following\n\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n\n replaced with your own identifying information. (Don't include\n\n the brackets!) 
The text should be enclosed in the appropriate\n\n comment syntax for the file format. We also recommend that a\n\n file or class name and description of purpose be included on the\n\n same \"printed page\" as the copyright notice for easier\n\n identification within third-party archives.\n\n\n\n Copyright 2020 Wojciech Olejnik\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n", "file_path": "LICENSE.md", "rank": 48, "score": 15378.717038705498 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. 
While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE.md", "rank": 49, "score": 15377.88237048482 }, { "content": " stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n\n the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. 
You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n", "file_path": "LICENSE.md", "rank": 50, "score": 15376.652611122288 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. 
For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE.md", "rank": 51, "score": 15376.609379502306 }, { "content": " subsequently incorporated within the Work.\n\n\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n\n Work and such Derivative Works in Source or Object form.\n\n\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n 4. Redistribution. You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n", "file_path": "LICENSE.md", "rank": 52, "score": 15376.049833440291 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. 
For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n", "file_path": "LICENSE.md", "rank": 53, "score": 15375.195945704585 }, { "content": "pub mod user;\n\npub mod comment;\n\npub mod reaction;\n", "file_path": "src/models/mod.rs", "rank": 54, "score": 10.135521129546255 }, { "content": "pub mod db;\n\npub mod http;\n\npub mod current_user;\n\npub mod current_comment;\n", "file_path": "src/utils/mod.rs", "rank": 55, "score": 9.33677067894085 }, { "content": "use lambda_http::{Response, Body, http::StatusCode};\n\n\n\npub type HttpError = Response<Body>;\n\n\n", "file_path": "src/utils/http.rs", "rank": 56, "score": 7.507671523168723 }, { "content": "pub mod utils;\n\npub mod models;\n", "file_path": "src/lib.rs", "rank": 57, "score": 
2.929546224619373 } ]
Rust
src/u32x8_.rs
nathanvoglsam/wide
00de1af88cced28b9fb64fbe393261f203573c96
use super::*; pick! { if #[cfg(target_feature="avx2")] { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { avx2: m256i } } else if #[cfg(target_feature="ssse3")] { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { sse0: m128i, sse1: m128i } } else { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { arr: [u32;8] } } } unsafe impl Zeroable for u32x8 {} unsafe impl Pod for u32x8 {} impl Add for u32x8 { type Output = Self; #[inline] #[must_use] fn add(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: add_i32_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: add_i32_m128i(self.sse0, rhs.sse0) , sse1: add_i32_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].wrapping_add(rhs.arr[0]), self.arr[1].wrapping_add(rhs.arr[1]), self.arr[2].wrapping_add(rhs.arr[2]), self.arr[3].wrapping_add(rhs.arr[3]), self.arr[4].wrapping_add(rhs.arr[4]), self.arr[5].wrapping_add(rhs.arr[5]), self.arr[6].wrapping_add(rhs.arr[6]), self.arr[7].wrapping_add(rhs.arr[7]), ]} } } } } impl Sub for u32x8 { type Output = Self; #[inline] #[must_use] fn sub(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: sub_i32_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: sub_i32_m128i(self.sse0, rhs.sse0) , sse1: sub_i32_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].wrapping_sub(rhs.arr[0]), self.arr[1].wrapping_sub(rhs.arr[1]), self.arr[2].wrapping_sub(rhs.arr[2]), self.arr[3].wrapping_sub(rhs.arr[3]), self.arr[4].wrapping_sub(rhs.arr[4]), self.arr[5].wrapping_sub(rhs.arr[5]), self.arr[6].wrapping_sub(rhs.arr[6]), self.arr[7].wrapping_sub(rhs.arr[7]), ]} } } } } impl Mul for u32x8 { type Output = Self; #[inline] #[must_use] fn mul(self, rhs: Self) -> Self::Output { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: mul_i32_keep_low_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: mul_i32_keep_low_m128i(self.sse0, rhs.sse0) , sse1: mul_i32_keep_low_m128i(self.sse1, rhs.sse1)} } else { let arr1: [u32; 8] = cast(self); let arr2: [u32; 8] = cast(rhs); cast([ arr1[0].wrapping_mul(arr2[0]), arr1[1].wrapping_mul(arr2[1]), arr1[2].wrapping_mul(arr2[2]), arr1[3].wrapping_mul(arr2[3]), arr1[4].wrapping_mul(arr2[4]), arr1[5].wrapping_mul(arr2[5]), arr1[6].wrapping_mul(arr2[6]), arr1[7].wrapping_mul(arr2[7]), ]) } } } } impl BitAnd for u32x8 { type Output = Self; #[inline] #[must_use] fn bitand(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: bitand_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitand_m128i(self.sse0, rhs.sse0) , sse1: bitand_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitand(rhs.arr[0]), self.arr[1].bitand(rhs.arr[1]), self.arr[2].bitand(rhs.arr[2]), self.arr[3].bitand(rhs.arr[3]), self.arr[4].bitand(rhs.arr[4]), self.arr[5].bitand(rhs.arr[5]), self.arr[6].bitand(rhs.arr[6]), self.arr[7].bitand(rhs.arr[7]), ]} } } } } impl BitOr for u32x8 { type Output = Self; #[inline] #[must_use] fn bitor(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: bitor_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitor_m128i(self.sse0, rhs.sse0) , sse1: bitor_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitor(rhs.arr[0]), self.arr[1].bitor(rhs.arr[1]), self.arr[2].bitor(rhs.arr[2]), self.arr[3].bitor(rhs.arr[3]), self.arr[4].bitor(rhs.arr[4]), self.arr[5].bitor(rhs.arr[5]), self.arr[6].bitor(rhs.arr[6]), self.arr[7].bitor(rhs.arr[7]), ]} } } } } impl BitXor for u32x8 { type Output = Self; #[inline] #[must_use] fn bitxor(self, rhs: Self) -> Self::Output { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: bitxor_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitxor_m128i(self.sse0, rhs.sse0) , sse1: bitxor_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitxor(rhs.arr[0]), self.arr[1].bitxor(rhs.arr[1]), self.arr[2].bitxor(rhs.arr[2]), self.arr[3].bitxor(rhs.arr[3]), self.arr[4].bitxor(rhs.arr[4]), self.arr[5].bitxor(rhs.arr[5]), self.arr[6].bitxor(rhs.arr[6]), self.arr[7].bitxor(rhs.arr[7]), ]} } } } } macro_rules! impl_shl_t_for_u32x8 { ($($shift_type:ty),+ $(,)?) => { $(impl Shl<$shift_type> for u32x8 { type Output = Self; #[inline] #[must_use] fn shl(self, rhs: $shift_type) -> Self::Output { let u = rhs as u64; pick! { if #[cfg(target_feature="avx2")] { let shift = cast([u, 0]); Self { avx2: shl_all_u32_m256i(self.avx2, shift) } } else if #[cfg(target_feature="ssse3")] { let shift = cast([u, 0]); Self { sse0: shl_all_u32_m128i(self.sse0, shift) , sse1: shl_all_u32_m128i(self.sse1, shift)} } else { Self { arr: [ self.arr[0] << u, self.arr[1] << u, self.arr[2] << u, self.arr[3] << u, self.arr[4] << u, self.arr[5] << u, self.arr[6] << u, self.arr[7] << u, ]} } } } })+ }; } impl_shl_t_for_u32x8!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128); macro_rules! impl_shr_t_for_u32x8 { ($($shift_type:ty),+ $(,)?) => { $(impl Shr<$shift_type> for u32x8 { type Output = Self; #[inline] #[must_use] fn shr(self, rhs: $shift_type) -> Self::Output { let u = rhs as u64; pick! 
{ if #[cfg(target_feature="avx2")] { let shift = cast([u, 0]); Self { avx2: shr_all_u32_m256i(self.avx2, shift) } } else if #[cfg(target_feature="ssse3")] { let shift = cast([u, 0]); Self { sse0: shr_all_u32_m128i(self.sse0, shift) , sse1: shr_all_u32_m128i(self.sse1, shift)} } else { Self { arr: [ self.arr[0] >> u, self.arr[1] >> u, self.arr[2] >> u, self.arr[3] >> u, self.arr[4] >> u, self.arr[5] >> u, self.arr[6] >> u, self.arr[7] >> u, ]} } } } })+ }; } impl_shr_t_for_u32x8!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128); impl u32x8 { #[inline] #[must_use] pub fn cmp_eq(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: cmp_eq_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_eq_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_eq_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] == rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] == rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] == rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] == rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] == rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] == rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] == rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] == rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn cmp_gt(self, rhs: Self) -> Self { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: cmp_gt_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_gt_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_gt_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] > rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] > rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] > rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] > rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] > rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] > rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] > rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] > rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn cmp_lt(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: cmp_eq_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_lt_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_lt_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] < rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] < rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] < rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] < rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] < rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] < rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] < rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] < rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn blend(self, t: Self, f: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: blend_varying_i8_m256i(f.avx2, t.avx2, self.avx2) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: blend_varying_i8_m128i(f.sse0, t.sse0, self.sse0) , sse1: blend_varying_i8_m128i(f.sse1, t.sse1, self.sse1)} } else { generic_bit_blend(self, t, f) } } } #[inline] #[must_use] pub fn max(self, rhs: Self) -> Self { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: max_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: max_i32_m128i(self.sse0, rhs.sse0), sse1: max_i32_m128i(self.sse1, rhs.sse1) } } else { self.cmp_lt(rhs).blend(rhs, self) } } } #[inline] #[must_use] pub fn min(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: max_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: max_i32_m128i(self.sse0, rhs.sse0), sse1: max_i32_m128i(self.sse1, rhs.sse1) } } else { self.cmp_lt(rhs).blend(self, rhs) } } } } impl Not for u32x8 { type Output = Self; fn not(self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: self.avx2.not() } } else if #[cfg(target_feature="ssse3")] { Self { sse0: self.sse0.not() , sse1: self.sse1.not() } } else { Self { arr: [ !self.arr[0], !self.arr[1], !self.arr[2], !self.arr[3], !self.arr[4], !self.arr[5], !self.arr[6], !self.arr[7], ]} } } } }
use super::*; pick! { if #[cfg(target_feature="avx2")] { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { avx2: m256i } } else if #[cfg(target_feature="ssse3")] { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { sse0: m128i, sse1: m128i } } else { #[derive(Default, Clone, Copy, PartialEq, Eq)] #[repr(C, align(32))] pub struct u32x8 { arr: [u32;8] } } } unsafe impl Zeroable for u32x8 {} unsafe impl Pod for u32x8 {} impl Add for u32x8 { type Output = Self; #[inline] #[must_use] fn add(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: add_i32_m256i(
} impl Sub for u32x8 { type Output = Self; #[inline] #[must_use] fn sub(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: sub_i32_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: sub_i32_m128i(self.sse0, rhs.sse0) , sse1: sub_i32_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].wrapping_sub(rhs.arr[0]), self.arr[1].wrapping_sub(rhs.arr[1]), self.arr[2].wrapping_sub(rhs.arr[2]), self.arr[3].wrapping_sub(rhs.arr[3]), self.arr[4].wrapping_sub(rhs.arr[4]), self.arr[5].wrapping_sub(rhs.arr[5]), self.arr[6].wrapping_sub(rhs.arr[6]), self.arr[7].wrapping_sub(rhs.arr[7]), ]} } } } } impl Mul for u32x8 { type Output = Self; #[inline] #[must_use] fn mul(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: mul_i32_keep_low_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: mul_i32_keep_low_m128i(self.sse0, rhs.sse0) , sse1: mul_i32_keep_low_m128i(self.sse1, rhs.sse1)} } else { let arr1: [u32; 8] = cast(self); let arr2: [u32; 8] = cast(rhs); cast([ arr1[0].wrapping_mul(arr2[0]), arr1[1].wrapping_mul(arr2[1]), arr1[2].wrapping_mul(arr2[2]), arr1[3].wrapping_mul(arr2[3]), arr1[4].wrapping_mul(arr2[4]), arr1[5].wrapping_mul(arr2[5]), arr1[6].wrapping_mul(arr2[6]), arr1[7].wrapping_mul(arr2[7]), ]) } } } } impl BitAnd for u32x8 { type Output = Self; #[inline] #[must_use] fn bitand(self, rhs: Self) -> Self::Output { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: bitand_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitand_m128i(self.sse0, rhs.sse0) , sse1: bitand_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitand(rhs.arr[0]), self.arr[1].bitand(rhs.arr[1]), self.arr[2].bitand(rhs.arr[2]), self.arr[3].bitand(rhs.arr[3]), self.arr[4].bitand(rhs.arr[4]), self.arr[5].bitand(rhs.arr[5]), self.arr[6].bitand(rhs.arr[6]), self.arr[7].bitand(rhs.arr[7]), ]} } } } } impl BitOr for u32x8 { type Output = Self; #[inline] #[must_use] fn bitor(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: bitor_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitor_m128i(self.sse0, rhs.sse0) , sse1: bitor_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitor(rhs.arr[0]), self.arr[1].bitor(rhs.arr[1]), self.arr[2].bitor(rhs.arr[2]), self.arr[3].bitor(rhs.arr[3]), self.arr[4].bitor(rhs.arr[4]), self.arr[5].bitor(rhs.arr[5]), self.arr[6].bitor(rhs.arr[6]), self.arr[7].bitor(rhs.arr[7]), ]} } } } } impl BitXor for u32x8 { type Output = Self; #[inline] #[must_use] fn bitxor(self, rhs: Self) -> Self::Output { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: bitxor_m256i(self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: bitxor_m128i(self.sse0, rhs.sse0) , sse1: bitxor_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].bitxor(rhs.arr[0]), self.arr[1].bitxor(rhs.arr[1]), self.arr[2].bitxor(rhs.arr[2]), self.arr[3].bitxor(rhs.arr[3]), self.arr[4].bitxor(rhs.arr[4]), self.arr[5].bitxor(rhs.arr[5]), self.arr[6].bitxor(rhs.arr[6]), self.arr[7].bitxor(rhs.arr[7]), ]} } } } } macro_rules! impl_shl_t_for_u32x8 { ($($shift_type:ty),+ $(,)?) => { $(impl Shl<$shift_type> for u32x8 { type Output = Self; #[inline] #[must_use] fn shl(self, rhs: $shift_type) -> Self::Output { let u = rhs as u64; pick! 
{ if #[cfg(target_feature="avx2")] { let shift = cast([u, 0]); Self { avx2: shl_all_u32_m256i(self.avx2, shift) } } else if #[cfg(target_feature="ssse3")] { let shift = cast([u, 0]); Self { sse0: shl_all_u32_m128i(self.sse0, shift) , sse1: shl_all_u32_m128i(self.sse1, shift)} } else { Self { arr: [ self.arr[0] << u, self.arr[1] << u, self.arr[2] << u, self.arr[3] << u, self.arr[4] << u, self.arr[5] << u, self.arr[6] << u, self.arr[7] << u, ]} } } } })+ }; } impl_shl_t_for_u32x8!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128); macro_rules! impl_shr_t_for_u32x8 { ($($shift_type:ty),+ $(,)?) => { $(impl Shr<$shift_type> for u32x8 { type Output = Self; #[inline] #[must_use] fn shr(self, rhs: $shift_type) -> Self::Output { let u = rhs as u64; pick! { if #[cfg(target_feature="avx2")] { let shift = cast([u, 0]); Self { avx2: shr_all_u32_m256i(self.avx2, shift) } } else if #[cfg(target_feature="ssse3")] { let shift = cast([u, 0]); Self { sse0: shr_all_u32_m128i(self.sse0, shift) , sse1: shr_all_u32_m128i(self.sse1, shift)} } else { Self { arr: [ self.arr[0] >> u, self.arr[1] >> u, self.arr[2] >> u, self.arr[3] >> u, self.arr[4] >> u, self.arr[5] >> u, self.arr[6] >> u, self.arr[7] >> u, ]} } } } })+ }; } impl_shr_t_for_u32x8!(i8, u8, i16, u16, i32, u32, i64, u64, i128, u128); impl u32x8 { #[inline] #[must_use] pub fn cmp_eq(self, rhs: Self) -> Self { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: cmp_eq_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_eq_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_eq_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] == rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] == rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] == rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] == rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] == rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] == rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] == rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] == rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn cmp_gt(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: cmp_gt_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_gt_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_gt_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] > rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] > rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] > rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] > rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] > rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] > rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] > rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] > rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn cmp_lt(self, rhs: Self) -> Self { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: cmp_eq_mask_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: cmp_lt_mask_i32_m128i(self.sse0,rhs.sse0), sse1: cmp_lt_mask_i32_m128i(self.sse1,rhs.sse1), } } else { Self { arr: [ if self.arr[0] < rhs.arr[0] { u32::MAX } else { 0 }, if self.arr[1] < rhs.arr[1] { u32::MAX } else { 0 }, if self.arr[2] < rhs.arr[2] { u32::MAX } else { 0 }, if self.arr[3] < rhs.arr[3] { u32::MAX } else { 0 }, if self.arr[4] < rhs.arr[4] { u32::MAX } else { 0 }, if self.arr[5] < rhs.arr[5] { u32::MAX } else { 0 }, if self.arr[6] < rhs.arr[6] { u32::MAX } else { 0 }, if self.arr[7] < rhs.arr[7] { u32::MAX } else { 0 }, ]} } } } #[inline] #[must_use] pub fn blend(self, t: Self, f: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: blend_varying_i8_m256i(f.avx2, t.avx2, self.avx2) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: blend_varying_i8_m128i(f.sse0, t.sse0, self.sse0) , sse1: blend_varying_i8_m128i(f.sse1, t.sse1, self.sse1)} } else { generic_bit_blend(self, t, f) } } } #[inline] #[must_use] pub fn max(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: max_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: max_i32_m128i(self.sse0, rhs.sse0), sse1: max_i32_m128i(self.sse1, rhs.sse1) } } else { self.cmp_lt(rhs).blend(rhs, self) } } } #[inline] #[must_use] pub fn min(self, rhs: Self) -> Self { pick! { if #[cfg(target_feature="avx2")] { Self { avx2: max_i32_m256i(self.avx2, rhs.avx2 ) } } else if #[cfg(target_feature="sse4.1")] { Self { sse0: max_i32_m128i(self.sse0, rhs.sse0), sse1: max_i32_m128i(self.sse1, rhs.sse1) } } else { self.cmp_lt(rhs).blend(self, rhs) } } } } impl Not for u32x8 { type Output = Self; fn not(self) -> Self { pick! 
{ if #[cfg(target_feature="avx2")] { Self { avx2: self.avx2.not() } } else if #[cfg(target_feature="ssse3")] { Self { sse0: self.sse0.not() , sse1: self.sse1.not() } } else { Self { arr: [ !self.arr[0], !self.arr[1], !self.arr[2], !self.arr[3], !self.arr[4], !self.arr[5], !self.arr[6], !self.arr[7], ]} } } } }
self.avx2, rhs.avx2) } } else if #[cfg(target_feature="ssse3")] { Self { sse0: add_i32_m128i(self.sse0, rhs.sse0) , sse1: add_i32_m128i(self.sse1, rhs.sse1)} } else { Self { arr: [ self.arr[0].wrapping_add(rhs.arr[0]), self.arr[1].wrapping_add(rhs.arr[1]), self.arr[2].wrapping_add(rhs.arr[2]), self.arr[3].wrapping_add(rhs.arr[3]), self.arr[4].wrapping_add(rhs.arr[4]), self.arr[5].wrapping_add(rhs.arr[5]), self.arr[6].wrapping_add(rhs.arr[6]), self.arr[7].wrapping_add(rhs.arr[7]), ]} } } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn impl_add_for_u32x8() {\n\n let a = u32x8::from([1, 2, u32::MAX - 1, u32::MAX - 1, 31, 72, 13, 53]);\n\n let b = u32x8::from([17, 18, 1, 2, 12, 12, 634, 15]);\n\n let expected = u32x8::from([18, 20, u32::MAX, u32::MIN, 43, 84, 647, 68]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 0, "score": 160237.7126758735 }, { "content": "#[test]\n\nfn impl_u32x8_cmp_eq() {\n\n let a = i32x8::from([1, 2, 3, 4, 2, 1, 8, 2]);\n\n let b = i32x8::from([2_i32; 8]);\n\n let expected = i32x8::from([0, -1, 0, 0, -1, 0, 0, -1]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 1, "score": 153981.4390926125 }, { "content": "#[test]\n\nfn impl_u32x8_blend() {\n\n let use_t: i32 = -1;\n\n let t = i32x8::from([1, 2, 3, 4, 5, 6, 7, 8]);\n\n let f = i32x8::from([17, 18, 19, 20, 25, 30, 50, 90]);\n\n let mask = i32x8::from([use_t, 0, use_t, 0, 0, 0, 0, use_t]);\n\n let expected = i32x8::from([1, 18, 3, 20, 25, 30, 50, 8]);\n\n let actual = mask.blend(t, f);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 2, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_u32x8_max() {\n\n let a = i32x8::from([1, 2, i32::MIN + 1, i32::MIN, 6, -8, 12, 9]);\n\n let b = i32x8::from([17, -18, 1, 1, 19, -5, -1, -9]);\n\n let expected = i32x8::from([17, 2, 1, 1, 19, -5, 12, 9]);\n\n let actual = a.max(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 3, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_bitor_for_u32x8() {\n\n let a = i32x8::from([0, 0, 1, 1, 1, 0, 0, 1]);\n\n let b = i32x8::from([0, 1, 0, 1, 0, 1, 1, 1]);\n\n let expected = i32x8::from([0, 1, 1, 1, 1, 1, 1, 1]);\n\n let actual = a | b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 4, "score": 127777.5588387546 }, { "content": 
"#[test]\n\nfn impl_bitxor_for_u32x8() {\n\n let a = i32x8::from([0, 0, 1, 1, 1, 0, 0, 1]);\n\n let b = i32x8::from([0, 1, 0, 1, 0, 1, 1, 1]);\n\n let expected = i32x8::from([0, 1, 1, 0, 1, 1, 1, 0]);\n\n let actual = a ^ b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 5, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_shl_for_u32x8() {\n\n let a = i32x8::from([1, 2, i32::MAX - 1, i32::MAX - 1, 128, 255, 590, 5667]);\n\n let b = 2;\n\n let expected = i32x8::from([\n\n 1 << 2,\n\n 2 << 2,\n\n (i32::MAX - 1) << 2,\n\n (i32::MAX - 1) << 2,\n\n 128 << 2,\n\n 255 << 2,\n\n 590 << 2,\n\n 5667 << 2,\n\n ]);\n\n let actual = a << b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 6, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_bitand_for_u32x8() {\n\n let a = u32x8::from([0, 0, 1, 1, 1, 0, 0, 1]);\n\n let b = u32x8::from([0, 1, 0, 1, 0, 1, 1, 1]);\n\n let expected = u32x8::from([0, 0, 0, 1, 0, 0, 0, 1]);\n\n let actual = a & b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 7, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_shr_for_u32x8() {\n\n let a = i32x8::from([1, 2, i32::MAX - 1, i32::MAX - 1, 128, 255, 590, 5667]);\n\n let b = 2;\n\n let expected = i32x8::from([\n\n 1 >> 2,\n\n 2 >> 2,\n\n (i32::MAX - 1) >> 2,\n\n (i32::MAX - 1) >> 2,\n\n 128 >> 2,\n\n 255 >> 2,\n\n 590 >> 2,\n\n 5667 >> 2,\n\n ]);\n\n let actual = a >> b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 8, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_u32x8_min() {\n\n let a = i32x8::from([1, 2, i32::MIN + 1, i32::MIN, 6, -8, 12, 9]);\n\n let b = i32x8::from([17, -18, 1, 1, 19, -5, -1, -9]);\n\n let expected = i32x8::from([1, -18, i32::MIN + 1, i32::MIN, 6, -8, -1, -9]);\n\n let actual = a.min(b);\n\n assert_eq!(expected, actual);\n\n}\n", "file_path": "tests/t_u32x8.rs", 
"rank": 9, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_sub_for_u32x8() {\n\n let a = u32x8::from([9001, 2, 1, 0, 12, 1, 9, 10]);\n\n let b = u32x8::from([17, 18, 1, 1, 15, 1, 2, 5]);\n\n let expected =\n\n u32x8::from([8984, 4294967280, 0, u32::MAX, 4294967293, 0, 7, 5]);\n\n let actual = a - b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 10, "score": 127777.5588387546 }, { "content": "#[test]\n\nfn impl_u32x8_cmp_gt() {\n\n let a = i32x8::from([1, 2, 9, 4, 1, 2, 8, 10]);\n\n let b = i32x8::from([5_i32; 8]);\n\n let expected = i32x8::from([0, 0, -1, 0, 0, 0, -1, -1]);\n\n let actual = a.cmp_gt(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 11, "score": 123113.01556972475 }, { "content": "#[test]\n\nfn impl_add_for_i32x4() {\n\n let a = i32x4::from([1, 2, i32::MAX - 1, i32::MAX - 1]);\n\n let b = i32x4::from([17, 18, 1, 2]);\n\n let expected = i32x4::from([18, 20, i32::MAX, i32::MIN]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i32x4.rs", "rank": 12, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_u8x16() {\n\n let a =\n\n u8x16::from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 250, 250]);\n\n let b =\n\n u8x16::from([17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 5, 6]);\n\n let expected = u8x16::from([\n\n 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 255, 0,\n\n ]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u8x16.rs", "rank": 13, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_u64x4() {\n\n let a = u64x4::from([u64::MAX - 1, u64::MAX - 1, 6, 9]);\n\n let b = u64x4::from([1, 2, 3, 4]);\n\n let expected = u64x4::from([u64::MAX, u64::MIN, 9, 13]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u64x4.rs", "rank": 14, "score": 116341.24541295192 }, 
{ "content": "#[test]\n\nfn impl_add_for_f64x2() {\n\n let a = f64x2::from([1.0, 2.0]);\n\n let b = f64x2::from([5.0, 6.0]);\n\n let expected = f64x2::from([6.0, 8.0]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x2.rs", "rank": 15, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_i8x16() {\n\n let a =\n\n i8x16::from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 126, 127]);\n\n let b =\n\n i8x16::from([17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 1, 1]);\n\n let expected = i8x16::from([\n\n 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 127, -128,\n\n ]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i8x16.rs", "rank": 16, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_i8x32() {\n\n let a = i8x32::from([\n\n 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 126, 127, 1, 2, 3, 4, 5, 6,\n\n 7, 8, 9, 10, 11, 12, 13, 14, 126, 127,\n\n ]);\n\n let b = i8x32::from([\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 1, 1, 17, 18, 19,\n\n 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 1, 1,\n\n ]);\n\n let expected = i8x32::from([\n\n 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 127, -128, 18, 20,\n\n 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 127, -128,\n\n ]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i8x32.rs", "rank": 17, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_f64x4() {\n\n let a = f64x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f64x4::from([5.0, 6.0, 7.0, 8.0]);\n\n let expected = f64x4::from([6.0, 8.0, 10.0, 12.0]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x4.rs", "rank": 18, "score": 116341.24541295189 }, { "content": "#[test]\n\nfn impl_add_for_i64x2() {\n\n let a = i64x2::from([i64::MAX - 1, i64::MAX - 1]);\n\n let b = i64x2::from([1, 
2]);\n\n let expected = i64x2::from([i64::MAX, i64::MIN]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i64x2.rs", "rank": 19, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_u16x8() {\n\n let a = u16x8::from([1, 2, 3, 4, 5, 6, u16::MAX - 1, u16::MAX - 1]);\n\n let b = u16x8::from([17, 18, 19, 20, 21, 22, 1, 2]);\n\n let expected = u16x8::from([18, 20, 22, 24, 26, 28, u16::MAX, 0]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u16x8.rs", "rank": 20, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_i64x4() {\n\n let a = i64x4::from([i64::MAX - 1, i64::MAX - 1, 6, 9]);\n\n let b = i64x4::from([1, 2, 3, 4]);\n\n let expected = i64x4::from([i64::MAX, i64::MIN, 9, 13]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i64x4.rs", "rank": 21, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_f32x8() {\n\n let a = f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]);\n\n let b = f32x8::from([5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0]);\n\n let expected = f32x8::from([6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 22, "score": 116341.24541295189 }, { "content": "#[test]\n\nfn impl_add_for_u64x2() {\n\n let a = u64x2::from([u64::MAX - 1, u64::MAX - 1]);\n\n let b = u64x2::from([1, 2]);\n\n let expected = u64x2::from([u64::MAX, u64::MIN]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u64x2.rs", "rank": 23, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_u32x4() {\n\n let a = u32x4::from([1, 2, u32::MAX - 1, u32::MAX - 1]);\n\n let b = u32x4::from([17, 18, 1, 2]);\n\n let expected = u32x4::from([18, 20, u32::MAX, u32::MIN]);\n\n let actual = a + b;\n\n assert_eq!(expected, 
actual);\n\n}\n\n\n", "file_path": "tests/t_u32x4.rs", "rank": 24, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_i32x8() {\n\n let a = i32x8::from([1, 2, i32::MAX - 1, i32::MAX - 1, 15, 20, 5000, 2990]);\n\n let b = i32x8::from([17, 18, 1, 2, 20, 5, 900, 900]);\n\n let expected = i32x8::from([18, 20, i32::MAX, i32::MIN, 35, 25, 5900, 3890]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i32x8.rs", "rank": 25, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_i16x8() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MAX - 1, i16::MAX - 1]);\n\n let b = i16x8::from([17, 18, 19, 20, 21, 22, 1, 2]);\n\n let expected = i16x8::from([18, 20, 22, 24, 26, 28, i16::MAX, i16::MIN]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 26, "score": 116341.24541295192 }, { "content": "#[test]\n\nfn impl_add_for_f32x4() {\n\n let a = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f32x4::from([5.0, 6.0, 7.0, 8.0]);\n\n let expected = f32x4::from([6.0, 8.0, 10.0, 12.0]);\n\n let actual = a + b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 27, "score": 116341.24541295189 }, { "content": "#[test]\n\nfn impl_f32x8_cmp_eq() {\n\n let a = f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 2.0, 1.0]);\n\n let b = f32x8::from([2.0; 8]);\n\n let expected: [i32; 8] = [0, -1, 0, 0, 0, 0, -1, 0];\n\n let actual: [i32; 8] = cast(a.cmp_eq(b));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 28, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i64x4_cmp_eq() {\n\n let a = i64x4::from([1_i64, 4, i64::MAX, 5]);\n\n let b = i64x4::from([3_i64, 4, i64::MAX, 1]);\n\n let expected = i64x4::from([0, -1, -1, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n", "file_path": "tests/t_i64x4.rs", "rank": 29, "score": 111631.85631727101 }, { 
"content": "#[test]\n\nfn impl_i32x8_cmp_eq() {\n\n let a = i32x8::from([1, 2, 3, 4, 2, 1, 8, 2]);\n\n let b = i32x8::from([2_i32; 8]);\n\n let expected = i32x8::from([0, -1, 0, 0, -1, 0, 0, -1]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i32x8.rs", "rank": 30, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i8x16_cmp_eq() {\n\n let a = i8x16::from([1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]);\n\n let b = i8x16::from([2_i8; 16]);\n\n let expected =\n\n i8x16::from([0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i8x16.rs", "rank": 31, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_f64x2_cmp_eq() {\n\n let a = f64x2::from([1.0, 2.0]);\n\n let b = f64x2::from([2.0, 2.0]);\n\n let expected: [i64; 2] = [0, -1];\n\n let actual: [i64; 2] = cast(a.cmp_eq(b));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x2.rs", "rank": 32, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i64x2_cmp_eq() {\n\n let a = i64x2::from([1_i64, 4]);\n\n let b = i64x2::from([3_i64, 4]);\n\n let expected = i64x2::from([0, -1]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i64x2.rs", "rank": 33, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_u32x4_cmp_eq() {\n\n let a = u32x4::from([1, 2, 3, 4]);\n\n let b = u32x4::from([2_u32; 4]);\n\n let expected = u32x4::from([0, u32::MAX, 0, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u32x4.rs", "rank": 34, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i8x32_cmp_eq() {\n\n let a = i8x32::from([\n\n 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1,\n\n 2, 3, 4, 1, 2, 3, 4,\n\n ]);\n\n let b = i8x32::from([2_i8; 32]);\n\n let expected = i8x32::from([\n\n 
0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0,\n\n 0, 0, -1, 0, 0, 0, -1, 0, 0,\n\n ]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i8x32.rs", "rank": 35, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_u64x4_cmp_eq() {\n\n let a = u64x4::from([1_u64, 4, u64::MAX, 5]);\n\n let b = u64x4::from([3_u64, 4, u64::MAX, 1]);\n\n let expected = u64x4::from([0, u64::MAX, u64::MAX, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n", "file_path": "tests/t_u64x4.rs", "rank": 36, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_u8x16_cmp_eq() {\n\n let a = u8x16::from([1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]);\n\n let b = u8x16::from([2_u8; 16]);\n\n let expected = u8x16::from([\n\n 0,\n\n u8::MAX,\n\n 0,\n\n 0,\n\n 0,\n\n u8::MAX,\n\n 0,\n\n 0,\n\n 0,\n\n u8::MAX,\n\n 0,\n\n 0,\n\n 0,\n\n u8::MAX,\n\n 0,\n\n 0,\n\n ]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u8x16.rs", "rank": 37, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i32x4_cmp_eq() {\n\n let a = i32x4::from([1, 2, 3, 4]);\n\n let b = i32x4::from([2_i32; 4]);\n\n let expected = i32x4::from([0, -1, 0, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i32x4.rs", "rank": 38, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_u16x8_cmp_eq() {\n\n let a = u16x8::from([1, 2, 3, 4, 1, 2, 3, 4]);\n\n let b = u16x8::from([2_u16; 8]);\n\n let expected = u16x8::from([0, u16::MAX, 0, 0, 0, u16::MAX, 0, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u16x8.rs", "rank": 39, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_i16x8_cmp_eq() {\n\n let a = i16x8::from([1, 2, 3, 4, 1, 2, 3, 4]);\n\n let b = i16x8::from([2_i16; 8]);\n\n let expected = i16x8::from([0, -1, 0, 0, 0, -1, 
0, 0]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 40, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_u64x2_cmp_eq() {\n\n let a = u64x2::from([1_u64, 4]);\n\n let b = u64x2::from([3_u64, 4]);\n\n let expected = u64x2::from([0, u64::MAX]);\n\n let actual = a.cmp_eq(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_u64x2.rs", "rank": 41, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_f64x4_cmp_eq() {\n\n let a = f64x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f64x4::from([2.0, 2.0, 2.0, 2.0]);\n\n let expected: [i64; 4] = [0, -1, 0, 0];\n\n let actual: [i64; 4] = cast(a.cmp_eq(b));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x4.rs", "rank": 42, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_f32x4_cmp_eq() {\n\n let a = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f32x4::from([2.0, 2.0, 2.0, 2.0]);\n\n let expected: [i32; 4] = [0, -1, 0, 0];\n\n let actual: [i32; 4] = cast(a.cmp_eq(b));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 43, "score": 111631.85631727101 }, { "content": "#[test]\n\nfn impl_f64x4_mul_add() {\n\n let a = f64x4::from([2.0, 3.0, 4.0, 5.0]);\n\n let b = f64x4::from([4.0, 5.0, 6.0, 7.0]);\n\n let c = f64x4::from([1.0, 1.0, 1.0, 1.0]);\n\n let expected = f64x4::from([9.0, 16.0, 25.0, 36.0]);\n\n let actual = a.mul_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x4.rs", "rank": 44, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f32x4_mul_add() {\n\n let a = f32x4::from([2.0, 3.0, 4.0, 5.0]);\n\n let b = f32x4::from([4.0, 5.0, 6.0, 7.0]);\n\n let c = f32x4::from([1.0, 1.0, 1.0, 1.0]);\n\n let expected = f32x4::from([9.0, 16.0, 25.0, 36.0]);\n\n let actual = a.mul_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 45, "score": 
111560.4667359908 }, { "content": "#[test]\n\nfn impl_f32x8_mul_add() {\n\n let a = f32x8::from([2.0, 3.0, 4.0, 5.0, 6.7, 9.2, 11.5, 12.2]);\n\n let b = f32x8::from([4.0, 5.0, 6.0, 7.0, 1.5, 8.9, 4.2, 5.6]);\n\n let c = f32x8::from([1.0; 8]);\n\n let expected: [f32; 8] =\n\n cast(f32x8::from([9.0, 16.0, 25.0, 36.0, 11.05, 82.88, 49.3, 69.32]));\n\n let actual: [f32; 8] = cast(a.mul_add(b, c));\n\n for (act, exp) in actual.iter().zip(expected.iter()) {\n\n assert!((exp - act).abs() < 0.000001);\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 46, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_add_const_for_f32x4() {\n\n let a = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let expected = f32x4::from([6.0, 7.0, 8.0, 9.0]);\n\n let actual = a + 5.0;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 47, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_add_const_for_f32x8() {\n\n let a = f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]);\n\n let expected = f32x8::from([6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0]);\n\n let actual = a + 5.0;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 48, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f32x8_reduce_add() {\n\n let p = f32x8::from([0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.009]);\n\n assert!((p.reduce_add() - 0.037) < 0.000000001);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 49, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f64x2_mul_add() {\n\n let a = f64x2::from([2.0, 3.0]);\n\n let b = f64x2::from([4.0, 5.0]);\n\n let c = f64x2::from([1.0, 1.0]);\n\n let expected = f64x2::from([9.0, 16.0]);\n\n let actual = a.mul_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x2.rs", "rank": 50, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f64x2_reduce_add() {\n\n let p = f64x2::splat(0.001);\n\n 
assert_eq!(p.reduce_add(), 0.002);\n\n}\n\n\n", "file_path": "tests/t_f64x2.rs", "rank": 51, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f32x4_reduce_add() {\n\n let p = f32x4::splat(0.001);\n\n assert_eq!(p.reduce_add(), 0.004);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 52, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f64x4_reduce_add() {\n\n let p = f64x4::splat(0.001);\n\n assert_eq!(p.reduce_add(), 0.004);\n\n}\n\n\n", "file_path": "tests/t_f64x4.rs", "rank": 53, "score": 111560.4667359908 }, { "content": "#[test]\n\nfn impl_f64x4_mul_neg_add() {\n\n let a = f64x4::from([2.0, 3.0, 4.0, 5.0]);\n\n let b = f64x4::from([4.0, 5.0, 6.0, 7.0]);\n\n let c = f64x4::from([1.0, 1.0, 1.0, 1.0]);\n\n let expected = f64x4::from([-7.0, -14.0, -23.0, -34.0]);\n\n let actual = a.mul_neg_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x4.rs", "rank": 54, "score": 107245.704783919 }, { "content": "#[test]\n\nfn impl_f64x2_mul_neg_add() {\n\n let a = f64x2::from([2.0, 3.0]);\n\n let b = f64x2::from([4.0, 5.0]);\n\n let c = f64x2::from([1.0, 1.0]);\n\n let expected = f64x2::from([-7.0, -14.0]);\n\n let actual = a.mul_neg_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f64x2.rs", "rank": 55, "score": 107245.704783919 }, { "content": "#[test]\n\nfn impl_f32x4_mul_neg_add() {\n\n let a = f32x4::from([2.0, 3.0, 4.0, 5.0]);\n\n let b = f32x4::from([4.0, 5.0, 6.0, 7.0]);\n\n let c = f32x4::from([1.0, 1.0, 1.0, 1.0]);\n\n let expected = f32x4::from([-7.0, -14.0, -23.0, -34.0]);\n\n let actual = a.mul_neg_add(b, c);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 56, "score": 107245.704783919 }, { "content": "#[test]\n\nfn impl_f32x8_mul_neg_add() {\n\n let a = f32x8::from([2.0, 3.0, 4.0, 5.0, 6.7, 9.2, 11.5, 12.2]);\n\n let b = f32x8::from([4.0, 5.0, 6.0, 7.0, 1.5, 8.9, 4.2, -5.6]);\n\n let c = f32x8::from([1.0; 8]);\n\n let expected: 
[f32; 8] =\n\n cast(f32x8::from([-7.0, -14.0, -23.0, -34.0, -9.05, -80.88, -47.3, 69.32]));\n\n let actual: [f32; 8] = cast(a.mul_neg_add(b, c));\n\n for (act, exp) in actual.iter().zip(expected.iter()) {\n\n assert!((exp - act).abs() < 0.00001);\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 57, "score": 107245.704783919 }, { "content": "#[test]\n\nfn size_align() {\n\n assert_eq!(core::mem::size_of::<u32x8>(), 32);\n\n assert_eq!(core::mem::align_of::<u32x8>(), 32);\n\n}\n\n\n", "file_path": "tests/t_u32x8.rs", "rank": 58, "score": 89328.62209082837 }, { "content": "#[test]\n\nfn impl_bitand_for_i16x8() {\n\n let a = i16x8::from([0, 0, 1, 1, 0, 0, 1, 1]);\n\n let b = i16x8::from([0, 1, 0, 1, 0, 1, 0, 1]);\n\n let expected = i16x8::from([0, 0, 0, 1, 0, 0, 0, 1]);\n\n let actual = a & b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 59, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_sum() {\n\n let mut p = Vec::with_capacity(250_000);\n\n for _ in 0..250_000 {\n\n p.push(f32x4::splat(0.001));\n\n }\n\n let now = std::time::Instant::now();\n\n let sum: f32 = p.iter().map(|x| x.reduce_add()).sum();\n\n let duration = now.elapsed().as_micros();\n\n println!(\"Time take {} {}us\", sum, duration);\n\n\n\n let p = vec![0.001; 1_000_000];\n\n let now = std::time::Instant::now();\n\n let sum2: f32 = p.iter().sum();\n\n let duration = now.elapsed().as_micros();\n\n println!(\"Time take {} {}us\", sum2, duration);\n\n}\n", "file_path": "tests/t_f32x4.rs", "rank": 60, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitand_for_f32x4() {\n\n let a = f32x4::from([0.0, 0.0, 1.0, 1.0]);\n\n let b = f32x4::from([0.0, 1.0, 0.0, 1.0]);\n\n let expected = f32x4::from([0.0, 0.0, 0.0, 1.0]);\n\n let actual = a & b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 61, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitxor_for_i16x8() {\n\n let 
a = i16x8::from([0, 0, 1, 1, 0, 0, 1, 1]);\n\n let b = i16x8::from([0, 1, 0, 1, 0, 1, 0, 1]);\n\n let expected = i16x8::from([0, 1, 1, 0, 0, 1, 1, 0]);\n\n let actual = a ^ b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 62, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_debug_for_f32x8() {\n\n let expected = \"(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0)\";\n\n let actual =\n\n format!(\"{:?}\", f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]));\n\n assert_eq!(expected, actual);\n\n\n\n let expected = \"(1.000, 2.000, 3.000, 4.000, 5.000, 6.000, 7.000, 8.000)\";\n\n let actual =\n\n format!(\"{:.3?}\", f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 63, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_ln() {\n\n for f in [0.1, 0.5, 1.0, 2.718282, 10.0, 35.0, 1250.0].iter().copied() {\n\n let expected = f32x4::from((f as f32).ln());\n\n let actual = f32x4::from(f).ln();\n\n let diff_from_std: [f32; 4] = cast((actual - expected).abs());\n\n assert!(diff_from_std[0] < 0.000001);\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 64, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_i16x8_min() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = i16x8::from([17, -18, 190, -20, 21, -22, 1, 1]);\n\n let expected = i16x8::from([1, -18, 3, -20, 5, -22, i16::MIN + 1, i16::MIN]);\n\n let actual = a.min(b);\n\n assert_eq!(expected, actual);\n\n}\n", "file_path": "tests/t_i16x8.rs", "rank": 65, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_to_degrees() {\n\n let pi = core::f32::consts::PI;\n\n let a = f32x4::from([0.0, pi / 2.0, pi, 2.0 * pi]);\n\n let expected = f32x4::from([0.0, 90.0, 180.0, 360.0]);\n\n let actual = a.to_degrees();\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 66, 
"score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_is_finite() {\n\n let a = f32x4::from([f32::NAN, 1.0, f32::INFINITY, f32::NEG_INFINITY]);\n\n let expected = [0, u32::MAX, 0, 0];\n\n let actual: [u32; 4] = cast(a.is_finite());\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 67, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_i16x8_max() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = i16x8::from([17, -18, 190, -20, 21, -22, 1, 1]);\n\n let expected = i16x8::from([17, 2, 190, 4, 21, 6, 1, 1]);\n\n let actual = a.max(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 68, "score": 83881.09157583301 }, { "content": "#[cfg(target_feature = \"sse\")]\n\n#[test]\n\nfn impl_f32x4_acos() {\n\n let inc = 1.0 / 2501.0 / 4.0;\n\n for x in -2500..=2500 {\n\n let base = (x * 4) as f32 * inc;\n\n let origs = [base, base + inc, base + 2.0 * inc, base + 3.0 * inc];\n\n let actual_acoses = f32x4::from(origs).acos();\n\n for i in 0..4 {\n\n let orig = origs[i];\n\n let check = |name: &str, vals: f32x4, expected: f32| {\n\n let actual_arr: [f32; 4] = cast(vals);\n\n let actual = actual_arr[i];\n\n assert!(\n\n (actual - expected).abs() < 0.0000006,\n\n \"Wanted {name}({orig}) to be {expected} but got {actual}\",\n\n name = name,\n\n orig = orig,\n\n expected = expected,\n\n actual = actual\n\n );\n\n };\n\n check(\"acos\", actual_acoses, orig.acos());\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 69, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitxor_for_f32x4() {\n\n let a = f32x4::from([0.0, 0.0, 1.0, 1.0]);\n\n let b = f32x4::from([0.0, 1.0, 0.0, 1.0]);\n\n let expected = f32x4::from([0.0, 1.0, 1.0, 0.0]);\n\n let actual = a ^ b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 70, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn 
impl_mul_for_f32x4() {\n\n let a = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f32x4::from([5.0, 7.0, 17.0, 1.0]);\n\n let expected = f32x4::from([5.0, 14.0, 51.0, 4.0]);\n\n let actual = a * b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 71, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_i16x8_abs() {\n\n let a = i16x8::from([1, -2, 3, -4, 5, -6, -7, i16::MIN]);\n\n let expected = i16x8::from([1, 2, 3, 4, 5, 6, 7, i16::MIN]);\n\n let actual = a.abs();\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 72, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_debug_for_f32x4() {\n\n let expected = \"(1.0, 2.0, 3.0, 4.0)\";\n\n let actual = format!(\"{:?}\", f32x4::from([1.0, 2.0, 3.0, 4.0]));\n\n assert_eq!(expected, actual);\n\n\n\n let expected = \"(1.000, 2.000, 3.000, 4.000)\";\n\n let actual = format!(\"{:.3?}\", f32x4::from([1.0, 2.0, 3.0, 4.0]));\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 73, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_pow_n() {\n\n let p = f32x4::from([29.0, 0.1, 0.5, 1.0]);\n\n let f = f32x4::from([1.2, 2.0, 3.0, 1.5]);\n\n let res = f.pow_f32x4(p);\n\n\n\n let p: [f32; 4] = cast(p);\n\n let f: [f32; 4] = cast(f);\n\n let res: [f32; 4] = cast(res);\n\n for i in 0..p.len() {\n\n let expected = f[i].powf(p[i]);\n\n if !(expected.is_nan() && res[i].is_nan()) {\n\n assert!((expected - res[i]).abs() < 0.0001);\n\n }\n\n }\n\n\n\n let p = f32x4::from([2.718282, -0.2, -1.5, 3.4]);\n\n let f = f32x4::from([9.2, 6.1, 2.5, -4.5]);\n\n let res = f.pow_f32x4(p);\n\n\n\n let p: [f32; 4] = cast(p);\n\n let f: [f32; 4] = cast(f);\n\n let res: [f32; 4] = cast(res);\n\n for i in 0..p.len() {\n\n let expected = f[i].powf(p[i]);\n\n if !(expected.is_nan() && res[i].is_nan()) {\n\n assert!((expected - res[i]).abs() < 0.0001);\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", 
"rank": 74, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_shr_for_i16x8() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = 2;\n\n let expected = i16x8::from([\n\n 1 >> 2,\n\n 2 >> 2,\n\n 3 >> 2,\n\n 4 >> 2,\n\n 5 >> 2,\n\n 6 >> 2,\n\n (i16::MIN + 1) >> 2,\n\n i16::MIN >> 2,\n\n ]);\n\n let actual = a >> b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 75, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_exp() {\n\n for f in [(-2.0), (-1.0), (0.0), (1.0), (1.5), (2.0), (10.0)].iter().copied()\n\n {\n\n let expected = f32x4::from((f as f32).exp());\n\n let actual = f32x4::from(f).exp();\n\n let diff_from_std: [f32; 4] = cast((actual - expected).abs());\n\n assert!(diff_from_std[0] < 0.000000000000001);\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 76, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitor_for_i16x8() {\n\n let a = i16x8::from([0, 0, 1, 1, 0, 0, 1, 1]);\n\n let b = i16x8::from([0, 1, 0, 1, 0, 1, 0, 1]);\n\n let expected = i16x8::from([0, 1, 1, 1, 0, 1, 1, 1]);\n\n let actual = a | b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 77, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_blend() {\n\n let use_t: f32 = f32::from_bits(u32::MAX);\n\n let t = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let f = f32x4::from([5.0, 6.0, 7.0, 8.0]);\n\n let mask = f32x4::from([use_t, 0.0, use_t, 0.0]);\n\n let expected = f32x4::from([1.0, 6.0, 3.0, 8.0]);\n\n let actual = mask.blend(t, f);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 78, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_pow() {\n\n for f in [0.1, 0.5, 1.0, 2.718282, 3.0, 4.0, 2.5, -1.0].iter().copied() {\n\n let expected = f32x4::splat(2.0 as f32).powf(f);\n\n let actual = f32x4::from(2.0_f32.powf(f));\n\n let diff_from_std: [f32; 4] = 
cast((actual - expected).abs());\n\n assert!(diff_from_std[0] < 0.000001);\n\n }\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 79, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_i16x8_blend() {\n\n let use_t: i16 = -1;\n\n let t = i16x8::from([1, 2, 3, 4, 5, 6, 7, 8]);\n\n let f = i16x8::from([17, 18, 19, 20, 21, 22, 23, 24]);\n\n let mask = i16x8::from([use_t, 0, use_t, 0, use_t, 0, use_t, 0]);\n\n let expected = i16x8::from([1, 18, 3, 20, 5, 22, 7, 24]);\n\n let actual = mask.blend(t, f);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 80, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitor_for_f32x8() {\n\n let a = f32x8::from([0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0]);\n\n let b = f32x8::from([0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0]);\n\n let expected = f32x8::from([0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]);\n\n let actual = a | b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 81, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_mul_for_f32x8() {\n\n let a = f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]);\n\n let b = f32x8::from([5.0, 7.0, 17.0, 1.0, 5.0, 6.0, 7.0, 8.0]);\n\n let expected = f32x8::from([5.0, 14.0, 51.0, 4.0, 25.0, 36.0, 49.0, 64.0]);\n\n let actual = a * b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 82, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_sub_for_f32x8() {\n\n let a = f32x8::from([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]);\n\n let b = f32x8::from([5.0, 7.0, 17.0, 1.0, 1.0, 9.0, 2.0, 6.0]);\n\n let expected = f32x8::from([-4.0, -5.0, -14.0, 3.0, 4.0, -3.0, 5.0, 2.0]);\n\n let actual = a - b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 83, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_is_nan() {\n\n let a = f32x4::from([0.0, f32::NAN, f32::NAN, 0.0]);\n\n let expected = 
[0, u32::MAX, u32::MAX, 0];\n\n let actual: [u32; 4] = cast(a.is_nan());\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 84, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitand_for_f32x8() {\n\n let a = f32x8::from([0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0]);\n\n let b = f32x8::from([0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0]);\n\n let expected = f32x8::from([0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]);\n\n let actual = a & b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 85, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_sub_for_f32x4() {\n\n let a = f32x4::from([1.0, 2.0, 3.0, 4.0]);\n\n let b = f32x4::from([5.0, 7.0, 17.0, 1.0]);\n\n let expected = f32x4::from([-4.0, -5.0, -14.0, 3.0]);\n\n let actual = a - b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 86, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_abs() {\n\n let a = f32x4::from([-1.0, 2.0, -3.5, f32::NEG_INFINITY]);\n\n let expected = f32x4::from([1.0, 2.0, 3.5, f32::INFINITY]);\n\n let actual = a.abs();\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 87, "score": 83881.09157583301 }, { "content": "#[cfg(target_feature = \"sse\")]\n\n#[test]\n\nfn impl_f32x4_asin() {\n\n let inc = 1.0 / 2501.0 / 4.0;\n\n for x in -2500..=2500 {\n\n let base = (x * 4) as f32 * inc;\n\n let origs = [base, base + inc, base + 2.0 * inc, base + 3.0 * inc];\n\n let actual_asins = f32x4::from(origs).asin();\n\n for i in 0..4 {\n\n let orig = origs[i];\n\n let check = |name: &str, vals: f32x4, expected: f32| {\n\n let actual_arr: [f32; 4] = cast(vals);\n\n let actual = actual_arr[i];\n\n assert!(\n\n (actual - expected).abs() < 0.0000006,\n\n \"Wanted {name}({orig}) to be {expected} but got {actual}\",\n\n name = name,\n\n orig = orig,\n\n expected = expected,\n\n actual = actual\n\n );\n\n };\n\n check(\"asin\", 
actual_asins, orig.asin());\n\n }\n\n }\n\n}\n\n\n\n// FIXME: remove cfg requirement once masks as their own types are implemented\n", "file_path": "tests/t_f32x4.rs", "rank": 88, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_max() {\n\n let a = f32x4::from([1.0, 5.0, 3.0, f32::NAN]);\n\n let b = f32x4::from([2.0, f32::NEG_INFINITY, f32::INFINITY, 10.0]);\n\n let expected = f32x4::from([2.0, 5.0, f32::INFINITY, 10.0]);\n\n let actual = a.max(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 89, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_div_for_f32x8() {\n\n let a = f32x8::from([4.0, 9.0, 10.0, 12.0, 5.0, 6.0, 7.0, 8.0]);\n\n let b = f32x8::from([2.0, 2.0, 5.0, -3.0, 2.0, 1.5, 3.0, 2.5]);\n\n let expected = f32x8::from([2.0, 4.5, 2.0, -4.0, 2.5, 4.0, 2.3333333, 3.2]);\n\n let actual = a / b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x8.rs", "rank": 90, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_shl_for_i16x8() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = 2;\n\n let expected = i16x8::from([\n\n 1 << 2,\n\n 2 << 2,\n\n 3 << 2,\n\n 4 << 2,\n\n 5 << 2,\n\n 6 << 2,\n\n (i16::MIN + 1) << 2,\n\n i16::MIN << 2,\n\n ]);\n\n let actual = a << b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 91, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_to_radians() {\n\n let pi = core::f32::consts::PI;\n\n let a = f32x4::from([0.0, 90.0, 180.0, 360.0]);\n\n let expected = f32x4::from([0.0, pi / 2.0, pi, 2.0 * pi]);\n\n let actual = a.to_radians();\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 92, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_sub_for_i16x8() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = i16x8::from([17, -18, 190, -20, 21, -22, 1, 1]);\n\n let 
expected = i16x8::from([-16, 20, -187, 24, -16, 28, i16::MIN, i16::MAX]);\n\n let actual = a - b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 93, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_round() {\n\n let a = f32x4::from([1.1, 2.5, 3.7, 4.0]);\n\n let expected = f32x4::from([1.0, 2.0, 4.0, 4.0]);\n\n let actual = a.round();\n\n assert_eq!(expected, actual);\n\n //\n\n let a = f32x4::from([-1.1, -2.5, -3.7, -4.0]);\n\n let expected = f32x4::from([-1.0, -2.0, -4.0, -4.0]);\n\n let actual = a.round();\n\n assert_eq!(expected, actual);\n\n //\n\n let a = f32x4::from([f32::INFINITY, f32::NEG_INFINITY, 5.5, 5.0]);\n\n let expected = f32x4::from([f32::INFINITY, f32::NEG_INFINITY, 6.0, 5.0]);\n\n let actual = a.round();\n\n assert_eq!(expected, actual);\n\n //\n\n let a = f32x4::from(f32::NAN);\n\n let expected: [u32; 4] = [u32::MAX; 4];\n\n let actual: [u32; 4] = cast(a.round().is_nan());\n\n assert_eq!(expected, actual);\n\n //\n\n let a = f32x4::from(-0.0);\n\n let expected = a;\n\n let actual = a.round();\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 94, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_bitor_for_f32x4() {\n\n let a = f32x4::from([0.0, 0.0, 1.0, 1.0]);\n\n let b = f32x4::from([0.0, 1.0, 0.0, 1.0]);\n\n let expected = f32x4::from([0.0, 1.0, 1.0, 1.0]);\n\n let actual = a | b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 95, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_min() {\n\n let a = f32x4::from([1.0, 5.0, 3.0, f32::NAN]);\n\n let b = f32x4::from([2.0, f32::NEG_INFINITY, f32::INFINITY, 10.0]);\n\n let expected = f32x4::from([1.0, f32::NEG_INFINITY, 3.0, 10.0]);\n\n let actual = a.min(b);\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 96, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_div_for_f32x4() 
{\n\n let a = f32x4::from([4.0, 9.0, 10.0, 12.0]);\n\n let b = f32x4::from([2.0, 2.0, 5.0, -3.0]);\n\n let expected = f32x4::from([2.0, 4.5, 2.0, -4.0]);\n\n let actual = a / b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_f32x4.rs", "rank": 97, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_mul_for_i16x8() {\n\n let a = i16x8::from([1, 2, 3, 4, 5, 6, i16::MIN + 1, i16::MIN]);\n\n let b = i16x8::from([17, -18, 190, -20, 21, -22, 1, 1]);\n\n let expected =\n\n i16x8::from([17, -36, 570, -80, 105, -132, i16::MIN + 1, i16::MIN]);\n\n let actual = a * b;\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "tests/t_i16x8.rs", "rank": 98, "score": 83881.09157583301 }, { "content": "#[test]\n\nfn impl_f32x4_sqrt() {\n\n for (f, e) in [\n\n (f32::INFINITY, f32::INFINITY),\n\n (0.0, 0.0),\n\n (-0.0, -0.0),\n\n (4.0, 2.0),\n\n (9.0, 3.0),\n\n (16.0, 4.0),\n\n (25.0, 5.0),\n\n (5000.0 * 5000.0, 5000.0),\n\n ]\n\n .iter()\n\n .copied()\n\n {\n\n let expected = f32x4::from(e);\n\n let actual = f32x4::from(f).sqrt();\n\n assert_eq!(expected, actual);\n\n }\n\n assert_eq!(\n\n cast::<_, i32x4>(f32x4::from(f32::NAN).sqrt().is_nan()),\n", "file_path": "tests/t_f32x4.rs", "rank": 99, "score": 83881.09157583301 } ]
Rust
src/async/session.rs
reachkrr/serverless-wasm
0e521adc782a002aa1beae56d3661a88322d75e3
use mio::unix::UnixReady; use mio::net::TcpStream; use mio::{Poll, Ready}; use std::collections::HashMap; use std::iter::repeat; use std::rc::Rc; use std::io::{ErrorKind, Read, Write}; use std::cell::RefCell; use std::net::{SocketAddr, Shutdown}; use slab::Slab; use interpreter::WasmInstance; use super::host; use config::ApplicationState; use httparse; use wasmi::{ExternVal, ImportsBuilder, ModuleInstance, TrapKind, RuntimeValue}; #[derive(Debug, Clone, PartialEq)] pub enum ExecutionResult { WouldBlock, Close(Vec<usize>), Continue, ConnectBackend(SocketAddr), } #[derive(Debug)] pub struct Stream { pub readiness: UnixReady, pub interest: UnixReady, pub stream: TcpStream, pub index: usize, } pub struct Buf { buf: Vec<u8>, offset: usize, len: usize, } #[derive(Debug,Clone,PartialEq)] pub enum SessionState { WaitingForRequest, WaitingForBackendConnect(usize), TcpRead(i32, u32, usize), TcpWrite(i32, Vec<u8>, usize), Executing, Done, } pub struct Session { client: Stream, backends: HashMap<usize, Stream>, instance: Option<WasmInstance<host::State, host::AsyncHost>>, config: Rc<RefCell<ApplicationState>>, buffer: Buf, pub state: Option<SessionState>, method: Option<String>, path: Option<String>, env: Option<Rc<RefCell<host::State>>>, } impl Session { pub fn new(config: Rc<RefCell<ApplicationState>>, stream: TcpStream, index: usize) -> Session { let client = Stream { readiness: UnixReady::from(Ready::empty()), interest: UnixReady::from(Ready::readable()) | UnixReady::hup() | UnixReady::error(), stream, index, }; let capacity = 8192; let mut v = Vec::with_capacity(capacity); v.extend(repeat(0).take(capacity)); let buffer = Buf { buf: v, offset: 0, len: 0, }; Session { client, backends: HashMap::new(), instance: None, config, buffer, state: Some(SessionState::WaitingForRequest), method: None, path: None, env: None, } } pub fn add_backend(&mut self, stream: TcpStream, index: usize) { let s = Stream { readiness: UnixReady::from(Ready::empty()), interest: 
UnixReady::from(Ready::writable()) | UnixReady::hup() | UnixReady::error(), stream, index, }; self.backends.insert(index, s); self.state = Some(SessionState::WaitingForBackendConnect(index)); } pub fn resume(&mut self) -> ExecutionResult { let res = self.instance.as_mut().map(|instance| instance.resume()).unwrap(); println!("resume result: {:?}", res); match res { Err(t) => match t.kind() { TrapKind::Host(ref err) => { match err.as_ref().downcast_ref() { Some(host::AsyncHostError::Connecting(address)) => { println!("returning connect to backend server: {}", address); return ExecutionResult::ConnectBackend(address.clone()); }, Some(host::AsyncHostError::TcpWrite(fd, ptr, sz, written)) => { self.backends.get_mut(&(*fd as usize)).map(|backend| backend.interest.insert(UnixReady::from(Ready::writable()))); let buf = self.env.as_mut().and_then(|env| env.borrow_mut().get_buf(*ptr, *sz as usize)).unwrap(); self.state = Some(SessionState::TcpWrite(*fd, buf, *written)); return ExecutionResult::Continue; }, Some(host::AsyncHostError::TcpRead(fd, ptr, sz)) => { self.backends.get_mut(&(*fd as usize)).map(|backend| backend.interest.insert(UnixReady::from(Ready::readable()))); self.state = Some(SessionState::TcpRead(*fd, *ptr, *sz as usize)); return ExecutionResult::Continue; }, _ => { panic!("got host error: {:?}", err) } } }, _ => { panic!("got trap: {:?}", t); } }, Ok(_) => if self .instance .as_mut() .map(|instance| { println!( "set up response: {:?}", instance.state.borrow().prepared_response ); instance .state .borrow() .prepared_response .status_code .is_some() && instance.state.borrow().prepared_response.body.is_some() }) .unwrap_or(false) { self.client.interest.insert(Ready::writable()); return ExecutionResult::Continue } } ExecutionResult::Continue } pub fn create_instance(&mut self) -> ExecutionResult { let method = self.method.as_ref().unwrap(); let path = self.path.as_ref().unwrap(); if let Some((func_name, module, ref opt_env)) = self.config.borrow().route(method, 
path) { let mut env = host::State::new(); if let Some(h) = opt_env { env.db.extend( h.iter() .map(|(ref k, ref v)| (k.to_string(), v.to_string())), ); } let env = Rc::new(RefCell::new(env)); self.env = Some(env.clone()); let resolver = host::StateResolver { inner: env.clone() }; let main = ModuleInstance::new(&module, &ImportsBuilder::new().with_resolver("env", &resolver)) .expect("Failed to instantiate module") .assert_no_start(); if let Some(ExternVal::Func(func_ref)) = main.export_by_name(func_name) { let instance = WasmInstance::new(env, &func_ref, &[]); self.instance = Some(instance); ExecutionResult::Continue } else { println!("function not found"); self .client .stream .write(b"HTTP/1.1 404 Not Found\r\nContent-length: 19\r\n\r\nFunction not found\n"); self.client.stream.shutdown(Shutdown::Both); self.client.interest = UnixReady::from(Ready::empty()); ExecutionResult::Close(vec![self.client.index]) } } else { println!("route not found"); self .client .stream .write(b"HTTP/1.1 404 Not Found\r\nContent-length: 16\r\n\r\nRoute not found\n"); self.client.stream.shutdown(Shutdown::Both); self.client.interest = UnixReady::from(Ready::empty()); ExecutionResult::Close(vec![self.client.index]) } } pub fn process_events(&mut self, token: usize, events: Ready) -> bool { println!("client[{}]: token {} got events {:?}", self.client.index, token, events); if token == self.client.index { self.client.readiness = self.client.readiness | UnixReady::from(events); self.client.readiness & self.client.interest != UnixReady::from(Ready::empty()) } else { if let Some(ref mut stream) = self.backends.get_mut(&token) { println!("state: {:?}", self.state); if self.state == Some(SessionState::WaitingForBackendConnect(token)) { self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I32(token as i32))); self.state = Some(SessionState::Executing); } stream.readiness.insert(UnixReady::from(events)); stream.readiness & stream.interest != 
UnixReady::from(Ready::empty()) } else { println!("non existing backend {} got events {:?}", token, events); false } } } pub fn execute(&mut self) -> ExecutionResult { loop { let front_readiness = self.client.readiness & self.client.interest; if front_readiness.is_readable() { let res = self.front_readable(); if res != ExecutionResult::Continue { return res; } } if front_readiness.is_writable() { let res = self.front_writable(); if res != ExecutionResult::Continue { return res; } } let res = self.process(); if res != ExecutionResult::Continue { return res; } } } fn front_readable(&mut self) -> ExecutionResult { if self.state == Some(SessionState::WaitingForRequest) { loop { if self.buffer.offset + self.buffer.len == self.buffer.buf.len() { break; } match self .client .stream .read(&mut self.buffer.buf[self.buffer.offset + self.buffer.len..]) { Ok(0) => { return ExecutionResult::Close(vec![self.client.index]); } Ok(sz) => { self.buffer.len += sz; } Err(e) => { if e.kind() == ErrorKind::WouldBlock { self.client.readiness.remove(Ready::readable()); break; } } } } ExecutionResult::Continue } else { ExecutionResult::Close(vec![self.client.index]) } } fn process(&mut self) -> ExecutionResult { println!("[{}] process", self.client.index); let state = self.state.take().unwrap(); match state { SessionState::WaitingForRequest => { let (method, path) = { let mut headers = [httparse::Header { name: "", value: &[], }; 16]; let mut req = httparse::Request::new(&mut headers); match req.parse(&self.buffer.buf[self.buffer.offset..self.buffer.len]) { Err(e) => { println!("http parsing error: {:?}", e); self.state = Some(SessionState::WaitingForRequest); return ExecutionResult::Close(vec![self.client.index]); } Ok(httparse::Status::Partial) => { self.state = Some(SessionState::WaitingForRequest); return ExecutionResult::Continue; } Ok(httparse::Status::Complete(sz)) => { self.buffer.offset += sz; println!("got request: {:?}", req); ( req.method.unwrap().to_string(), 
req.path.unwrap().to_string(), ) } } }; self.client.interest.remove(Ready::readable()); self.method = Some(method); self.path = Some(path); self.state = Some(SessionState::Executing); ExecutionResult::Continue }, SessionState::Executing => { if self.instance.is_none() { let res = self.create_instance(); if res != ExecutionResult::Continue { self.state = Some(SessionState::Executing); return res; } } println!("resuming"); self.state = Some(SessionState::Executing); self.resume() }, SessionState::TcpRead(fd, ptr, sz) => { let readiness = self.backends[&(fd as usize)].readiness & self.backends[&(fd as usize)].interest; println!("tcpread({}): readiness: {:?}", fd, readiness); if readiness.is_readable() { let mut buffer = Vec::with_capacity(sz as usize); buffer.extend(repeat(0).take(sz as usize)); let mut read = 0usize; loop { match self.backends.get_mut(&(fd as usize)).unwrap().stream.read(&mut buffer[read..]) { Ok(0) => { println!("read 0"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::readable())); self.env.as_mut().map(|env| env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, Ok(sz) => { read += sz; println!("read {} bytes", read); if read == sz { self.env.as_mut().map(|env| env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } }, Err(e) => match e.kind() { ErrorKind::WouldBlock => { println!("wouldblock"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::readable())); self.env.as_mut().map(|env| env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as 
i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, e => { println!("backend socket error: {:?}", e); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(-1))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } } } } } else { self.state = Some(SessionState::TcpRead(fd, ptr, sz)); ExecutionResult::WouldBlock } }, SessionState::TcpWrite(fd, buffer, mut written) => { let readiness = self.backends[&(fd as usize)].readiness & self.backends[&(fd as usize)].interest; if readiness.is_writable() { loop { match self.backends.get_mut(&(fd as usize)).unwrap().stream.write(&buffer[written..]) { Ok(0) => { self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::writable())); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(written as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, Ok(sz) => { written += sz; println!("wrote {} bytes", sz); if written == buffer.len() { self.state = Some(SessionState::Executing); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(written as i64))); return ExecutionResult::Continue; } }, Err(e) => match e.kind() { ErrorKind::WouldBlock => { println!("wouldblock"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::writable())); self.state = Some(SessionState::TcpWrite(fd, buffer, written)); return ExecutionResult::Continue; }, e => { println!("backend socket error: {:?}", e); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(-1))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } } } } } else { self.state = Some(SessionState::TcpWrite(fd, buffer, written)); ExecutionResult::WouldBlock } }, SessionState::WaitingForBackendConnect(_) => { panic!("should not have called execute() in WaitingForBackendConnect"); }, 
SessionState::Done => { panic!("done"); } } } fn front_writable(&mut self) -> ExecutionResult { println!("[{}] front writable", self.client.index); let response = self .instance .as_mut() .map(|instance| instance.state.borrow().prepared_response.clone()) .unwrap(); self .client .stream .write_fmt(format_args!("HTTP/1.1 {} {}\r\n", response.status_code.unwrap(), response.reason.unwrap())); for header in response.headers.iter() { self .client .stream .write_fmt(format_args!("{}: {}\r\n", header.0, header.1)); } self.client.stream.write(b"\r\n"); self.client.stream.write(&response.body.unwrap()[..]); ExecutionResult::Close(vec![self.client.index]) } }
use mio::unix::UnixReady; use mio::net::TcpStream; use mio::{Poll, Ready}; use std::collections::HashMap; use std::iter::repeat; use std::rc::Rc; use std::io::{ErrorKind, Read, Write}; use std::cell::RefCell; use std::net::{SocketAddr, Shutdown}; use slab::Slab; use interpreter::WasmInstance; use super::host; use config::ApplicationState; use httparse; use wasmi::{ExternVal, ImportsBuilder, ModuleInstance, TrapKind, RuntimeValue}; #[derive(Debug, Clone, PartialEq)] pub enum ExecutionResult { WouldBlock, Close(Vec<usize>), Continue, ConnectBackend(SocketAddr), } #[derive(Debug)] pub struct Stream { pub readiness: UnixReady, pub interest: UnixReady, pub stream: TcpStream, pub index: usize, } pub struct Buf { buf: Vec<u8>, offset: usize, len: usize, } #[derive(Debug,Clone,PartialEq)] pub enum SessionState { WaitingForRequest, WaitingForBackendConnect(usize), TcpRead(i32, u32, usize), TcpWrite(i32, Vec<u8>, usize), Executing, Done, } pub struct Session { client: Stream, backends: HashMap<usize, Stream>, instance: Option<WasmInstance<host::State, host::AsyncHost>>, config: Rc<RefCell<ApplicationState>>, buffer: Buf, pub state: Option<SessionState>, method: Option<String>, path: Option<String>, env: Option<Rc<RefCell<host::State>>>, } impl Session { pub fn new(config: Rc<RefCell<ApplicationState>>, stream: TcpStream, index: usize) -> Session { let client = Stream { readiness: UnixReady::from(Ready::empty()), interest: UnixReady::from(Ready::readable()) | UnixReady::hup() | UnixReady::error(), stream, index, }; let capacity = 8192; let mut v = Vec::with_capacity(capacity); v.extend(repeat(0).take(capacity)); let buffer = Buf { buf: v, offset: 0, len: 0, }; Session { client, b
pub fn add_backend(&mut self, stream: TcpStream, index: usize) { let s = Stream { readiness: UnixReady::from(Ready::empty()), interest: UnixReady::from(Ready::writable()) | UnixReady::hup() | UnixReady::error(), stream, index, }; self.backends.insert(index, s); self.state = Some(SessionState::WaitingForBackendConnect(index)); } pub fn resume(&mut self) -> ExecutionResult { let res = self.instance.as_mut().map(|instance| instance.resume()).unwrap(); println!("resume result: {:?}", res); match res { Err(t) => match t.kind() { TrapKind::Host(ref err) => { match err.as_ref().downcast_ref() { Some(host::AsyncHostError::Connecting(address)) => { println!("returning connect to backend server: {}", address); return ExecutionResult::ConnectBackend(address.clone()); }, Some(host::AsyncHostError::TcpWrite(fd, ptr, sz, written)) => { self.backends.get_mut(&(*fd as usize)).map(|backend| backend.interest.insert(UnixReady::from(Ready::writable()))); let buf = self.env.as_mut().and_then(|env| env.borrow_mut().get_buf(*ptr, *sz as usize)).unwrap(); self.state = Some(SessionState::TcpWrite(*fd, buf, *written)); return ExecutionResult::Continue; }, Some(host::AsyncHostError::TcpRead(fd, ptr, sz)) => { self.backends.get_mut(&(*fd as usize)).map(|backend| backend.interest.insert(UnixReady::from(Ready::readable()))); self.state = Some(SessionState::TcpRead(*fd, *ptr, *sz as usize)); return ExecutionResult::Continue; }, _ => { panic!("got host error: {:?}", err) } } }, _ => { panic!("got trap: {:?}", t); } }, Ok(_) => if self .instance .as_mut() .map(|instance| { println!( "set up response: {:?}", instance.state.borrow().prepared_response ); instance .state .borrow() .prepared_response .status_code .is_some() && instance.state.borrow().prepared_response.body.is_some() }) .unwrap_or(false) { self.client.interest.insert(Ready::writable()); return ExecutionResult::Continue } } ExecutionResult::Continue } pub fn create_instance(&mut self) -> ExecutionResult { let method = 
self.method.as_ref().unwrap(); let path = self.path.as_ref().unwrap(); if let Some((func_name, module, ref opt_env)) = self.config.borrow().route(method, path) { let mut env = host::State::new(); if let Some(h) = opt_env { env.db.extend( h.iter() .map(|(ref k, ref v)| (k.to_string(), v.to_string())), ); } let env = Rc::new(RefCell::new(env)); self.env = Some(env.clone()); let resolver = host::StateResolver { inner: env.clone() }; let main = ModuleInstance::new(&module, &ImportsBuilder::new().with_resolver("env", &resolver)) .expect("Failed to instantiate module") .assert_no_start(); if let Some(ExternVal::Func(func_ref)) = main.export_by_name(func_name) { let instance = WasmInstance::new(env, &func_ref, &[]); self.instance = Some(instance); ExecutionResult::Continue } else { println!("function not found"); self .client .stream .write(b"HTTP/1.1 404 Not Found\r\nContent-length: 19\r\n\r\nFunction not found\n"); self.client.stream.shutdown(Shutdown::Both); self.client.interest = UnixReady::from(Ready::empty()); ExecutionResult::Close(vec![self.client.index]) } } else { println!("route not found"); self .client .stream .write(b"HTTP/1.1 404 Not Found\r\nContent-length: 16\r\n\r\nRoute not found\n"); self.client.stream.shutdown(Shutdown::Both); self.client.interest = UnixReady::from(Ready::empty()); ExecutionResult::Close(vec![self.client.index]) } } pub fn process_events(&mut self, token: usize, events: Ready) -> bool { println!("client[{}]: token {} got events {:?}", self.client.index, token, events); if token == self.client.index { self.client.readiness = self.client.readiness | UnixReady::from(events); self.client.readiness & self.client.interest != UnixReady::from(Ready::empty()) } else { if let Some(ref mut stream) = self.backends.get_mut(&token) { println!("state: {:?}", self.state); if self.state == Some(SessionState::WaitingForBackendConnect(token)) { self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I32(token as i32))); 
self.state = Some(SessionState::Executing); } stream.readiness.insert(UnixReady::from(events)); stream.readiness & stream.interest != UnixReady::from(Ready::empty()) } else { println!("non existing backend {} got events {:?}", token, events); false } } } pub fn execute(&mut self) -> ExecutionResult { loop { let front_readiness = self.client.readiness & self.client.interest; if front_readiness.is_readable() { let res = self.front_readable(); if res != ExecutionResult::Continue { return res; } } if front_readiness.is_writable() { let res = self.front_writable(); if res != ExecutionResult::Continue { return res; } } let res = self.process(); if res != ExecutionResult::Continue { return res; } } } fn front_readable(&mut self) -> ExecutionResult { if self.state == Some(SessionState::WaitingForRequest) { loop { if self.buffer.offset + self.buffer.len == self.buffer.buf.len() { break; } match self .client .stream .read(&mut self.buffer.buf[self.buffer.offset + self.buffer.len..]) { Ok(0) => { return ExecutionResult::Close(vec![self.client.index]); } Ok(sz) => { self.buffer.len += sz; } Err(e) => { if e.kind() == ErrorKind::WouldBlock { self.client.readiness.remove(Ready::readable()); break; } } } } ExecutionResult::Continue } else { ExecutionResult::Close(vec![self.client.index]) } } fn process(&mut self) -> ExecutionResult { println!("[{}] process", self.client.index); let state = self.state.take().unwrap(); match state { SessionState::WaitingForRequest => { let (method, path) = { let mut headers = [httparse::Header { name: "", value: &[], }; 16]; let mut req = httparse::Request::new(&mut headers); match req.parse(&self.buffer.buf[self.buffer.offset..self.buffer.len]) { Err(e) => { println!("http parsing error: {:?}", e); self.state = Some(SessionState::WaitingForRequest); return ExecutionResult::Close(vec![self.client.index]); } Ok(httparse::Status::Partial) => { self.state = Some(SessionState::WaitingForRequest); return ExecutionResult::Continue; } 
Ok(httparse::Status::Complete(sz)) => { self.buffer.offset += sz; println!("got request: {:?}", req); ( req.method.unwrap().to_string(), req.path.unwrap().to_string(), ) } } }; self.client.interest.remove(Ready::readable()); self.method = Some(method); self.path = Some(path); self.state = Some(SessionState::Executing); ExecutionResult::Continue }, SessionState::Executing => { if self.instance.is_none() { let res = self.create_instance(); if res != ExecutionResult::Continue { self.state = Some(SessionState::Executing); return res; } } println!("resuming"); self.state = Some(SessionState::Executing); self.resume() }, SessionState::TcpRead(fd, ptr, sz) => { let readiness = self.backends[&(fd as usize)].readiness & self.backends[&(fd as usize)].interest; println!("tcpread({}): readiness: {:?}", fd, readiness); if readiness.is_readable() { let mut buffer = Vec::with_capacity(sz as usize); buffer.extend(repeat(0).take(sz as usize)); let mut read = 0usize; loop { match self.backends.get_mut(&(fd as usize)).unwrap().stream.read(&mut buffer[read..]) { Ok(0) => { println!("read 0"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::readable())); self.env.as_mut().map(|env| env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, Ok(sz) => { read += sz; println!("read {} bytes", read); if read == sz { self.env.as_mut().map(|env| env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } }, Err(e) => match e.kind() { ErrorKind::WouldBlock => { println!("wouldblock"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::readable())); self.env.as_mut().map(|env| 
env.borrow_mut().write_buf(ptr, &buffer[..read])); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(read as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, e => { println!("backend socket error: {:?}", e); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(-1))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } } } } } else { self.state = Some(SessionState::TcpRead(fd, ptr, sz)); ExecutionResult::WouldBlock } }, SessionState::TcpWrite(fd, buffer, mut written) => { let readiness = self.backends[&(fd as usize)].readiness & self.backends[&(fd as usize)].interest; if readiness.is_writable() { loop { match self.backends.get_mut(&(fd as usize)).unwrap().stream.write(&buffer[written..]) { Ok(0) => { self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::writable())); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(written as i64))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; }, Ok(sz) => { written += sz; println!("wrote {} bytes", sz); if written == buffer.len() { self.state = Some(SessionState::Executing); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(written as i64))); return ExecutionResult::Continue; } }, Err(e) => match e.kind() { ErrorKind::WouldBlock => { println!("wouldblock"); self.backends.get_mut(&(fd as usize)).map(|backend| backend.readiness.remove(Ready::writable())); self.state = Some(SessionState::TcpWrite(fd, buffer, written)); return ExecutionResult::Continue; }, e => { println!("backend socket error: {:?}", e); self.instance.as_mut().map(|instance| instance.add_function_result(RuntimeValue::I64(-1))); self.state = Some(SessionState::Executing); return ExecutionResult::Continue; } } } } } else { self.state = Some(SessionState::TcpWrite(fd, buffer, written)); 
ExecutionResult::WouldBlock } }, SessionState::WaitingForBackendConnect(_) => { panic!("should not have called execute() in WaitingForBackendConnect"); }, SessionState::Done => { panic!("done"); } } } fn front_writable(&mut self) -> ExecutionResult { println!("[{}] front writable", self.client.index); let response = self .instance .as_mut() .map(|instance| instance.state.borrow().prepared_response.clone()) .unwrap(); self .client .stream .write_fmt(format_args!("HTTP/1.1 {} {}\r\n", response.status_code.unwrap(), response.reason.unwrap())); for header in response.headers.iter() { self .client .stream .write_fmt(format_args!("{}: {}\r\n", header.0, header.1)); } self.client.stream.write(b"\r\n"); self.client.stream.write(&response.body.unwrap()[..]); ExecutionResult::Close(vec![self.client.index]) } }
ackends: HashMap::new(), instance: None, config, buffer, state: Some(SessionState::WaitingForRequest), method: None, path: None, env: None, } }
function_block-function_prefixed
[ { "content": "pub fn server(config: Config) {\n\n let state = ApplicationState::new(&config);\n\n\n\n let addr = (&config.listen_address).parse().unwrap();\n\n let server = TcpListener::bind(&addr).unwrap();\n\n\n\n let mut poll = Poll::new().unwrap();\n\n\n\n poll\n\n .register(&server, SERVER, Ready::readable(), PollOpt::edge())\n\n .unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let state = Rc::new(RefCell::new(state));\n\n let mut connections = Slab::with_capacity(1024);\n\n let mut ready = VecDeque::new();\n\n\n\n loop {\n\n poll.poll(&mut events, None).unwrap();\n", "file_path": "src/async/mod.rs", "rank": 0, "score": 104629.81805014657 }, { "content": "pub fn server(config: Config) {\n\n let state = ApplicationState::new(&config);\n\n\n\n rouille::start_server(&config.listen_address, move |request| {\n\n if let Some((func_name, module, ref opt_env)) = state.route(request.method(), &request.url()) {\n\n let mut env = host::State::new();\n\n if let Some(h) = opt_env {\n\n env.db.extend(\n\n h.iter()\n\n .map(|(ref k, ref v)| (k.to_string(), v.to_string())),\n\n );\n\n }\n\n let main = ModuleInstance::new(&module, &ImportsBuilder::new().with_resolver(\"env\", &env))\n\n .expect(\"Failed to instantiate module\")\n\n .assert_no_start();\n\n\n\n let mut response = env.prepared_response.clone();\n\n if let Some(ExternVal::Func(func_ref)) = main.export_by_name(func_name) {\n\n let mut instance: WasmInstance<host::State, host::SyncHost> = WasmInstance::new(Rc::new(RefCell::new(env)), &func_ref, &[]);\n\n let res = instance.resume().map_err(|t| Error::Trap(t));\n", "file_path": "src/sync/mod.rs", "rank": 1, "score": 104629.81805014657 }, { "content": "pub fn server(config: Config) {\n\n for app in config.applications.iter() {\n\n println!(\"loading {}:{} at '{} {}'\", app.file_path, app.function, app.method, app.url_path);\n\n if let Ok(mut file) = File::open(&app.file_path) {\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut 
data);\n\n\n\n //let mut env = DummyEnvironment::default();\n\n\n\n let mut env = env::Env::new();\n\n\n\n translate_module(&data, &mut env).unwrap();\n\n\n\n //let func_env = env.func_env();\n\n //println!(\"bytecode:\\n{:?}\", env.func_bytecode_sizes);\n\n }\n\n }\n\n}\n", "file_path": "src/jit/mod.rs", "rank": 2, "score": 104629.81805014657 }, { "content": "pub fn load(file: &str) -> Option<Config> {\n\n if let Ok(mut file) = File::open(file) {\n\n let mut contents = String::new();\n\n if let Ok(_) = file.read_to_string(&mut contents) {\n\n return toml::from_str(&contents)\n\n .map_err(|e| {\n\n println!(\"configuration deserialization error: {:?}\", e);\n\n e\n\n })\n\n .ok();\n\n }\n\n }\n\n None\n\n}\n\n\n\npub struct ApplicationState {\n\n /// (method, url path) -> (function name, module path, env)\n\n pub routes: HashMap<(String, String), (String, String, Option<HashMap<String, String>>)>,\n\n /// module path -> Module\n\n pub modules: HashMap<String, Module>,\n", "file_path": "src/config.rs", "rank": 3, "score": 94092.17470841676 }, { "content": "fn get_func_name(func_index: FunctionIndex) -> ir::ExternalName {\n\n ExternalName::user(0, func_index as u32)\n\n}\n\n\n\nimpl<'data> ModuleEnvironment<'data> for Env {\n\n fn flags(&self) -> &Flags {\n\n &self.info.flags\n\n }\n\n\n\n fn get_func_name(&self, func_index: FunctionIndex) -> ExternalName {\n\n get_func_name(func_index)\n\n }\n\n\n\n fn declare_signature(&mut self, sig: &Signature) {\n\n self.info.signatures.push(sig.clone());\n\n }\n\n\n\n fn get_signature(&self, sig_index: SignatureIndex) -> &Signature {\n\n &self.info.signatures[sig_index]\n\n }\n", "file_path": "src/jit/env.rs", "rank": 4, "score": 65544.71249801577 }, { "content": "pub fn my_run_interpreter_loop<E>(\n\n interpreter: &mut Interpreter<E>,\n\n function_stack: &mut VecDeque<FunctionContext>,\n\n) -> Result<Option<RuntimeValue>, Trap>\n\nwhere\n\n E: Externals,\n\n{\n\n loop {\n\n let mut function_context = function_stack\n\n 
.pop_back()\n\n .expect(\"on loop entry - not empty; on loop continue - checking for emptiness; qed\");\n\n let function_ref = function_context.function.clone();\n\n let function_body = function_ref\n\n .body()\n\n .expect(\"Host functions checked in function_return below; Internal functions always have a body; qed\");\n\n if !function_context.is_initialized() {\n\n let return_type = function_context.return_type;\n\n function_context.initialize(&function_body.locals);\n\n function_context\n\n .push_frame(&function_body.labels, BlockFrameType::Function, return_type)\n", "file_path": "src/interpreter.rs", "rank": 5, "score": 59757.299621954044 }, { "content": "pub fn start(file: &str) {\n\n let module = load_module(file, \"handle\");\n\n let mut env = host::SyncHost::new();\n\n let main = ModuleInstance::new(&module, &ImportsBuilder::new().with_resolver(\"env\", &env))\n\n .expect(\"Failed to instantiate module\")\n\n .assert_no_start();\n\n\n\n println!(\n\n \"Result: {:?}\",\n\n main.invoke_export(\"handle\", &[], &mut env)\n\n );\n\n}\n\n*/\n", "file_path": "src/sync/mod.rs", "rank": 6, "score": 58524.250772180625 }, { "content": "pub fn log(s: &str) {\n\n unsafe { sys::log(s.as_ptr(), s.len() as u64) };\n\n}\n\n\n\npub mod db {\n\n use super::sys;\n\n use std::iter::repeat;\n\n\n\n pub fn get(key: &str) -> Option<String> {\n\n let mut empty = vec![];\n\n let read_sz = unsafe {\n\n sys::db_get(key.as_ptr(), key.len() as u64, (&mut empty).as_mut_ptr(), empty.len() as u64)\n\n };\n\n\n\n if read_sz < 0 {\n\n return None;\n\n } else if read_sz == 0 {\n\n return Some(String::new());\n\n }\n\n\n", "file_path": "serverless-api/src/lib.rs", "rank": 7, "score": 58524.250772180625 }, { "content": "pub fn load_module(file: &str, func_name: &str) -> Module {\n\n let module = parity_wasm::deserialize_file(file).expect(\"File to be deserialized\");\n\n\n\n // Extracts call arguments from command-line arguments\n\n let _args = {\n\n // Export section has an entry with a 
func_name with an index inside a module\n\n let export_section = module.export_section().expect(\"No export section found\");\n\n // It's a section with function declarations (which are references to the type section entries)\n\n let function_section = module\n\n .function_section()\n\n .expect(\"No function section found\");\n\n // Type section stores function types which are referenced by function_section entries\n\n let type_section = module.type_section().expect(\"No type section found\");\n\n\n\n // Given function name used to find export section entry which contains\n\n // an `internal` field which points to the index in the function index space\n\n let found_entry = export_section\n\n .entries()\n\n .iter()\n\n .find(|entry| func_name == entry.field())\n", "file_path": "src/interpreter.rs", "rank": 8, "score": 49014.297041175916 }, { "content": "pub fn create_stack(func: &FuncRef, args: &[RuntimeValue]) -> VecDeque<FunctionContext> {\n\n let context = FunctionContext::new(\n\n func.clone(),\n\n DEFAULT_VALUE_STACK_LIMIT,\n\n DEFAULT_FRAME_STACK_LIMIT,\n\n func.signature(),\n\n args.into_iter().cloned().collect(),\n\n );\n\n\n\n let mut function_stack = VecDeque::new();\n\n function_stack.push_back(context);\n\n\n\n function_stack\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 9, "score": 43413.14251298363 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct HostErrorWithCode {\n\n error_code: u32,\n\n}\n\n\n\nimpl ::std::fmt::Display for HostErrorWithCode {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {\n\n write!(f, \"{}\", self.error_code)\n\n }\n\n}\n\n\n\nimpl HostError for HostErrorWithCode {}\n\n\n\n#[derive(Clone)]\n\npub struct PreparedResponse {\n\n pub status_code: Option<u16>,\n\n pub headers: Vec<(String, String)>,\n\n pub body: Option<Vec<u8>>,\n\n}\n\n\n\nimpl PreparedResponse {\n", "file_path": "src/sync/host.rs", "rank": 10, "score": 39878.82699592608 }, { "content": "fn main() {\n\n let 
args: Vec<_> = args().collect();\n\n if args.len() != 2 {\n\n println!(\"Usage: {} <config_file>\", args[0]);\n\n return;\n\n }\n\n\n\n if let Some(config) = config::load(&args[1]) {\n\n async::server(config);\n\n } else {\n\n println!(\"invalid configuration\");\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 11, "score": 36321.7613828305 }, { "content": "pub trait Host {\n\n type State;\n\n\n\n fn build(s: Rc<RefCell<Self::State>>) -> Self;\n\n}\n\n\n\npub struct WasmInstance<S, E: Externals + Host<State = S>> {\n\n pub state: Rc<RefCell<S>>,\n\n pub stack: VecDeque<FunctionContext>,\n\n _marker: marker::PhantomData<E>,\n\n}\n\n\n\nimpl<S, E: Externals + Host<State = S>> WasmInstance<S, E> {\n\n pub fn new(state: Rc<RefCell<S>>, func_ref: &FuncRef, args: &[RuntimeValue]) -> WasmInstance<S, E> {\n\n let stack = create_stack(&func_ref, args);\n\n\n\n WasmInstance {\n\n state: state,\n\n stack,\n\n _marker: marker::PhantomData,\n", "file_path": "src/interpreter.rs", "rank": 12, "score": 34054.719700916496 }, { "content": "pub trait HostBuilder<'a, S> {\n\n fn build(s: &'a mut S) -> Self;\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 13, "score": 30301.668491754375 }, { "content": "}\n\n\n\nimpl ApplicationState {\n\n pub fn new(config: &Config) -> ApplicationState {\n\n let mut routes = HashMap::new();\n\n let mut modules = HashMap::new();\n\n\n\n for app in config.applications.iter() {\n\n //FIXME: it might be good to not panic when we don't find the function in the module\n\n let module = load_module(&app.file_path, &app.function);\n\n\n\n if !modules.contains_key(&app.file_path) {\n\n modules.insert(app.file_path.clone(), module);\n\n }\n\n\n\n routes.insert(\n\n (app.method.clone(), app.url_path.clone()),\n\n (app.function.clone(), app.file_path.clone(), app.env.clone()),\n\n );\n\n }\n", "file_path": "src/config.rs", "rank": 14, "score": 27954.328800519706 }, { "content": "use interpreter::load_module;\n\nuse std::collections::HashMap;\n\nuse 
std::fs::File;\n\nuse std::io::Read;\n\nuse toml;\n\nuse wasmi::Module;\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct WasmApp {\n\n pub file_path: String,\n\n pub method: String,\n\n pub url_path: String,\n\n pub function: String,\n\n pub env: Option<HashMap<String, String>>,\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct Config {\n\n pub listen_address: String,\n\n pub applications: Vec<WasmApp>,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 15, "score": 27949.52732567961 }, { "content": "\n\n ApplicationState {\n\n routes: routes,\n\n modules: modules,\n\n }\n\n }\n\n\n\n pub fn route(&self, method: &str, url: &str) -> Option<(&str, &Module, &Option<HashMap<String, String>>)> {\n\n if let Some((func_name, module_path, ref opt_env)) = self.routes.get(&(method.to_string(), url.to_string())) {\n\n if let Some(module) = self.modules.get(module_path) {\n\n return Some((func_name, module, opt_env));\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 16, "score": 27945.82254356216 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\npub struct FuncEnv<'env> {\n\n pub mod_info: &'env ModuleInfo,\n\n}\n\n\n\nimpl<'env> FuncEnv<'env> {\n\n pub fn new(mod_info: &'env ModuleInfo) -> Self {\n\n Self { mod_info }\n\n }\n\n\n\n // Create a signature for `sigidx` amended with a `vmctx` argument after the standard wasm\n\n // arguments.\n\n fn vmctx_sig(&self, sigidx: SignatureIndex) -> ir::Signature {\n\n let mut sig = self.mod_info.signatures[sigidx].clone();\n\n sig.params.push(ir::AbiParam::special(\n\n self.native_pointer(),\n\n ir::ArgumentPurpose::VMContext,\n", "file_path": "src/jit/env.rs", "rank": 17, "score": 25771.180374762072 }, { "content": " ));\n\n sig\n\n }\n\n}\n\n\n\nimpl<'env> FuncEnvironment for FuncEnv<'env> {\n\n fn flags(&self) -> &settings::Flags {\n\n &self.mod_info.flags\n\n }\n\n\n\n fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalValue {\n\n // Just create a dummy `vmctx` global.\n\n 
let offset = ((index * 8) as i32 + 8).into();\n\n let gv = func.create_global_var(ir::GlobalVarData::VMContext { offset });\n\n GlobalValue::Memory {\n\n gv,\n\n ty: self.mod_info.globals[index].entity.ty,\n\n }\n\n }\n\n\n", "file_path": "src/jit/env.rs", "rank": 18, "score": 25771.112323914367 }, { "content": " fn declare_data_initialization(\n\n &mut self,\n\n memory_index: MemoryIndex,\n\n base: Option<GlobalIndex>,\n\n offset: usize, \n\n data: &'data [u8]\n\n ) {\n\n println!(\"declaring data init for memory n°{}, base {:?}, offset {}, data: {:?}\",\n\n memory_index, base, offset, data.len());\n\n }\n\n\n\n fn declare_func_export(\n\n &mut self,\n\n func_index: FunctionIndex,\n\n name: &'data str\n\n ) {\n\n println!(\"exporting function n°{} at '{}'\", func_index, name);\n\n self.info.functions[func_index].export_names.push(\n\n String::from(name)\n\n )\n", "file_path": "src/jit/env.rs", "rank": 19, "score": 25770.548747752888 }, { "content": "use cretonne_wasm::{\n\n ModuleEnvironment, GlobalIndex, MemoryIndex, TableIndex,\n\n FunctionIndex, Table, Memory, Global, SignatureIndex,\n\n FuncTranslator, FuncEnvironment, GlobalValue\n\n};\n\nuse cretonne::prelude::{settings::{self, Flags}, types::*, InstBuilder, Signature};\n\nuse cretonne::codegen::{\n\n ir::{self, ExternalName, Function},\n\n cursor::FuncCursor\n\n};\n\n\n\npub struct Exportable<T> {\n\n /// A wasm entity.\n\n pub entity: T,\n\n\n\n /// Names under which the entity is exported.\n\n pub export_names: Vec<String>,\n\n}\n\n\n\nimpl<T> Exportable<T> {\n", "file_path": "src/jit/env.rs", "rank": 20, "score": 25768.797325204545 }, { "content": " fn declare_table(&mut self, table: Table) {\n\n self.info.tables.push(Exportable::new(table));\n\n }\n\n\n\n fn declare_table_elements(\n\n &mut self,\n\n table_index: TableIndex,\n\n base: Option<GlobalIndex>,\n\n offset: usize,\n\n elements: Vec<FunctionIndex>\n\n ) {\n\n //println!(\"declaring table elements at table n°{} base {:?} offset {}:{:?}\", 
table_index, base, offset, elements);\n\n }\n\n\n\n fn declare_memory(&mut self, memory: Memory) {\n\n println!(\"declaring new memory zone, min: {}, max: {:?}, shared: {}\", memory.pages_count, memory.maximum,\n\n memory.shared);\n\n self.info.memories.push(Exportable::new(memory));\n\n }\n\n\n", "file_path": "src/jit/env.rs", "rank": 21, "score": 25768.094312628444 }, { "content": " self.info.start_func = Some(index);\n\n }\n\n\n\n fn define_function_body(\n\n &mut self,\n\n body_bytes: &'data [u8]\n\n ) -> Result<(), String> {\n\n let func = {\n\n let mut func_environ = FuncEnv::new(&self.info);\n\n let function_index = self.get_num_func_imports() + self.info.function_bodies.len();\n\n let name = get_func_name(function_index);\n\n let sig = func_environ.vmctx_sig(self.get_func_type(function_index));\n\n let mut func = Function::with_name_signature(name, sig);\n\n self.trans\n\n .translate(body_bytes, &mut func, &mut func_environ)\n\n .map_err(|e| format!(\"{}\", e))?;\n\n func\n\n };\n\n\n\n self.info.function_bodies.push(func);\n", "file_path": "src/jit/env.rs", "rank": 22, "score": 25767.27649564557 }, { "content": " fn get_num_func_imports(&self) -> usize {\n\n self.info.imported_funcs.len()\n\n }\n\n\n\n fn declare_func_type(&mut self, sig_index: SignatureIndex) {\n\n self.info.functions.push(Exportable::new(sig_index));\n\n }\n\n\n\n fn get_func_type(&self, func_index: FunctionIndex) -> SignatureIndex {\n\n self.info.functions[func_index].entity\n\n }\n\n\n\n fn declare_global(&mut self, global: Global) {\n\n self.info.globals.push(Exportable::new(global));\n\n }\n\n\n\n fn get_global(&self, global_index: GlobalIndex) -> &Global {\n\n &self.info.globals[global_index].entity\n\n }\n\n\n", "file_path": "src/jit/env.rs", "rank": 23, "score": 25767.233702186917 }, { "content": "\n\nimpl Env {\n\n pub fn new() -> Env {\n\n Env {\n\n info: ModuleInfo::new(),\n\n trans: FuncTranslator::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/jit/env.rs", "rank": 24, 
"score": 25767.098039267643 }, { "content": " fn make_heap(&mut self, func: &mut ir::Function, _index: MemoryIndex) -> ir::Heap {\n\n // Create a static heap whose base address is stored at `vmctx+0`.\n\n let gv = func.create_global_var(ir::GlobalVarData::VMContext { offset: 0.into() });\n\n\n\n func.create_heap(ir::HeapData {\n\n base: ir::HeapBase::GlobalVar(gv),\n\n min_size: 0.into(),\n\n guard_size: 0x8000_0000.into(),\n\n style: ir::HeapStyle::Static { bound: 0x1_0000_0000.into() },\n\n })\n\n }\n\n\n\n fn make_indirect_sig(&mut self, func: &mut ir::Function, index: SignatureIndex) -> ir::SigRef {\n\n // A real implementation would probably change the calling convention and add `vmctx` and\n\n // signature index arguments.\n\n func.import_signature(self.vmctx_sig(index))\n\n }\n\n\n\n fn make_direct_func(&mut self, func: &mut ir::Function, index: FunctionIndex) -> ir::FuncRef {\n\n let sigidx = self.mod_info.functions[index].entity;\n", "file_path": "src/jit/env.rs", "rank": 25, "score": 25765.388329677615 }, { "content": "\n\n fn declare_func_import(\n\n &mut self,\n\n sig_index: SignatureIndex,\n\n module: &'data str,\n\n field: &'data str\n\n ) {\n\n assert_eq!(\n\n self.info.functions.len(),\n\n self.info.imported_funcs.len(),\n\n \"Imported functions must be declared first\"\n\n );\n\n self.info.functions.push(Exportable::new(sig_index));\n\n self.info.imported_funcs.push((\n\n String::from(module),\n\n String::from(field),\n\n ));\n\n println!(\"declared function import {}:{}\", module, field);\n\n }\n\n\n", "file_path": "src/jit/env.rs", "rank": 26, "score": 25765.281838899547 }, { "content": "impl ModuleInfo {\n\n pub fn new() -> ModuleInfo {\n\n ModuleInfo {\n\n flags: settings::Flags::new(settings::builder()),\n\n signatures: Vec::new(),\n\n imported_funcs: Vec::new(),\n\n functions: Vec::new(),\n\n function_bodies: Vec::new(),\n\n memories: Vec::new(),\n\n tables: Vec::new(),\n\n globals: Vec::new(),\n\n start_func: None,\n\n }\n\n 
}\n\n}\n\n\n\npub struct Env {\n\n pub info: ModuleInfo,\n\n trans: FuncTranslator,\n\n}\n", "file_path": "src/jit/env.rs", "rank": 27, "score": 25764.727955933075 }, { "content": " let vmctx = pos.func\n\n .special_param(ir::ArgumentPurpose::VMContext)\n\n .expect(\"Missing vmctx parameter\");\n\n\n\n // Build a value list for the call instruction containing the call_args and the vmctx\n\n // parameter.\n\n let mut args = ir::ValueList::default();\n\n args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);\n\n args.push(vmctx, &mut pos.func.dfg.value_lists);\n\n\n\n pos.ins().Call(ir::Opcode::Call, VOID, callee, args).0\n\n }\n\n\n\n fn translate_grow_memory(\n\n &mut self,\n\n mut pos: FuncCursor,\n\n _index: MemoryIndex,\n\n _heap: ir::Heap,\n\n _val: ir::Value,\n\n ) -> ir::Value {\n", "file_path": "src/jit/env.rs", "rank": 28, "score": 25764.46394028741 }, { "content": " // Build a value list for the indirect call instruction containing the callee, call_args,\n\n // and the vmctx parameter.\n\n let mut args = ir::ValueList::default();\n\n args.push(func_ptr, &mut pos.func.dfg.value_lists);\n\n args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);\n\n args.push(vmctx, &mut pos.func.dfg.value_lists);\n\n\n\n pos.ins()\n\n .CallIndirect(ir::Opcode::CallIndirect, VOID, sig_ref, args)\n\n .0\n\n }\n\n\n\n fn translate_call(\n\n &mut self,\n\n mut pos: FuncCursor,\n\n _callee_index: FunctionIndex,\n\n callee: ir::FuncRef,\n\n call_args: &[ir::Value],\n\n ) -> ir::Inst {\n\n // Pass the current function's vmctx parameter on to the callee.\n", "file_path": "src/jit/env.rs", "rank": 29, "score": 25764.36174177803 }, { "content": " }\n\n\n\n fn declare_table_export(\n\n &mut self,\n\n table_index: TableIndex,\n\n name: &'data str\n\n ) { unimplemented!() }\n\n fn declare_memory_export(\n\n &mut self,\n\n memory_index: MemoryIndex,\n\n name: &'data str\n\n ) { unimplemented!() }\n\n fn declare_global_export(\n\n &mut self,\n\n 
global_index: GlobalIndex,\n\n name: &'data str\n\n ) { unimplemented!() }\n\n\n\n fn declare_start_func(&mut self, index: FunctionIndex) {\n\n debug_assert!(self.info.start_func.is_none());\n", "file_path": "src/jit/env.rs", "rank": 30, "score": 25764.289362213472 }, { "content": " pub fn new(entity: T) -> Self {\n\n Self {\n\n entity,\n\n export_names: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\npub struct ModuleInfo {\n\n pub flags: Flags,\n\n pub signatures: Vec<Signature>,\n\n pub imported_funcs: Vec<(String, String)>,\n\n pub functions: Vec<Exportable<SignatureIndex>>,\n\n pub function_bodies: Vec<Function>,\n\n pub memories: Vec<Exportable<Memory>>,\n\n pub tables: Vec<Exportable<Table>>,\n\n pub globals: Vec<Exportable<Global>>,\n\n pub start_func: Option<FunctionIndex>,\n\n}\n\n\n", "file_path": "src/jit/env.rs", "rank": 31, "score": 25763.964203818967 }, { "content": " pos.ins().iconst(I32, -1)\n\n }\n\n\n\n fn translate_current_memory(\n\n &mut self,\n\n mut pos: FuncCursor,\n\n _index: MemoryIndex,\n\n _heap: ir::Heap,\n\n ) -> ir::Value {\n\n pos.ins().iconst(I32, -1)\n\n }\n\n}\n", "file_path": "src/jit/env.rs", "rank": 32, "score": 25763.534458676026 }, { "content": " // A real implementation would probably add a `vmctx` argument.\n\n // And maybe attempt some signature de-duplication.\n\n let signature = func.import_signature(self.vmctx_sig(sigidx));\n\n let name = get_func_name(index);\n\n func.import_function(ir::ExtFuncData {\n\n name,\n\n signature,\n\n colocated: false,\n\n })\n\n }\n\n\n\n fn translate_call_indirect(\n\n &mut self,\n\n mut pos: FuncCursor,\n\n _table_index: TableIndex,\n\n _sig_index: SignatureIndex,\n\n sig_ref: ir::SigRef,\n\n callee: ir::Value,\n\n call_args: &[ir::Value],\n\n ) -> ir::Inst {\n", "file_path": "src/jit/env.rs", "rank": 33, "score": 25763.137805374758 }, { "content": " // Pass the current function's vmctx parameter on to the callee.\n\n let vmctx = pos.func\n\n .special_param(ir::ArgumentPurpose::VMContext)\n\n 
.expect(\"Missing vmctx parameter\");\n\n\n\n // The `callee` value is an index into a table of function pointers.\n\n // Apparently, that table is stored at absolute address 0 in this dummy environment.\n\n // TODO: Generate bounds checking code.\n\n let ptr = self.native_pointer();\n\n let callee_offset = if ptr == I32 {\n\n pos.ins().imul_imm(callee, 4)\n\n } else {\n\n let ext = pos.ins().uextend(I64, callee);\n\n pos.ins().imul_imm(ext, 4)\n\n };\n\n let mut mflags = ir::MemFlags::new();\n\n mflags.set_notrap();\n\n mflags.set_aligned();\n\n let func_ptr = pos.ins().load(ptr, mflags, callee_offset, 0);\n\n\n", "file_path": "src/jit/env.rs", "rank": 34, "score": 25762.93983028143 }, { "content": " State {\n\n memory: None,//Some(MemoryInstance::alloc(Pages(3), Some(Pages(100))).unwrap()),\n\n instance: None,\n\n prepared_response: PreparedResponse::new(),\n\n connections: Slab::with_capacity(100),\n\n db: HashMap::new(),\n\n }\n\n }\n\n}\n\nimpl State {\n\n pub fn get_buf(&mut self, ptr: u32, size: usize) -> Option<Vec<u8>> {\n\n self.memory.as_ref().and_then(|mref| {\n\n mref.get(ptr, size).map_err(|e| println!(\"get buf error: {:?}\", e)).ok()\n\n })\n\n }\n\n\n\n pub fn write_buf(&mut self, ptr: u32, data: &[u8]) {\n\n self.memory.as_ref().map(|m| m.set(ptr, data));\n\n }\n\n}\n", "file_path": "src/async/host.rs", "rank": 59, "score": 22.70278297163739 }, { "content": " session::ExecutionResult::Close(tokens) => {\n\n for t in tokens.iter() {\n\n connections.remove(client_token);\n\n }\n\n },\n\n session::ExecutionResult::ConnectBackend(address) => {\n\n let client = connections.get(client_token).unwrap().clone();\n\n\n\n match connections.vacant_entry() {\n\n None => {\n\n println!(\"error: no more room for new connections\");\n\n }\n\n Some(entry) => {\n\n let index = entry.index();\n\n let stream = TcpStream::connect(&address).unwrap();\n\n poll.register(\n\n &stream,\n\n Token(index + 1),\n\n Ready::readable() | Ready::writable() | 
Ready::from(UnixReady::hup() | UnixReady::error()),\n\n PollOpt::edge(),\n", "file_path": "src/async/mod.rs", "rank": 60, "score": 21.374535853844936 }, { "content": " }\n\n}\n\n\n\npub struct TcpStream {\n\n fd: i32\n\n}\n\n\n\nimpl TcpStream {\n\n pub fn connect(address: &str) -> Option<TcpStream> {\n\n let fd = unsafe { sys::tcp_connect(address.as_ptr(), address.len() as u64) };\n\n if fd < 0 {\n\n None\n\n } else {\n\n Some(TcpStream { fd })\n\n }\n\n }\n\n\n\n pub fn write(&mut self, data: &[u8]) -> Option<usize> {\n\n let res = unsafe { sys::tcp_write(self.fd, data.as_ptr(), data.len() as u64) };\n\n if res < 0 {\n", "file_path": "serverless-api/src/lib.rs", "rank": 61, "score": 17.89470926140583 }, { "content": " println!(\n\n \"non existing token {:?} got events {:?}\",\n\n client_token,\n\n event.readiness()\n\n );\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n for client_token in ready.drain(..) {\n\n let mut cont = session::ExecutionResult::Continue;\n\n if let Some(ref mut client) = connections.get_mut(client_token) {\n\n cont = client.borrow_mut().execute();\n\n } else {\n\n println!(\"non existing token {:?} was marked as ready\", client_token);\n\n }\n\n\n\n match cont {\n", "file_path": "src/async/mod.rs", "rank": 62, "score": 17.485078072715176 }, { "content": " }\n\n }\n\n TCP_READ => {\n\n let fd: i32 = args.nth(0);\n\n let ptr: u32 = args.nth(1);\n\n let sz: u64 = args.nth(2);\n\n let mut v = Vec::with_capacity(sz as usize);\n\n v.extend(repeat(0).take(sz as usize));\n\n let mut state = self.inner.borrow_mut();\n\n if let Ok(sz) = state.connections[fd as usize].read(&mut v) {\n\n state.memory.as_ref().map(|m| m.set(ptr, &v[..sz]));\n\n\n\n Ok(Some(RuntimeValue::I64(sz as i64)))\n\n } else {\n\n Ok(Some(RuntimeValue::I64(-1)))\n\n }\n\n }\n\n TCP_WRITE => {\n\n let fd: i32 = args.nth(0);\n\n let ptr: u32 = args.nth(1);\n", "file_path": "src/sync/host.rs", "rank": 63, "score": 17.438971592225563 }, { "content": " state.clone(),\n\n 
sock,\n\n index,\n\n )));\n\n entry.insert(client);\n\n }\n\n }\n\n }\n\n }\n\n Token(i) => {\n\n let client_token = i - 1;\n\n\n\n if let Some(ref mut client) = connections.get_mut(client_token) {\n\n if client\n\n .borrow_mut()\n\n .process_events(client_token, event.readiness())\n\n {\n\n ready.push_back(client_token);\n\n }\n\n } else {\n", "file_path": "src/async/mod.rs", "rank": 64, "score": 17.185212981394038 }, { "content": " TcpWrite(i32, u32, u64, usize),\n\n}\n\n\n\nimpl ::std::fmt::Display for AsyncHostError {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\nimpl HostError for AsyncHostError {}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct PreparedResponse {\n\n pub status_code: Option<u16>,\n\n pub reason: Option<String>,\n\n pub headers: Vec<(String, String)>,\n\n pub body: Option<Vec<u8>>,\n\n}\n\n\n\nimpl PreparedResponse {\n", "file_path": "src/async/host.rs", "rank": 65, "score": 16.520851196675878 }, { "content": "\n\n/// log(ptr: *mut u8, size: u64)\n\n///\n\n/// Returns value at the given address in memory. 
This function\n\n/// requires attached memory.\n\nconst LOG_INDEX: usize = 0;\n\n\n\nconst RESPONSE_SET_STATUS_LINE: usize = 1;\n\nconst RESPONSE_SET_HEADER: usize = 2;\n\nconst RESPONSE_SET_BODY: usize = 3;\n\nconst TCP_CONNECT: usize = 4;\n\nconst TCP_READ: usize = 5;\n\nconst TCP_WRITE: usize = 6;\n\nconst DB_GET: usize = 7;\n\n\n\nimpl Externals for SyncHost {\n\n fn invoke_index(&mut self, index: usize, args: RuntimeArgs) -> Result<Option<RuntimeValue>, Trap> {\n\n match index {\n\n LOG_INDEX => {\n\n let ptr: u32 = args.nth(0);\n", "file_path": "src/sync/host.rs", "rank": 66, "score": 16.077486857672994 }, { "content": "const RESPONSE_SET_STATUS_LINE: usize = 1;\n\nconst RESPONSE_SET_HEADER: usize = 2;\n\nconst RESPONSE_SET_BODY: usize = 3;\n\nconst TCP_CONNECT: usize = 4;\n\nconst TCP_READ: usize = 5;\n\nconst TCP_WRITE: usize = 6;\n\nconst DB_GET: usize = 7;\n\n\n\nimpl Externals for AsyncHost {\n\n fn invoke_index(&mut self, index: usize, args: RuntimeArgs) -> Result<Option<RuntimeValue>, Trap> {\n\n match index {\n\n LOG_INDEX => {\n\n let ptr: u32 = args.nth(0);\n\n let sz: u64 = args.nth(1);\n\n\n\n let v = self\n\n .inner\n\n .borrow()\n\n .memory\n\n .as_ref()\n", "file_path": "src/async/host.rs", "rank": 67, "score": 15.9923654897114 }, { "content": "use config::{ApplicationState, Config};\n\n\n\nuse mio::*;\n\nuse mio::net::{TcpListener, TcpStream};\n\nuse mio::unix::UnixReady;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::collections::VecDeque;\n\nuse slab::Slab;\n\n\n\nmod host;\n\nmod session;\n\n\n\nconst SERVER: Token = Token(0);\n\n\n", "file_path": "src/async/mod.rs", "rank": 68, "score": 15.588297509059924 }, { "content": " );\n\n client.borrow_mut().add_backend(stream, index);\n\n\n\n entry.insert(client);\n\n }\n\n }\n\n },\n\n _ => {}\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/async/mod.rs", "rank": 69, "score": 15.366867101196089 }, { "content": " None\n\n } else {\n\n Some(res as usize)\n\n }\n\n }\n\n\n\n pub fn 
read(&mut self, data: &mut [u8]) -> Option<usize> {\n\n let res = unsafe { sys::tcp_read(self.fd, data.as_mut_ptr(), data.len() as u64) };\n\n if res < 0 {\n\n None\n\n } else {\n\n Some(res as usize)\n\n }\n\n }\n\n}\n\n\n", "file_path": "serverless-api/src/lib.rs", "rank": 70, "score": 14.607449357441947 }, { "content": " let mut v = Vec::with_capacity(read_sz as usize);\n\n v.extend(repeat(0).take(read_sz as usize));\n\n\n\n let sz = unsafe {\n\n sys::db_get(key.as_ptr(), key.len() as u64, v.as_mut_ptr(), v.len() as u64)\n\n };\n\n\n\n if sz < 0 {\n\n return None;\n\n } else if sz == 0 {\n\n return Some(String::new());\n\n }\n\n\n\n if sz as usize != v.len() {\n\n None\n\n } else {\n\n String::from_utf8(v).ok()\n\n }\n\n }\n\n}\n", "file_path": "serverless-api/src/lib.rs", "rank": 71, "score": 14.477894208601846 }, { "content": "\n\n\n\npub struct AsyncHost {\n\n pub inner: Rc<RefCell<State>>,\n\n}\n\n\n\nimpl Host for AsyncHost {\n\n type State = State;\n\n\n\n fn build(s: Rc<RefCell<Self::State>>) -> Self {\n\n AsyncHost { inner: s }\n\n }\n\n}\n\n\n\n/// log(ptr: *mut u8, size: u64)\n\n///\n\n/// Returns value at the given address in memory. 
This function\n\n/// requires attached memory.\n\nconst LOG_INDEX: usize = 0;\n\n\n", "file_path": "src/async/host.rs", "rank": 72, "score": 14.346540333898053 }, { "content": " v.extend(repeat(0).take(sz as usize));\n\n\n\n let mut state = self.inner.borrow_mut();\n\n if let Ok(sz) = state.connections[fd as usize].read(&mut v) {\n\n state.memory.as_ref().map(|m| m.set(ptr, &v[..sz]));\n\n\n\n Ok(Some(RuntimeValue::I64(sz as i64)))\n\n } else {\n\n Ok(Some(RuntimeValue::I64(-1)))\n\n }\n\n */\n\n }\n\n TCP_WRITE => {\n\n let fd: i32 = args.nth(0);\n\n let ptr: u32 = args.nth(1);\n\n let sz: u64 = args.nth(2);\n\n\n\n let error = AsyncHostError::TcpWrite(fd, ptr, sz, 0);\n\n Err(Trap::new(TrapKind::Host(Box::new(error))))\n\n\n", "file_path": "src/async/host.rs", "rank": 73, "score": 13.89665654636794 }, { "content": "pub struct StateResolver {\n\n pub inner: Rc<RefCell<State>>,\n\n}\n\n\n\nimpl ModuleImportResolver for StateResolver {\n\n fn resolve_func(&self, field_name: &str, signature: &Signature) -> Result<FuncRef, Error> {\n\n let index = match field_name {\n\n \"log\" => LOG_INDEX,\n\n \"response_set_status_line\" => RESPONSE_SET_STATUS_LINE,\n\n \"response_set_header\" => RESPONSE_SET_HEADER,\n\n \"response_set_body\" => RESPONSE_SET_BODY,\n\n \"tcp_connect\" => TCP_CONNECT,\n\n \"tcp_read\" => TCP_READ,\n\n \"tcp_write\" => TCP_WRITE,\n\n \"db_get\" => DB_GET,\n\n _ => {\n\n return Err(Error::Instantiation(format!(\n\n \"Export {} not found\",\n\n field_name\n\n )))\n", "file_path": "src/async/host.rs", "rank": 74, "score": 13.820389577879036 }, { "content": " .borrow()\n\n .memory\n\n .as_ref()\n\n .map(|m| m.set(value_ptr, (&value[..to_write]).as_bytes()));\n\n Ok(Some(RuntimeValue::I64(value.len() as i64)))\n\n }\n\n }\n\n }\n\n _ => panic!(\"env doesn't provide function at index {}\", index),\n\n }\n\n }\n\n}\n\n\n\nimpl State {\n\n fn check_signature(&self, index: usize, signature: &Signature) -> bool {\n\n let (params, ret_ty): (&[ValueType], 
Option<ValueType>) = match index {\n\n LOG_INDEX => (&[ValueType::I32, ValueType::I64], None),\n\n RESPONSE_SET_STATUS_LINE => (&[ValueType::I32, ValueType::I32, ValueType::I64], None),\n\n RESPONSE_SET_HEADER => (\n\n &[\n", "file_path": "src/async/host.rs", "rank": 75, "score": 13.494407283251576 }, { "content": " .borrow()\n\n .memory\n\n .as_ref()\n\n .map(|m| m.set(value_ptr, (&value[..to_write]).as_bytes()));\n\n Ok(Some(RuntimeValue::I64(value.len() as i64)))\n\n }\n\n }\n\n }\n\n _ => panic!(\"env doesn't provide function at index {}\", index),\n\n }\n\n }\n\n}\n\n\n\nimpl State {\n\n fn check_signature(&self, index: usize, signature: &Signature) -> bool {\n\n let (params, ret_ty): (&[ValueType], Option<ValueType>) = match index {\n\n LOG_INDEX => (&[ValueType::I32, ValueType::I64], None),\n\n RESPONSE_SET_STATUS_LINE => (&[ValueType::I32, ValueType::I32, ValueType::I64], None),\n\n RESPONSE_SET_HEADER => (\n\n &[\n", "file_path": "src/sync/host.rs", "rank": 76, "score": 13.494407283251576 }, { "content": " pub fn new() -> PreparedResponse {\n\n PreparedResponse {\n\n status_code: None,\n\n headers: Vec::new(),\n\n body: None,\n\n }\n\n }\n\n}\n\n\n\npub struct State {\n\n memory: Option<MemoryRef>,\n\n instance: Option<ModuleRef>,\n\n pub prepared_response: PreparedResponse,\n\n connections: Slab<TcpStream>,\n\n pub db: HashMap<String, String>,\n\n}\n\n\n\nimpl State {\n\n pub fn new() -> State {\n\n State {\n", "file_path": "src/sync/host.rs", "rank": 77, "score": 13.304248387691569 }, { "content": " memory: Some(MemoryInstance::alloc(Pages(3), Some(Pages(10))).unwrap()),\n\n instance: None,\n\n prepared_response: PreparedResponse::new(),\n\n connections: Slab::with_capacity(100),\n\n db: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\npub struct SyncHost {\n\n pub inner: Rc<RefCell<State>>,\n\n}\n\n\n\nimpl Host for SyncHost {\n\n type State = State;\n\n\n\n fn build(s: Rc<RefCell<Self::State>>) -> Self {\n\n SyncHost { inner: s }\n\n }\n\n}\n", "file_path": 
"src/sync/host.rs", "rank": 78, "score": 13.267837560805402 }, { "content": " }\n\n }\n\n\n\n pub fn resume(&mut self) -> Result<Option<RuntimeValue>, Trap> {\n\n let mut host = E::build(self.state.clone());\n\n let mut interpreter = Interpreter::new(&mut host);\n\n\n\n println!(\"WasmInstance::resume: stack\\n{:?}\", self.stack);\n\n my_run_interpreter_loop(&mut interpreter, &mut self.stack)\n\n }\n\n\n\n pub fn add_function_result(&mut self, return_value: RuntimeValue) {\n\n self.stack.back_mut().map(|function_context| {\n\n function_context.value_stack_mut().push(return_value).expect(\"should have pushed the return value\");\n\n println!(\"adding return value to {:?} initialized: {}\",\n\n function_context.function, function_context.is_initialized);\n\n });\n\n println!(\"added function result {:?}, stack len:{}\", return_value, self.stack.len());\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 79, "score": 13.042752818630573 }, { "content": " pub fn new() -> PreparedResponse {\n\n PreparedResponse {\n\n status_code: None,\n\n reason: None,\n\n headers: Vec::new(),\n\n body: None,\n\n }\n\n }\n\n}\n\n\n\npub struct State {\n\n pub memory: Option<MemoryRef>,\n\n pub instance: Option<ModuleRef>,\n\n pub prepared_response: PreparedResponse,\n\n pub connections: Slab<TcpStream>,\n\n pub db: HashMap<String, String>,\n\n}\n\n\n\nimpl State {\n\n pub fn new() -> State {\n", "file_path": "src/async/host.rs", "rank": 80, "score": 13.039810700144095 }, { "content": "//! 
from https://github.com/paritytech/wasmi/blob/master/src/tests/host.rs\n\n\n\nuse slab::Slab;\n\nuse std::collections::HashMap;\n\nuse std::io::{Read, Write};\n\nuse std::iter::repeat;\n\nuse mio::net::TcpStream;\n\nuse std::net::SocketAddr;\n\nuse std::str;\n\nuse std::cmp;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse wasmi::memory_units::Pages;\n\nuse wasmi::*;\n\nuse interpreter::Host;\n\n\n\n#[derive(Debug)]\n\npub enum AsyncHostError {\n\n Connecting(SocketAddr),\n\n TcpRead(i32, u32, u64),\n", "file_path": "src/async/host.rs", "rank": 81, "score": 13.007072456972047 }, { "content": " Some(mut socket) => {\n\n match socket.write(b\"hello\\n\") {\n\n None => {\n\n body = \"could not write to backend server\".to_string();\n\n },\n\n Some(_) => {\n\n let mut res: [u8; 100] = [0u8; 100];\n\n match socket.read(&mut res) {\n\n None => {\n\n body = \"could not read from backend server\".to_string();\n\n },\n\n Some(sz) => {\n\n api::log(&format!(\"read data from backend: \\\"{:?}\\\"\", str::from_utf8(&res[..sz]).unwrap()));\n\n\n\n body = format!(\"Hello world from wasm!\\nanswer from backend:\\n{}\\n\", str::from_utf8(&res[..sz]).unwrap());\n\n api::response::set_status(200, \"Ok\");\n\n api::response::set_header(\"Content-length\", &body.len().to_string());\n\n api::response::set_body(body.as_bytes());\n\n }\n\n }\n", "file_path": "samples/testbackend/src/lib.rs", "rank": 82, "score": 12.899938921005717 }, { "content": "use config::Config;\n\n\n\nuse cretonne_wasm::{translate_module, DummyEnvironment};\n\nuse std::fs::File;\n\nuse std::io::Read;\n\n\n\nmod env;\n\n\n", "file_path": "src/jit/mod.rs", "rank": 83, "score": 12.831561235216842 }, { "content": "use rouille;\n\nuse wasmi::{Error, ExternVal, ImportsBuilder, ModuleInstance};\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\n\n\nuse config::{ApplicationState, Config};\n\nuse interpreter::WasmInstance;\n\n\n\nmod host;\n\n\n", "file_path": "src/sync/mod.rs", "rank": 84, "score": 12.663878780113082 
}, { "content": " \"tcp_connect\" => TCP_CONNECT,\n\n \"tcp_read\" => TCP_READ,\n\n \"tcp_write\" => TCP_WRITE,\n\n \"db_get\" => DB_GET,\n\n _ => {\n\n return Err(Error::Instantiation(format!(\n\n \"Export {} not found\",\n\n field_name\n\n )))\n\n }\n\n };\n\n\n\n if !self.check_signature(index, signature) {\n\n return Err(Error::Instantiation(format!(\n\n \"Export `{}` doesnt match expected type {:?}\",\n\n field_name, signature\n\n )));\n\n }\n\n\n\n Ok(FuncInstance::alloc_host(signature.clone(), index))\n", "file_path": "src/async/host.rs", "rank": 85, "score": 12.427623396886052 }, { "content": " \"tcp_connect\" => TCP_CONNECT,\n\n \"tcp_read\" => TCP_READ,\n\n \"tcp_write\" => TCP_WRITE,\n\n \"db_get\" => DB_GET,\n\n _ => {\n\n return Err(Error::Instantiation(format!(\n\n \"Export {} not found\",\n\n field_name\n\n )))\n\n }\n\n };\n\n\n\n if !self.check_signature(index, signature) {\n\n return Err(Error::Instantiation(format!(\n\n \"Export `{}` doesnt match expected type {:?}\",\n\n field_name, signature\n\n )));\n\n }\n\n\n\n Ok(FuncInstance::alloc_host(signature.clone(), index))\n", "file_path": "src/sync/host.rs", "rank": 86, "score": 12.427623396886052 }, { "content": "//! 
from https://github.com/paritytech/wasmi/blob/master/src/tests/host.rs\n\n\n\nuse slab::Slab;\n\nuse std::collections::HashMap;\n\nuse std::io::{Read, Write};\n\nuse std::iter::repeat;\n\nuse std::net::TcpStream;\n\nuse std::str;\n\nuse std::cmp;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse wasmi::memory_units::Pages;\n\nuse wasmi::*;\n\nuse interpreter::Host;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n", "file_path": "src/sync/host.rs", "rank": 87, "score": 11.366464938975186 }, { "content": " /*\n\n let buf = self\n\n .inner\n\n .borrow()\n\n .memory\n\n .as_ref()\n\n .expect(\"Function 'inc_mem' expects attached memory\")\n\n .get(ptr, sz as usize)\n\n .unwrap();\n\n\n\n if let Ok(sz) = self.inner.borrow_mut().connections[fd as usize].write(&buf) {\n\n Ok(Some(RuntimeValue::I64(sz as i64)))\n\n } else {\n\n Ok(Some(RuntimeValue::I64(-1)))\n\n }\n\n */\n\n }\n\n DB_GET => {\n\n let key_ptr: u32 = args.nth(0);\n\n let key_sz: u64 = args.nth(1);\n", "file_path": "src/async/host.rs", "rank": 88, "score": 11.348926347897374 }, { "content": " }\n\n };\n\n\n\n if !self.inner.borrow().check_signature(index, signature) {\n\n return Err(Error::Instantiation(format!(\n\n \"Export `{}` doesnt match expected type {:?}\",\n\n field_name, signature\n\n )));\n\n }\n\n\n\n Ok(FuncInstance::alloc_host(signature.clone(), index))\n\n }\n\n\n\n fn resolve_memory(&self, _field_name: &str, _memory_type: &MemoryDescriptor) -> Result<MemoryRef, Error> {\n\n self.inner.borrow_mut().memory = Some(MemoryInstance::alloc(Pages(_memory_type.initial() as usize), Some(Pages(100))).unwrap());\n\n Ok(self.inner.borrow().memory.as_ref().unwrap().clone())\n\n }\n\n}\n", "file_path": "src/async/host.rs", "rank": 89, "score": 11.230585738112854 }, { "content": "use std::str;\n\nextern crate serverless_api as api;\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn handle() {\n\n api::log(\"Hello world with api!\");\n\n let body;\n\n\n\n let key = \"/env/backend\";\n\n match api::db::get(key) {\n\n 
None => {\n\n body = format!(\"could not get value for key {}\", key);\n\n },\n\n Some(address) => {\n\n api::log(&format!(\"connecting to backend at {}\", address));\n\n\n\n match api::TcpStream::connect(&address) {\n\n None => {\n\n body = \"could not connect to backend\".to_string();\n\n },\n", "file_path": "samples/testbackend/src/lib.rs", "rank": 90, "score": 11.199890910258059 }, { "content": " println!(\"got events: {:?}\", events);\n\n\n\n for event in events.iter() {\n\n match event.token() {\n\n SERVER => {\n\n if let Ok((sock, addr)) = server.accept() {\n\n match connections.vacant_entry() {\n\n None => {\n\n println!(\"error: no more room for new connections\");\n\n }\n\n Some(entry) => {\n\n let index = entry.index();\n\n poll.register(\n\n &sock,\n\n Token(index + 1),\n\n Ready::readable() | Ready::writable() | Ready::from(UnixReady::hup() | UnixReady::error()),\n\n PollOpt::edge(),\n\n );\n\n\n\n let client = Rc::new(RefCell::new(session::Session::new(\n", "file_path": "src/async/mod.rs", "rank": 91, "score": 11.079009974066079 }, { "content": " let sz: u64 = args.nth(2);\n\n\n\n let buf = self\n\n .inner\n\n .borrow()\n\n .memory\n\n .as_ref()\n\n .expect(\"Function 'inc_mem' expects attached memory\")\n\n .get(ptr, sz as usize)\n\n .unwrap();\n\n\n\n if let Ok(sz) = self.inner.borrow_mut().connections[fd as usize].write(&buf) {\n\n Ok(Some(RuntimeValue::I64(sz as i64)))\n\n } else {\n\n Ok(Some(RuntimeValue::I64(-1)))\n\n }\n\n }\n\n DB_GET => {\n\n let key_ptr: u32 = args.nth(0);\n\n let key_sz: u64 = args.nth(1);\n", "file_path": "src/sync/host.rs", "rank": 92, "score": 11.033810169096991 }, { "content": "use std::str;\n\n\n\nmod sys {\n\n extern {\n\n pub fn log(ptr: *const u8, size: u64);\n\n pub fn response_set_status_line(status: u32, ptr: *const u8, size: u64);\n\n pub fn response_set_header(name_ptr: *const u8, name_size: u64, value_ptr: *const u8, value_size: u64);\n\n pub fn response_set_body(ptr: *const u8, size: u64);\n\n pub fn 
tcp_connect(ptr: *const u8, size: u64) -> i32;\n\n pub fn tcp_read(fd: i32, ptr: *mut u8, size: u64) -> i64;\n\n pub fn tcp_write(fd: i32, ptr: *const u8, size: u64) -> i64;\n\n pub fn db_get(key_ptr: *const u8, key_size: u64, value_ptr: *const u8, value_size: u64) -> i64;\n\n }\n\n}\n\n\n", "file_path": "serverless-api/src/lib.rs", "rank": 93, "score": 10.827099938758614 }, { "content": "use parity_wasm;\n\nuse parity_wasm::elements::{External, FunctionType, Internal, Type, ValueType};\n\nuse std::collections::VecDeque;\n\nuse wasmi::{self, Module};\n\nuse wasmi::{BlockFrameType, Externals, FuncInstance, FuncRef, FunctionContext, Interpreter, RunResult, RuntimeValue, Trap, TrapKind};\n\nuse std::marker;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\n\n\npub const DEFAULT_VALUE_STACK_LIMIT: usize = 16384;\n\npub const DEFAULT_FRAME_STACK_LIMIT: usize = 16384;\n\n\n", "file_path": "src/interpreter.rs", "rank": 94, "score": 10.519841395950682 }, { "content": " .memory\n\n .as_ref()\n\n .expect(\"Function 'inc_mem' expects attached memory\")\n\n .get(ptr, sz as usize)\n\n .unwrap();\n\n let address = String::from_utf8(v).unwrap();\n\n println!(\"received tcp_connect for {:?}\", address);\n\n let error = AsyncHostError::Connecting(address.parse().unwrap());\n\n Err(Trap::new(TrapKind::Host(Box::new(error))))\n\n }\n\n TCP_READ => {\n\n let fd: i32 = args.nth(0);\n\n let ptr: u32 = args.nth(1);\n\n let sz: u64 = args.nth(2);\n\n\n\n let error = AsyncHostError::TcpRead(fd, ptr, sz);\n\n Err(Trap::new(TrapKind::Host(Box::new(error))))\n\n\n\n /*\n\n let mut v = Vec::with_capacity(sz as usize);\n", "file_path": "src/async/host.rs", "rank": 95, "score": 9.498513920408252 }, { "content": " _ => false,\n\n })\n\n .count(),\n\n None => 0,\n\n };\n\n\n\n // Calculates a function index within module's function section\n\n let function_index_in_section = function_index - import_section_len;\n\n\n\n // Getting a type reference from a function section entry\n\n let 
func_type_ref: usize = function_section.entries()[function_index_in_section].type_ref() as usize;\n\n\n\n // Use the reference to get an actual function type\n\n let function_type: &FunctionType = match &type_section.types()[func_type_ref] {\n\n &Type::Function(ref func_type) => func_type,\n\n };\n\n\n\n // Parses arguments and constructs runtime values in correspondence of their types\n\n function_type\n\n .params()\n", "file_path": "src/interpreter.rs", "rank": 96, "score": 9.368374954134259 }, { "content": " },\n\n RunResult::NestedCall(nested_func) => {\n\n //println!(\"calling nested func, stack len={}\", function_stack.len());\n\n match FuncInstance::invoke_context(&nested_func, &mut function_context, interpreter.externals) {\n\n Err(t) => {\n\n if let TrapKind::Host(_) = t.kind() {\n\n //function_context.value_stack_mut().push(RuntimeValue::I32(42)).expect(\"should have pushed the return value\");\n\n function_stack.push_back(function_context);\n\n println!(\"got host trapkind\");\n\n return Err(t);\n\n } else {\n\n println!(\"resume got error: {:?}\", t);\n\n return Err(t);\n\n }\n\n },\n\n Ok(None) => {\n\n function_stack.push_back(function_context);\n\n //println!(\"got ok(none) stack len={}\", function_stack.len());\n\n }\n\n Ok(Some(nested_context)) => {\n", "file_path": "src/interpreter.rs", "rank": 97, "score": 8.531312873150801 }, { "content": "\n\npub mod response {\n\n use super::sys;\n\n\n\n pub fn set_status(status: u16, reason: &str) {\n\n unsafe {\n\n sys::response_set_status_line(status.into(), reason.as_ptr(), reason.len() as u64);\n\n }\n\n }\n\n\n\n pub fn set_header(name: &str, value: &str) {\n\n unsafe {\n\n sys::response_set_header(name.as_ptr(), name.len() as u64, value.as_ptr(), value.len() as u64);\n\n }\n\n }\n\n\n\n pub fn set_body(body: &[u8]) {\n\n unsafe {\n\n sys::response_set_body(body.as_ptr(), body.len() as u64);\n\n }\n", "file_path": "serverless-api/src/lib.rs", "rank": 98, "score": 8.38207845559849 }, { "content": " let 
value_ptr: u32 = args.nth(2);\n\n let value_sz: u64 = args.nth(3);\n\n\n\n let v = self\n\n .inner\n\n .borrow()\n\n .memory\n\n .as_ref()\n\n .expect(\"Function 'inc_mem' expects attached memory\")\n\n .get(key_ptr, key_sz as usize)\n\n .unwrap();\n\n let key = String::from_utf8(v).unwrap();\n\n println!(\"requested value for key {}\", key);\n\n\n\n match self.inner.borrow().db.get(&key) {\n\n None => Ok(Some(RuntimeValue::I64(-1))),\n\n Some(value) => {\n\n let to_write = cmp::min(value.len(), value_sz as usize);\n\n self\n\n .inner\n", "file_path": "src/async/host.rs", "rank": 99, "score": 8.122569406991701 } ]
Rust
modules/world/tests/graph.rs
drunkenme/lemon3d-rs
48d4e879996e2502e0faaf36e4dbcebfca9961b0
extern crate crayon; extern crate crayon_world; extern crate rand; use crayon::prelude::*; use crayon::*; use crayon_world::prelude::*; use crayon_world::renderable::headless::HeadlessRenderer; #[test] pub fn hierachy() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e2, e1, false).unwrap(); assert!(scene.is_ancestor(e2, e1)); assert!(scene.is_ancestor(e3, e1)); assert!(scene.is_ancestor(e4, e1)); assert!(scene.is_ancestor(e4, e3)); assert!(!scene.is_ancestor(e1, e1)); assert!(!scene.is_ancestor(e1, e2)); assert!(!scene.is_ancestor(e1, e3)); assert!(!scene.is_ancestor(e1, e4)); assert!(!scene.is_ancestor(e2, e4)); assert!(scene.is_root(e1)); assert!(!scene.is_root(e2)); assert!(!scene.is_root(e3)); assert!(!scene.is_root(e4)); assert!(!scene.is_leaf(e1)); assert!(scene.is_leaf(e2)); assert!(!scene.is_leaf(e3)); assert!(scene.is_leaf(e4)); let point = [1.0, 0.0, 0.0]; scene.set_position(e3, point); assert_ulps_eq!(scene.position(e4).unwrap(), point.into()); let point = [1.0, 0.0, 2.0]; scene.set_position(e1, point); assert_ulps_eq!(scene.position(e4).unwrap(), [2.0, 0.0, 2.0].into()); assert_ulps_eq!(scene.local_position(e4).unwrap(), [0.0, 0.0, 0.0].into()); scene.set_parent(e4, Some(e2), false).unwrap(); assert_ulps_eq!(scene.position(e4).unwrap(), [1.0, 0.0, 2.0].into()); assert_ulps_eq!(scene.local_position(e4).unwrap(), [0.0, 0.0, 0.0].into()); scene.set_local_position(e2, [1.0, 0.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(90.0), Deg(0.0)); scene.set_rotation(e1, euler); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 0.0, 1.0].into()); } #[test] fn remove() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); let e5 = 
scene.create("e5"); let e6 = scene.create("e6"); scene.set_parent(e2, e1, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e5, e3, false).unwrap(); scene.set_parent(e6, e5, false).unwrap(); assert!(scene.len() == 6); scene.delete(e3); assert!(scene.contains(e1)); assert!(scene.contains(e2)); assert!(!scene.contains(e3)); assert!(!scene.contains(e4)); assert!(!scene.contains(e5)); assert!(!scene.contains(e6)); assert!(scene.len() == 2); } #[test] fn transform() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); scene.set_scale(e1, 2.0); scene.set_position(e1, [1.0, 0.0, 2.0]); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(90.0)); let rotation = Quaternion::from(euler); scene.set_rotation(e1, rotation); let v = [1.0, 0.0, 0.0]; let transform = scene.transform(e1).unwrap(); assert_ulps_eq!(transform.transform_direction(v), [0.0, 1.0, 0.0].into()); assert_ulps_eq!(transform.transform_vector(v), [0.0, 2.0, 0.0].into()); assert_ulps_eq!(transform.transform_point(v), [1.0, 2.0, 2.0].into()); } #[test] fn keep_world_pose() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); scene.set_position(e1, [0.0, 1.0, 0.0]); assert_ulps_eq!(scene.position(e1).unwrap(), [0.0, 1.0, 0.0].into()); assert_ulps_eq!(scene.local_position(e1).unwrap(), [0.0, 1.0, 0.0].into()); scene.set_position(e2, [1.0, 0.0, 0.0]); scene.set_position(e3, [0.0, 0.0, 1.0]); scene.set_parent(e2, e1, false).unwrap(); assert_ulps_eq!(scene.local_position(e2).unwrap(), [1.0, 0.0, 0.0].into()); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 1.0, 0.0].into()); scene.remove_from_parent(e2, true).unwrap(); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 1.0, 0.0].into()); scene.set_parent(e3, e1, true).unwrap(); assert_ulps_eq!(scene.local_position(e3).unwrap(), [0.0, -1.0, 1.0].into()); 
assert_ulps_eq!(scene.position(e3).unwrap(), [0.0, 0.0, 1.0].into()); scene.remove_from_parent(e3, false).unwrap(); assert_ulps_eq!(scene.position(e3).unwrap(), [0.0, -1.0, 1.0].into()); } #[test] fn look_at() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [0.0, 0.0, -5.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [0.0, 0.0, 5.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(180.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [1.0, 0.0, 1.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(225.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); } #[test] fn iteration() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); let e5 = scene.create("e5"); let e6 = scene.create("e6"); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e2, e1, false).unwrap(); scene.set_parent(e6, e4, false).unwrap(); scene.set_parent(e5, e4, false).unwrap(); assert_eq!( scene.descendants(e1).collect::<Vec<_>>(), [e2, e3, e4, e5, e6] ); assert_eq!(scene.children(e1).collect::<Vec<_>>(), [e2, e3]); assert_eq!(scene.ancestors(e1).collect::<Vec<_>>(), []); assert_eq!(scene.ancestors(e2).collect::<Vec<_>>(), [e1]); assert_eq!(scene.ancestors(e4).collect::<Vec<_>>(), [e3, e1]); assert_eq!(scene.ancestors(e6).collect::<Vec<_>>(), [e4, e3, e1]); } #[test] fn random_iteration() { let mut scene = Scene::new(HeadlessRenderer::new()); let mut nodes = vec![]; 
for _ in 0..255 { nodes.push(scene.create("")); } let mut constructed = vec![]; constructed.push(nodes.pop().unwrap()); let mut count = 0; for i in 0..254 { let idx = rand::random::<usize>() % nodes.len(); let pidx = rand::random::<usize>() % constructed.len(); if pidx == 0 { count += 1; } scene .set_parent(nodes[idx], constructed[pidx], false) .unwrap(); let len = scene.descendants(constructed[0]).count(); assert_eq!(len, i + 1); constructed.push(nodes[idx]); nodes.remove(idx); } let len = scene.children(constructed[0]).count(); assert_eq!(len, count); let len = scene.descendants(constructed[0]).count(); assert_eq!(len, 254); }
extern crate crayon; extern crate crayon_world; extern crate rand; use crayon::prelude::*; use crayon::*; use crayon_world::prelude::*; use crayon_world::renderable::headless::HeadlessRenderer; #[test] pub fn hierachy() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e2, e1, false).unwrap(); assert!(scene.is_ancestor(e2, e1)); assert!(scene.is_ancestor(e3, e1)); assert!(scene.is_ancestor(e4, e1)); assert!(scene.is_ancestor(e4, e3)); assert!(!scene.is_ancestor(e1, e1)); assert!(!scene.is_ancestor(e1, e2)); assert!(!scene.is_ancestor(e1, e3)); assert!(!scene.is_ancestor(e1, e4)); assert!(!scene.is_ancestor(e2, e4)); assert!(scene.is_root(e1)); assert!(!scene.is_root(e2)); assert!(!scene.is_root(e3)); assert!(!scene.is_root(e4)); assert!(!scene.is_leaf(e1)); assert!(scene.is_leaf(e2)); assert!(!scene.is_leaf(e3)); assert!(scene.is_leaf(e4)); let point = [1.0, 0.0, 0.0]; scene.set_position(e3, point); assert_ulps_eq!(scene.position(e4).unwrap(), point.into()); let point = [1.0, 0.0, 2.0]; scene.set_position(e1, point); assert_ulps_eq!(scene.position(e4).unwrap(), [2.0, 0.0, 2.0].into()); assert_ulps_eq!(scene.local_position(e4).unwrap(), [0.0, 0.0, 0.0].into()); scene.set_parent(e4, Some(e2), false).unwrap(); assert_ulps_eq!(scene.position(e4).unwrap(), [1.0, 0.0, 2.0].into()); assert_ulps_eq!(scene.local_position(e4).unwrap(), [0.0, 0.0, 0.0].into()); scene.set_local_position(e2, [1.0, 0.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(90.0), Deg(0.0)); scene.set_rotation(e1, euler); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 0.0, 1.0].into()); } #[test] fn remove() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); let e5 = 
scene.create("e5"); let e6 = scene.create("e6"); scene.set_parent(e2, e1, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e5, e3, false).unwrap(); scene.set_parent(e6, e5, false).unwrap(); assert!(scene.len() == 6); scene.delete(e3); assert!(scene.contains(e1)); assert!(scene.contains(e2)); assert!(!scene.contains(e3)); assert!(!scene.contains(e4)); assert!(!scene.contains(e5)); assert!(!scene.contains(e6)); assert!(scene.len() == 2); } #[test] fn transform() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); scene.set_scale(e1, 2.0); scene.set_position(e1, [1.0, 0.0, 2.0]); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(90.0)); let rotation = Quaternion::from(euler); scene.set_rotation(e1, rotation); let v = [1.0, 0.0, 0.0]; let transform = scene.transform(e1).unwrap(); assert_ulps_eq!(transform.transform_direction(v), [0.0, 1.0, 0.0].into()); assert_ulps_eq!(transform.transform_vector(v), [0.0, 2.0, 0.0].into()); assert_ulps_eq!(transform.transform_point(v), [1.0, 2.0, 2.0].into()); } #[test] fn keep_world_pose() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); scene.set_position(e1, [0.0, 1.0, 0.0]); assert_ulps_eq!(scene.position(e1).unwrap(), [0.0, 1.0, 0.0].into()); assert_ulps_eq!(scene.local_position(e1).unwrap(), [0.0, 1.0, 0.0].into()); scene.set_position(e2, [1.0, 0.0, 0.0]); scene.set_position(e3, [0.0, 0.0, 1.0]); scene.set_parent(e2, e1, false).unwrap(); assert_ulps_eq!(scene.local_position(e2).unwrap(), [1.0, 0.0, 0.0].into()); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 1.0, 0.0].into()); scene.remove_from_parent(e2, true).unwrap(); assert_ulps_eq!(scene.position(e2).unwrap(), [1.0, 1.0, 0.0].into()); scene.set_parent(e3, e1, true).unwrap(); assert_ulps_eq!(scene.local_position(e3).unwrap(), [0.0, -1.0, 1.0].into()); 
assert_ulps_eq!(scene.position(e3).unwrap(), [0.0, 0.0, 1.0].into()); scene.remove_from_parent(e3, false).unwrap(); assert_ulps_eq!(scene.position(e3).unwrap(), [0.0, -1.0, 1.0].into()); } #[test] fn look_at() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [0.0, 0.0, -5.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [0.0, 0.0, 5.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(180.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); scene.set_position(e1, [1.0, 0.0, 1.0]); scene.look_at(e1, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]); let euler = Euler::new(Deg(0.0), Deg(225.0), Deg(0.0)); assert_ulps_eq!(scene.rotation(e1).unwrap(), euler.into()); } #[test] fn iteration() { let mut scene = Scene::new(HeadlessRenderer::new()); let e1 = scene.create("e1"); let e2 = scene.create("e2"); let e3 = scene.create("e3"); let e4 = scene.create("e4"); let e5 = scene.create("e5"); let e6 = scene.create("e6"); scene.set_parent(e4, e3, false).unwrap(); scene.set_parent(e3, e1, false).unwrap(); scene.set_parent(e2, e1, false).unwrap(); scene.set_parent(e6, e4, false).unwrap(); scene.set_parent(e5, e4, false).unwrap(); assert_eq!( scene.descendants(e1).collect::<Vec<_>>(), [e2, e3, e4, e5, e6] ); assert_eq!(scene.children(e1).collect::<Vec<_>>(), [e2, e3]); assert_eq!(scene.ancestors(e1).collect::<Vec<_>>(), []); assert_eq!(scene.ancestors(e2).collect::<Vec<_>>(), [e1]); assert_eq!(scene.ancestors(e4).collect::<Vec<_>>(), [e3, e1]); assert_eq!(scene.ancestors(e6).collect::<Vec<_>>(), [e4, e3, e1]); } #[test] fn random_iteration() { let mut scene = Scene::new(HeadlessRenderer::new()); let mut nodes = vec![]; 
for _ in 0..255 { nodes.push(scene.create("")); } let mut constructed = vec![]; constructed.push(nodes.pop().unwrap()); let mut count = 0; for i in 0..254 { let idx = rand::random::<usize>() % nodes.len(); let pidx = rand::random::<usize>() % constructed.len(); if pidx == 0 { count += 1; } scene .set_parent(nodes[idx], constructed[pidx], false) .unwrap(); let len = scene.descendants(constructed[0]).count(); assert_eq!(len, i + 1); constructed.push(nodes[idx]); nodes.remove(idx); }
let len = scene.children(constructed[0]).count(); assert_eq!(len, count); let len = scene.descendants(constructed[0]).count(); assert_eq!(len, 254); }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn transform() {\n\n let mut e1 = Transform::default();\n\n let euler = Euler::new(Deg(0.0), Deg(0.0), Deg(90.0));\n\n e1.scale = 2.0;\n\n e1.position = [1.0, 0.0, 2.0].into();\n\n e1.rotation = euler.into();\n\n\n\n let v = [1.0, 0.0, 0.0];\n\n assert_ulps_eq!(e1.transform_direction(v), [0.0, 1.0, 0.0].into());\n\n assert_ulps_eq!(e1.transform_vector(v), [0.0, 2.0, 0.0].into());\n\n assert_ulps_eq!(e1.transform_point(v), [1.0, 2.0, 2.0].into());\n\n}\n\n\n", "file_path": "modules/world/tests/transform.rs", "rank": 1, "score": 206039.85955707767 }, { "content": "#[test]\n\nfn iter() {\n\n let mut set: HandlePool<Handle> = HandlePool::new();\n\n let mut v = vec![];\n\n\n\n for m in 2..3 {\n\n for _ in 0..10 {\n\n v.push(set.create())\n\n }\n\n\n\n for i in 0..10 {\n\n if i % m == 0 {\n\n let index = i % v.len();\n\n set.free(v[index]);\n\n v.remove(index);\n\n }\n\n }\n\n }\n\n\n\n v.sort_by(|lhs, rhs| lhs.index().cmp(&rhs.index()));\n\n let mut iter = set.iter();\n", "file_path": "tests/handle_pool.rs", "rank": 2, "score": 199539.1626174256 }, { "content": "#[allow(unused_assignments, unused_mut)]\n\n#[inline]\n\npub fn set_max_fps(mut fps: u32) {\n\n #[cfg(target_arch = \"wasm32\")]\n\n {\n\n warn!(\"The max FPS could not be controlled in web environment.\");\n\n fps = 0;\n\n }\n\n\n\n time_ctx().set_max_fps(fps);\n\n}\n\n\n\n/// Set maximum frames per second when the application does not have input\n\n/// focus.\n", "file_path": "src/application/mod.rs", "rank": 4, "score": 184968.54798086395 }, { "content": "#[test]\n\nfn iterator() {\n\n let mut set = ObjectPool::<Handle, i32>::new();\n\n for i in 0..10 {\n\n set.create(i);\n\n }\n\n\n\n assert!(set.iter().count() == 10);\n\n\n\n for (i, v) in set.keys().enumerate() {\n\n assert_eq!(v, Handle::new(i as u32, 1));\n\n }\n\n\n\n for (i, &v) in set.values().enumerate() {\n\n assert_eq!(v, i as i32);\n\n }\n\n\n\n for v in set.values_mut() {\n\n *v += 1;\n\n }\n\n\n\n for (i, &v) in 
set.values().enumerate() {\n\n assert_eq!(v, (i + 1) as i32);\n\n }\n\n}\n", "file_path": "tests/object_pool.rs", "rank": 5, "score": 163041.1604918519 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n let device = cpal::default_output_device()\n\n .ok_or_else(|| format_err!(\"No avaiable audio output device\"))?;\n\n\n\n let format = device\n\n .default_output_format()\n\n .expect(\"The device doesn't support any format.\");\n\n\n\n let events = EventLoop::new();\n\n let stream = events.build_output_stream(&device, &format).unwrap();\n\n\n\n info!(\n\n \"Create audio mixer based on CPAL. [{:?}] {:?}.\",\n\n device.name(),\n\n format\n\n );\n\n\n\n let mut sampler = Sampler::new(format.channels as u8, format.sample_rate.0 as u32);\n\n Builder::new()\n\n .name(\"Audio\".into())\n", "file_path": "modules/audio/src/mixer/cpal.rs", "rank": 6, "score": 162273.1511033558 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create web audio mixer.\",);\n\n\n\n let ctx = AudioContext::new().unwrap();\n\n\n\n let closure = Rc::new(RefCell::new(None));\n\n let clone = closure.clone();\n\n let mut sampler = Sampler::new(CHANNELS, ctx.sample_rate() as u32);\n\n\n\n let mut bufs = Vec::new();\n\n for _ in 0..CHANNELS {\n\n bufs.push(Vec::new());\n\n }\n\n\n\n *closure.borrow_mut() = Some(Closure::wrap(Box::new(move |e: AudioProcessingEvent| {\n\n if clone.borrow().is_some() {}\n\n\n\n {\n\n let mut rx = rx.write().unwrap();\n\n sampler.update(rx.drain(..));\n", "file_path": "modules/audio/src/mixer/webaudio.rs", "rank": 7, "score": 162273.1511033558 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create headless audio mixer.\",);\n\n\n\n Builder::new()\n\n .name(\"Audio\".into())\n\n .spawn(move || {\n\n //\n\n loop {\n\n {\n\n let mut rx = rx.write().unwrap();\n\n rx.clear();\n\n }\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n }\n\n }).expect(\"Failed 
to create thread for `AudioSystem`.\");\n\n\n\n Ok(())\n\n}\n", "file_path": "modules/audio/src/mixer/headless.rs", "rank": 8, "score": 162273.1511033558 }, { "content": "#[test]\n\nfn inverse() {\n\n let mut e1 = Transform::default();\n\n e1.position = [0.0, 0.0, 1.0].into();\n\n let euler = Euler::new(Deg(0.0), Deg(90.0), Deg(0.0));\n\n e1.rotation = euler.into();\n\n\n\n let v = e1.inverse().unwrap() * e1;\n\n assert_ulps_eq!(v.position, [0.0, 0.0, 0.0].into());\n\n assert_ulps_eq!(v.scale, 1.0);\n\n assert_ulps_eq!(v.rotation, Quaternion::one());\n\n}\n", "file_path": "modules/world/tests/transform.rs", "rank": 11, "score": 158867.77105001057 }, { "content": "#[test]\n\nfn concat() {\n\n let mut e1 = Transform::default();\n\n e1.position = [0.0, 0.0, 1.0].into();\n\n\n\n let mut e2 = Transform::default();\n\n let euler = Euler::new(Deg(0.0), Deg(90.0), Deg(0.0));\n\n e2.rotation = euler.into();\n\n\n\n let e3 = e2 * e1;\n\n assert_ulps_eq!(e3.position, [1.0, 0.0, 0.0].into());\n\n}\n\n\n", "file_path": "modules/world/tests/transform.rs", "rank": 12, "score": 158867.77105001057 }, { "content": "#[test]\n\nfn find() {\n\n let mut scene = Scene::new(HeadlessRenderer::new());\n\n\n\n let e1 = scene.create(\"room.obj\");\n\n let e2 = scene.create(\"floor\");\n\n let e3 = scene.create(\"tallBox\");\n\n let e4 = scene.create(\"shortBox\");\n\n\n\n scene.set_parent(e2, e1, false).unwrap();\n\n scene.set_parent(e3, e1, false).unwrap();\n\n scene.set_parent(e4, e3, false).unwrap();\n\n\n\n assert_eq!(scene.find(\"room.obj\"), Some(e1));\n\n assert_eq!(scene.find(\"room.obj/\"), Some(e1));\n\n assert_eq!(scene.find(\"room.obj//\"), Some(e1));\n\n assert_eq!(scene.find(\"/room.obj\"), Some(e1));\n\n assert_eq!(scene.find(\"//room.obj\"), Some(e1));\n\n assert_eq!(scene.find(\"/room.obj//\"), Some(e1));\n\n\n\n assert_eq!(scene.find(\"room.obj/floor\"), Some(e2));\n\n assert_eq!(scene.find(\"room.obj/tallBox\"), Some(e3));\n\n 
assert_eq!(scene.find(\"room.obj/tallBox/shortBox\"), Some(e4));\n\n\n\n assert_eq!(scene.find(\"room.obj/blahblah\"), None);\n\n}\n\n\n", "file_path": "modules/world/tests/scene.rs", "rank": 13, "score": 158775.38856028605 }, { "content": "#[test]\n\nfn instantiate() {\n\n use crayon_world::assets::prefab::PrefabNode;\n\n crayon::application::oneshot().unwrap();\n\n crayon_world::setup().unwrap();\n\n\n\n let mut prefab = Prefab {\n\n nodes: Vec::new(),\n\n universe_meshes: Vec::new(),\n\n meshes: Vec::new(),\n\n };\n\n\n\n prefab.nodes.push(PrefabNode {\n\n name: \"room.obj\".into(),\n\n local_transform: Transform::default(),\n\n first_child: Some(1),\n\n next_sib: None,\n\n mesh_renderer: None,\n\n });\n\n\n\n prefab.nodes.push(PrefabNode {\n", "file_path": "modules/world/tests/scene.rs", "rank": 14, "score": 158775.38856028605 }, { "content": "/// Setup the core system.\n\npub fn setup<T, T2>(mut params: Params, closure: T) -> Result<()>\n\nwhere\n\n T: FnOnce() -> Result<T2> + 'static,\n\n T2: LifecycleListener + Send + 'static,\n\n{\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n sys::init();\n\n params.validate();\n\n\n\n let dirs = params.res.dirs.clone();\n\n LIFECYCLE_CTX = Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n\n\n if std::env::args().any(|v| v == \"headless\") {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n } else {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new(params)?));\n\n };\n\n\n\n let latch = crate::res::inside::load_manifests(dirs)?;\n\n ctx().run(latch, closure)\n\n }\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 15, "score": 158740.91981325942 }, { "content": "pub fn sphere(iteration: usize) -> Result<MeshHandle> {\n\n use std::f32::consts::FRAC_1_PI;\n\n\n\n fn normalize(v: [f32; 3]) -> Vertex {\n\n let l = (v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt();\n\n let v 
= [v[0] / l, v[1] / l, v[2] / l];\n\n let uv = [v[0].asin() * FRAC_1_PI + 0.5, v[1].asin() * FRAC_1_PI + 0.5];\n\n\n\n Vertex::new(v, v, uv)\n\n }\n\n\n\n let t = (1.0f32 + 5.0f32.sqrt()) / 2.0f32;\n\n let mut verts = vec![\n\n normalize([-1.0, t, 0.0]),\n\n normalize([1.0, t, 0.0]),\n\n normalize([-1.0, -t, 0.0]),\n\n normalize([1.0, -t, 0.0]),\n\n normalize([0.0, -1.0, t]),\n\n normalize([0.0, 1.0, t]),\n\n normalize([0.0, -1.0, -t]),\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 18, "score": 146492.79454979085 }, { "content": "#[inline]\n\npub fn show() {\n\n ctx().show();\n\n}\n\n\n\n/// Hides the window if it was visible.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 19, "score": 143197.05225454032 }, { "content": "#[inline]\n\npub fn discard() {\n\n ctx().shutdown()\n\n}\n\n\n\npub(crate) unsafe fn late_discard() {\n\n drop(Box::from_raw(CTX as *mut EngineSystem));\n\n CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(TIME_CTX as *mut TimeSystem));\n\n TIME_CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(LIFECYCLE_CTX as *mut LifecycleSystem));\n\n LIFECYCLE_CTX = std::ptr::null();\n\n}\n\n\n\n/// Checks if the engine is enabled.\n", "file_path": "src/application/mod.rs", "rank": 20, "score": 143197.05225454032 }, { "content": "#[inline]\n\npub fn hide() {\n\n ctx().hide();\n\n}\n\n\n\n/// Set the context as the active context in this thread.\n", "file_path": "src/window/mod.rs", "rank": 21, "score": 143197.05225454032 }, { "content": "#[inline]\n\npub fn reset() {\n\n ctx().reset();\n\n}\n\n\n\n/// Returns true if a keyboard is attached\n", "file_path": "src/input/mod.rs", "rank": 22, "score": 143197.05225454032 }, { "content": "#[doc(hidden)]\n\npub fn oneshot() -> Result<()> {\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n let params = Params::default();\n\n\n\n sys::init();\n\n LIFECYCLE_CTX = 
Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n\n\n ctx().run_oneshot()\n\n }\n\n}\n\n\n\n/// Discard the core system.\n", "file_path": "src/application/mod.rs", "rank": 23, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !LIFECYCLE_CTX.is_null() }\n\n}\n\n\n\n/// Checks if the engine is running in headless mode.\n", "file_path": "src/application/mod.rs", "rank": 24, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn is_current() -> bool {\n\n ctx().is_current()\n\n}\n\n\n\n/// Returns the position of the lower-left hand corner of the window relative to the lower-left\n\n/// hand corner of the desktop. Note that the lower-left hand corner of the desktop is not\n\n/// necessarily the same as the screen. If the user uses a desktop with multiple monitors,\n\n/// the lower-left hand corner of the desktop is the lower-left hand corner of the monitor at\n\n/// the lower-left of the desktop.\n\n///\n\n/// The coordinates can be negative if the lower-left hand corner of the window is outside of\n\n/// the visible screen region.\n", "file_path": "src/window/mod.rs", "rank": 25, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn headless() -> bool {\n\n ctx().headless()\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 26, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n// /// Blocks current thread until latch is set. 
Try to keep busy by popping and stealing jobs\n\n// /// as necessary.\n\n// #[inline]\n\n// pub fn wait_until<T>(latch: &T)\n\n// where\n\n// T: LatchWaitProbe,\n\n// {\n\n// ctx().wait_until(latch);\n\n// }\n\n\n", "file_path": "src/sched/mod.rs", "rank": 27, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn text() -> String {\n\n ctx().text()\n\n}\n\n\n\n/// Returns true if a mouse is attached\n", "file_path": "src/input/mod.rs", "rank": 28, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn fps() -> u32 {\n\n time_ctx().fps()\n\n}\n\n\n\n/// Gets the duration duraing last frame.\n", "file_path": "src/application/mod.rs", "rank": 29, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Reset input to initial states.\n", "file_path": "src/input/mod.rs", "rank": 30, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Resolve shortcuts in the provided string recursively and return None if not exists.\n", "file_path": "src/res/mod.rs", "rank": 31, "score": 136407.66777481057 }, { "content": "#[inline]\n\npub fn hash<T: Hash + ?Sized>(v: &T) -> usize {\n\n let mut state = hasher::FxHasher::default();\n\n v.hash(&mut state);\n\n state.finish() as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn basic() {\n\n let mut v: HashMap<&'static str, i32> = Default::default();\n\n v.insert(\"hahah\", 123);\n\n }\n\n}\n\n\n\nmod hasher {\n\n use std::hash::Hasher;\n\n use std::ops::BitXor;\n\n\n\n const ROTATE: u32 = 5;\n\n const SEED64: u64 = 0x517c_c1b7_2722_0a95;\n\n const SEED32: u32 = (SEED64 & 0xFFFF_FFFF) as u32;\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n const SEED: usize = SEED32 as usize;\n\n #[cfg(target_pointer_width = \"64\")]\n\n const SEED: usize = SEED64 as usize;\n\n\n", "file_path": "src/utils/hash.rs", "rank": 32, "score": 
136122.9551127702 }, { "content": "#[inline]\n\npub fn hash32<T: Hash + ?Sized>(v: &T) -> u32 {\n\n let mut state = hasher::FxHasher32::default();\n\n v.hash(&mut state);\n\n state.finish() as u32\n\n}\n\n\n\n/// A convenience function for when you need a quick usize hash.\n", "file_path": "src/utils/hash.rs", "rank": 33, "score": 136122.9551127702 }, { "content": "#[inline]\n\npub fn hash64<T: Hash + ?Sized>(v: &T) -> u64 {\n\n let mut state = hasher::FxHasher64::default();\n\n v.hash(&mut state);\n\n state.finish()\n\n}\n\n\n\n/// A convenience function for when you need a quick 32-bit hash.\n", "file_path": "src/utils/hash.rs", "rank": 34, "score": 136122.9551127702 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let duration = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap();\n\n\n\n let ms = u64::from(duration.subsec_millis()) + duration.as_secs() * 1000;\n\n Timestamp::from_millis(ms)\n\n}\n\n\n\npub(crate) fn init() {}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n\n{\n\n while advance()? 
{}\n\n finished()\n\n}\n", "file_path": "src/application/sys/glutin.rs", "rank": 35, "score": 133991.2884451085 }, { "content": "#[inline]\n\npub fn has_mouse_attached() -> bool {\n\n ctx().has_mouse_attached()\n\n}\n\n\n\n/// Checks if a mouse buttoAn is held down.\n", "file_path": "src/input/mod.rs", "rank": 36, "score": 133991.2884451085 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let ms = web_sys::window()\n\n .expect(\"should have a window in this context\")\n\n .performance()\n\n .expect(\"performance should be available\")\n\n .now();\n\n\n\n Timestamp::from_millis(ms as u64)\n\n}\n\n\n\npub(crate) fn init() {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n log::set_boxed_logger(Box::new(WebBrowserLogger {})).unwrap();\n\n log::set_max_level(log::LevelFilter::Info);\n\n}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n", "file_path": "src/application/sys/web.rs", "rank": 37, "score": 133991.2884451085 }, { "content": "#[inline]\n\npub fn make_current() -> Result<()> {\n\n ctx().make_current()\n\n}\n\n\n\n/// Returns true if this context is the current one in this thread.\n", "file_path": "src/window/mod.rs", "rank": 38, "score": 133991.2884451085 }, { "content": "#[inline]\n\npub fn has_keyboard_attached() -> bool {\n\n ctx().has_keyboard_attached()\n\n}\n\n\n\n/// Checks if a key is currently held down.\n", "file_path": "src/input/mod.rs", "rank": 39, "score": 133991.2884451085 }, { "content": "#[inline]\n\npub fn has_touchpad_attached() -> bool {\n\n ctx().has_touchpad_attached()\n\n}\n\n\n\n/// Checks if the `n`th finger is touched during last frame.\n", "file_path": "src/input/mod.rs", "rank": 40, "score": 133991.2884451085 }, { "content": "#[inline]\n\npub fn finger_pan() -> GesturePan {\n\n ctx().finger_pan()\n\n}\n\n\n\npub(crate) mod 
inside {\n\n use super::system::InputSystem;\n\n use super::InputParams;\n\n\n\n pub static mut CTX: *const InputSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static InputSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"input system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n", "file_path": "src/input/mod.rs", "rank": 41, "score": 131714.1650578429 }, { "content": "#[inline]\n\npub fn device_pixel_ratio() -> f32 {\n\n ctx().device_pixel_ratio()\n\n}\n\n\n\npub(crate) mod inside {\n\n use crate::errors::*;\n\n use crate::math::prelude::Vector2;\n\n\n\n use super::system::WindowSystem;\n\n use super::WindowParams;\n\n\n\n pub static mut CTX: *const WindowSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WindowSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"window system has not been initialized properly.\"\n\n );\n", "file_path": "src/window/mod.rs", "rank": 42, "score": 131714.1650578429 }, { "content": "#[inline]\n\npub fn finger_tap() -> GestureTap {\n\n ctx().finger_tap()\n\n}\n\n\n\n/// Gets the double tap gesture.\n", "file_path": "src/input/mod.rs", "rank": 43, "score": 131714.1650578429 }, { "content": "#[inline]\n\npub fn dimensions() -> Vector2<u32> {\n\n ctx().dimensions()\n\n}\n\n\n\n/// Returns the ratio between the backing framebuffer resolution and the window size in\n\n/// screen pixels. This is typically one for a normal display and two for a retina display.\n", "file_path": "src/window/mod.rs", "rank": 44, "score": 130332.58748891542 }, { "content": "#[inline]\n\npub fn position() -> Vector2<i32> {\n\n ctx().position()\n\n}\n\n\n\n/// Returns the size in *points* of the client area of the window.\n\n///\n\n/// The client area is the content of the window, excluding the title bar and borders. 
These are\n\n/// the size of the frame buffer.\n", "file_path": "src/window/mod.rs", "rank": 45, "score": 130332.58748891542 }, { "content": "#[inline]\n\npub fn finger_double_tap() -> GestureTap {\n\n ctx().finger_double_tap()\n\n}\n\n\n\n/// Gets the panning gesture.\n", "file_path": "src/input/mod.rs", "rank": 46, "score": 129564.59679208497 }, { "content": "#[inline]\n\npub fn default() -> WorldDefaultResources {\n\n ctx().default\n\n}\n\n\n\nmod inside {\n\n use super::system::WorldSystem;\n\n\n\n static mut CTX: *const WorldSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WorldSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"world system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n", "file_path": "modules/world/src/lib.rs", "rank": 47, "score": 129564.59679208497 }, { "content": "#[inline]\n\npub fn mouse_scroll() -> Vector2<f32> {\n\n ctx().mouse_scroll()\n\n}\n\n\n\n/// Returns true if a touchpad is attached\n", "file_path": "src/input/mod.rs", "rank": 48, "score": 128055.46410164979 }, { "content": "#[inline]\n\npub fn mouse_movement() -> Vector2<f32> {\n\n ctx().mouse_movement()\n\n}\n\n\n\n/// Gets the scroll movement of mouse, usually provided by mouse wheel.\n", "file_path": "src/input/mod.rs", "rank": 49, "score": 128055.46410164979 }, { "content": "#[inline]\n\npub fn mouse_position() -> Vector2<f32> {\n\n ctx().mouse_position()\n\n}\n\n\n\n/// Gets mouse movement since last frame.\n", "file_path": "src/input/mod.rs", "rank": 50, "score": 128055.46410164979 }, { "content": "/// Removes a event listener from window.\n\npub fn detach(handle: EventListenerHandle) {\n\n ctx().remove_event_listener(handle)\n\n}\n\n\n\n/// Shows the window if it was hidden.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 51, "score": 125911.00115069779 }, { "content": "#[inline]\n\npub fn delete_surface(handle: SurfaceHandle) 
{\n\n ctx().delete_surface(handle)\n\n}\n\n\n\n/// Create a shader with initial shaders and render state. It encapusulates all the\n\n/// informations we need to configurate graphics pipeline before real drawing.\n", "file_path": "src/video/mod.rs", "rank": 52, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn set_min_fps(fps: u32) {\n\n time_ctx().set_min_fps(fps);\n\n}\n\n\n\n/// Set maximum frames per second. The Time will sleep if fps is higher\n\n/// than this for less resource(e.g. power) consumptions.\n", "file_path": "src/application/mod.rs", "rank": 53, "score": 125905.89583589186 }, { "content": "pub fn new_headless() -> Box<Visitor> {\n\n Box::new(self::headless::HeadlessVisitor::new())\n\n}\n", "file_path": "src/video/backends/mod.rs", "rank": 54, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn delete_mesh(handle: MeshHandle) {\n\n ctx().delete_mesh(handle);\n\n}\n\n\n\n/// Create texture object. A texture is an image loaded in video memory,\n\n/// which can be sampled in shaders.\n", "file_path": "src/video/mod.rs", "rank": 55, "score": 125905.89583589186 }, { "content": "pub fn new_headless() -> Box<Visitor> {\n\n Box::new(self::headless::HeadlessVisitor {})\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nmod glutin;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use self::glutin::new;\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nmod web;\n\n#[cfg(target_arch = \"wasm32\")]\n\npub use self::web::new;\n", "file_path": "src/window/backends/mod.rs", "rank": 56, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn delete_texture(handle: TextureHandle) {\n\n ctx().delete_texture(handle);\n\n}\n\n\n\n/// Create render texture object, which could be attached with a framebuffer.\n", "file_path": "src/video/mod.rs", "rank": 57, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn detach(handle: LifecycleListenerHandle) {\n\n lifecycle_ctx().detach(handle)\n\n}\n\n\n\n/// Set minimum frames per second. 
If fps goes lower than this, time will\n\n/// appear to slow. This is useful for some subsystems required strict minimum\n\n/// time step per frame, such like Collision checks.\n", "file_path": "src/application/mod.rs", "rank": 58, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn delete_shader(handle: ShaderHandle) {\n\n ctx().delete_shader(handle)\n\n}\n\n\n\n/// Create a new mesh object.\n", "file_path": "src/video/mod.rs", "rank": 59, "score": 125905.89583589186 }, { "content": "#[inline]\n\npub fn is_finger_touched(n: usize) -> bool {\n\n ctx().is_finger_touched(n)\n\n}\n\n\n\n/// Gets the position of the `n`th touched finger.\n", "file_path": "src/input/mod.rs", "rank": 60, "score": 124853.43649724877 }, { "content": "#[inline]\n\npub fn is_key_down(key: Key) -> bool {\n\n ctx().is_key_down(key)\n\n}\n\n\n\n/// Checks if a key has been pressed down during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 61, "score": 124853.43649724877 }, { "content": "/// Spawn an asynchronous job in the global `Scheduler.`\n\npub fn spawn<F>(func: F)\n\nwhere\n\n F: FnOnce() + Send + 'static,\n\n{\n\n ctx().spawn(func);\n\n}\n\n\n", "file_path": "src/sched/mod.rs", "rank": 62, "score": 124853.43649724877 }, { "content": "#[inline]\n\npub fn exists(uuid: Uuid) -> bool {\n\n ctx().exists(uuid)\n\n}\n\n\n\n/// Loads file asynchronously with response callback.\n", "file_path": "src/res/mod.rs", "rank": 63, "score": 124853.43649724877 }, { "content": "#[inline]\n\npub fn stop(handle: AudioSourceHandle) {\n\n ctx().stop(handle)\n\n}\n\n\n\n/// Sets the emiiter position of playing sound.\n", "file_path": "modules/audio/src/lib.rs", "rank": 64, "score": 123873.45702046787 }, { "content": "#[inline]\n\npub fn set_max_inactive_fps(fps: u32) {\n\n time_ctx().set_max_inactive_fps(fps);\n\n}\n\n\n\n/// Set how many frames to average for timestep smoothing.\n", "file_path": "src/application/mod.rs", "rank": 65, "score": 123873.45702046787 }, { "content": 
"#[inline]\n\npub fn delete_prefab(handle: PrefabHandle) {\n\n ctx().delete_prefab(handle);\n\n}\n\n\n\n/// Return the default resources in this world.\n", "file_path": "modules/world/src/lib.rs", "rank": 66, "score": 123873.45702046787 }, { "content": "#[inline]\n\npub fn set_time_smoothing_step(step: u32) {\n\n time_ctx().set_time_smoothing_step(step);\n\n}\n\n\n\n/// Gets current fps.\n", "file_path": "src/application/mod.rs", "rank": 67, "score": 123873.45702046787 }, { "content": "#[inline]\n\npub fn frame_duration() -> ::std::time::Duration {\n\n time_ctx().frame_duration()\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 68, "score": 122703.86823149084 }, { "content": "#[inline]\n\npub fn is_key_repeat(key: Key) -> bool {\n\n ctx().is_key_repeat(key)\n\n}\n\n\n\n/// Gets captured text during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 69, "score": 122703.86823149084 }, { "content": "#[inline]\n\npub fn is_key_press(key: Key) -> bool {\n\n ctx().is_key_press(key)\n\n}\n\n\n\n/// Checks if a key has been released during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 70, "score": 122703.86823149084 }, { "content": "#[inline]\n\npub fn is_key_release(key: Key) -> bool {\n\n ctx().is_key_release(key)\n\n}\n\n\n\n/// Checks if a key has been repeated during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 71, "score": 122703.86823149084 }, { "content": "#[inline]\n\npub fn is_mouse_down(button: MouseButton) -> bool {\n\n ctx().is_mouse_down(button)\n\n}\n\n\n\n/// Checks if a mouse button has been pressed during last frame.\n", "file_path": "src/input/mod.rs", "rank": 72, "score": 122703.86823149084 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn new() -> Result<Box<Visitor>> {\n\n let visitor = unsafe { webgl::visitor::WebGLVisitor::new()? 
};\n\n Ok(Box::new(visitor))\n\n}\n\n\n", "file_path": "src/video/backends/mod.rs", "rank": 73, "score": 122703.86823149084 }, { "content": "#[inline]\n\npub fn delete_clip(handle: AudioClipHandle) {\n\n ctx().delete_clip(handle);\n\n}\n\n\n\n/// Plays a audio source, returning a `AudioSourceHandle` for it.\n", "file_path": "modules/audio/src/lib.rs", "rank": 74, "score": 121948.82803455432 }, { "content": "pub fn quad() -> Result<MeshHandle> {\n\n let verts: [Vertex; 4] = [\n\n Vertex::new([-0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 0.0]),\n\n Vertex::new([0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 0.0]),\n\n Vertex::new([0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 1.0]),\n\n Vertex::new([-0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 1.0]),\n\n ];\n\n\n\n let idxes: [u16; 6] = [0, 1, 2, 0, 2, 3];\n\n\n\n let mut params = MeshParams::default();\n\n params.num_verts = verts.len();\n\n params.num_idxes = idxes.len();\n\n params.layout = Vertex::layout();\n\n\n\n let data = MeshData {\n\n vptr: Vertex::encode(&verts[..]).into(),\n\n iptr: IndexFormat::encode(&idxes).into(),\n\n };\n\n\n\n let mesh = video::create_mesh(params, Some(data))?;\n\n Ok(mesh)\n\n}\n\n\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 75, "score": 121948.82803455432 }, { "content": "pub fn cube() -> Result<MeshHandle> {\n\n let texcoords = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]];\n\n\n\n let points = [\n\n [-0.5, -0.5, 0.5],\n\n [0.5, -0.5, 0.5],\n\n [0.5, 0.5, 0.5],\n\n [-0.5, 0.5, 0.5],\n\n [-0.5, -0.5, -0.5],\n\n [0.5, -0.5, -0.5],\n\n [0.5, 0.5, -0.5],\n\n [-0.5, 0.5, -0.5],\n\n ];\n\n\n\n let normals = [\n\n [0.0, 0.0, 1.0],\n\n [1.0, 0.0, 0.0],\n\n [0.0, 0.0, -1.0],\n\n [-1.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0],\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 76, "score": 121948.82803455432 }, { "content": "#[inline]\n\npub fn delete_render_texture(handle: RenderTextureHandle) {\n\n ctx().delete_render_texture(handle)\n\n}\n\n\n\npub(crate) mod inside 
{\n\n use std::sync::Arc;\n\n\n\n use crate::errors::*;\n\n use crate::utils::double_buf::DoubleBuf;\n\n\n\n use super::backends::frame::Frame;\n\n use super::system::VideoSystem;\n\n\n\n pub static mut CTX: *const VideoSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static VideoSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n", "file_path": "src/video/mod.rs", "rank": 77, "score": 121948.82803455432 }, { "content": "pub fn white() -> Result<TextureHandle> {\n\n let mut params = TextureParams::default();\n\n params.dimensions = (2, 2).into();\n\n\n\n let bytes = vec![255; 16];\n\n let data = TextureData {\n\n bytes: vec![bytes.into_boxed_slice()],\n\n };\n\n\n\n let texture = video::create_texture(params, data)?;\n\n Ok(texture)\n\n}\n", "file_path": "modules/world/src/assets/texture_builder.rs", "rank": 78, "score": 121948.82803455432 }, { "content": "#[test]\n\nfn basic() {\n\n let mut resolver = ShortcutResolver::new();\n\n\n\n resolver.add(\"home:\", \"file://docs/\").unwrap();\n\n resolver.add(\"a:\", \"home:\").unwrap();\n\n resolver.add(\"b:\", \"a:crayon/\").unwrap();\n\n\n\n assert!(resolver.has(\"home:\"));\n\n assert!(resolver.has(\"a:\"));\n\n assert!(resolver.has(\"b:\"));\n\n assert!(!resolver.has(\"abc:\"));\n\n\n\n assert_eq!(resolver.resolve(\"home:\"), Some(\"file://docs/\".into()));\n\n assert_eq!(resolver.resolve(\"b:\"), Some(\"file://docs/crayon/\".into()));\n\n\n\n resolver.add(\"home:\", \"http://www.rust-lang.org/\").unwrap();\n\n assert_eq!(\n\n resolver.resolve(\"b:\"),\n\n Some(\"http://www.rust-lang.org/crayon/\".into())\n\n );\n\n}\n\n\n", "file_path": "tests/shortcut.rs", "rank": 79, "score": 121844.22437859124 }, { "content": "#[test]\n\nfn basic_2() {\n\n let url = Url::new(\"https://shawn:123456@www.rust-lang.org:8080/en-US/index.html\").unwrap();\n\n assert_eq!(url.schema(), \"https\");\n\n assert_eq!(url.username(), Some(\"shawn\"));\n\n assert_eq!(url.password(), Some(\"123456\"));\n\n 
assert_eq!(url.host(), \"www.rust-lang.org\");\n\n assert_eq!(url.port(), Some(\"8080\"));\n\n assert_eq!(url.path(), \"/en-US/index.html\");\n\n assert_eq!(url.fragment(), None);\n\n assert_eq!(url.queries(), None);\n\n\n\n let url = Url::new(\"https://shawn:123456@www.rust-lang.org:8080/en-US/index.html#abc\").unwrap();\n\n assert_eq!(url.schema(), \"https\");\n\n assert_eq!(url.username(), Some(\"shawn\"));\n\n assert_eq!(url.password(), Some(\"123456\"));\n\n assert_eq!(url.host(), \"www.rust-lang.org\");\n\n assert_eq!(url.port(), Some(\"8080\"));\n\n assert_eq!(url.path(), \"/en-US/index.html\");\n\n assert_eq!(url.fragment(), Some(\"abc\"));\n\n assert_eq!(url.queries(), None);\n", "file_path": "tests/url.rs", "rank": 80, "score": 121844.22437859124 }, { "content": "#[test]\n\nfn err() {\n\n // URL must have a schema.\n\n assert!(Url::new(\"www.rust-lang.org/index.html\").is_err());\n\n assert!(Url::new(\":www.rust-lang.org/index.html\").is_err());\n\n // URL must have a hostname.\n\n assert!(Url::new(\"http://index.html\").is_err());\n\n assert!(Url::new(\"file://index.html\").is_err());\n\n assert!(Url::new(\"file:///index.html\").is_ok());\n\n // URL must have a filename.\n\n assert!(Url::new(\"http://www.rust-lang.org\").is_err());\n\n}\n", "file_path": "tests/url.rs", "rank": 81, "score": 121844.22437859124 }, { "content": "#[test]\n\nfn queries() {\n\n let url = Url::new(\"https://www.rust-lang.org/index.html?key0=value0&key1=value1\").unwrap();\n\n let queries = url.queries().unwrap();\n\n let mut iter = queries.iter();\n\n assert_eq!(iter.next(), Some((&\"key0\".into(), &Some(\"value0\".into()))));\n\n assert_eq!(iter.next(), Some((&\"key1\".into(), &Some(\"value1\".into()))));\n\n assert_eq!(iter.next(), None);\n\n\n\n let url = Url::new(\"https://www.rust-lang.org/index.html?key0=value0&key1=value1#abc\").unwrap();\n\n let queries = url.queries().unwrap();\n\n let mut iter = queries.iter();\n\n assert_eq!(iter.next(), Some((&\"key0\".into(), 
&Some(\"value0\".into()))));\n\n assert_eq!(iter.next(), Some((&\"key1\".into(), &Some(\"value1\".into()))));\n\n assert_eq!(iter.next(), None);\n\n assert_eq!(url.fragment(), Some(\"abc\"));\n\n\n\n let url = Url::new(\"https://www.rust-lang.org/index.html?key0=value0&key1#abc\").unwrap();\n\n let queries = url.queries().unwrap();\n\n let mut iter = queries.iter();\n\n assert_eq!(iter.next(), Some((&\"key0\".into(), &Some(\"value0\".into()))));\n\n assert_eq!(iter.next(), Some((&\"key1\".into(), &None)));\n\n assert_eq!(iter.next(), None);\n\n assert_eq!(url.fragment(), Some(\"abc\"));\n\n}\n\n\n", "file_path": "tests/url.rs", "rank": 82, "score": 121844.22437859124 }, { "content": "#[test]\n\nfn err() {\n\n let mut resolver = ShortcutResolver::new();\n\n // Shortcut MUST ends with a colon (':').\n\n assert!(resolver.add(\"home\", \"file://docs/\").is_err());\n\n // Shortcut MUST be at least 2 chars to not be confused with DOS drive letters.\n\n assert!(resolver.add(\":\", \"file://docs/\").is_err());\n\n // Fullname must end in a '/' (dir) or ':' (other shortcut).\n\n assert!(resolver.add(\"home:\", \"file://docs\").is_err());\n\n}\n", "file_path": "tests/shortcut.rs", "rank": 83, "score": 121844.22437859124 }, { "content": "#[test]\n\nfn basic() {\n\n let url = Url::new(\"https://www.rust-lang.org/en-US/index.html\").unwrap();\n\n assert_eq!(url.schema(), \"https\");\n\n assert_eq!(url.username(), None);\n\n assert_eq!(url.password(), None);\n\n assert_eq!(url.host(), \"www.rust-lang.org\");\n\n assert_eq!(url.port(), None);\n\n assert_eq!(url.path(), \"/en-US/index.html\");\n\n assert_eq!(url.fragment(), None);\n\n assert_eq!(url.queries(), None);\n\n\n\n let url = Url::new(\"https://shawn@www.rust-lang.org/en-US/index.html\").unwrap();\n\n assert_eq!(url.schema(), \"https\");\n\n assert_eq!(url.username(), Some(\"shawn\"));\n\n assert_eq!(url.password(), None);\n\n assert_eq!(url.host(), \"www.rust-lang.org\");\n\n assert_eq!(url.port(), None);\n\n 
assert_eq!(url.path(), \"/en-US/index.html\");\n\n assert_eq!(url.fragment(), None);\n\n assert_eq!(url.queries(), None);\n", "file_path": "tests/url.rs", "rank": 84, "score": 121844.22437859124 }, { "content": "type FrameTasks = Mutex<Vec<(Request, Box<dyn FnMut(Response) + Send>)>>;\n\n\n\n#[derive(Default)]\n\npub struct RequestQueue {\n\n // FIXME: Use FnOnce instead of Box<Fn> when its stable.\n\n last_frame_tasks: FrameTasks,\n\n tasks: FrameTasks,\n\n idxes: Mutex<Vec<usize>>,\n\n}\n\n\n\nimpl RequestQueue {\n\n pub fn new() -> Self {\n\n RequestQueue {\n\n last_frame_tasks: Mutex::new(Vec::new()),\n\n tasks: Mutex::new(Vec::new()),\n\n idxes: Mutex::new(Vec::new()),\n\n }\n\n }\n\n\n\n pub fn add<T: FnOnce(Response) + Send + 'static>(&self, request: Request, func: T) {\n", "file_path": "src/res/request.rs", "rank": 85, "score": 121138.1213239299 }, { "content": "#[inline]\n\npub fn is_mouse_press(button: MouseButton) -> bool {\n\n ctx().is_mouse_press(button)\n\n}\n\n\n\n/// Checks if a mouse button has been released during last frame.\n", "file_path": "src/input/mod.rs", "rank": 86, "score": 120671.42941606685 }, { "content": "#[inline]\n\npub fn is_mouse_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_click(button)\n\n}\n\n\n\n/// Checks if a mouse button has been double clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 87, "score": 120671.42941606685 }, { "content": "#[inline]\n\npub fn set_listener<T>(position: T)\n\nwhere\n\n T: Into<Vector3<f32>>,\n\n{\n\n ctx().set_listener(position);\n\n}\n\n\n\n/// Creates a clip object from file asynchronously.\n", "file_path": "modules/audio/src/lib.rs", "rank": 88, "score": 120671.42941606685 }, { "content": "#[inline]\n\npub fn is_mouse_release(button: MouseButton) -> bool {\n\n ctx().is_mouse_release(button)\n\n}\n\n\n\n/// Checks if a mouse button has been clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 89, "score": 120671.42941606685 }, { "content": "pub 
fn resume_unwinding(payload: Box<Any + Send>) -> ! {\n\n panic::resume_unwind(payload)\n\n}\n\n\n\npub struct AbortIfPanic;\n\n\n\nimpl Drop for AbortIfPanic {\n\n fn drop(&mut self) {\n\n writeln!(&mut io::stderr(), \"detected unexpected panic; aborting\").unwrap();\n\n process::abort();\n\n }\n\n}\n", "file_path": "src/sched/unwind.rs", "rank": 90, "score": 119878.03474122979 }, { "content": "#[test]\n\nfn retain() {\n\n let mut set: HandlePool<Handle> = HandlePool::new();\n\n for _ in 0..10 {\n\n set.create();\n\n }\n\n\n\n set.retain(|e| e.index() % 2 == 0);\n\n\n\n for v in &set {\n\n assert!(v.index() % 2 == 0);\n\n }\n\n}\n\n\n", "file_path": "tests/handle_pool.rs", "rank": 91, "score": 118906.66391915554 }, { "content": "#[test]\n\nfn basic() {\n\n let mut set = ObjectPool::<Handle, i32>::new();\n\n\n\n let e1 = set.create(3);\n\n assert_eq!(set.get(e1), Some(&3));\n\n assert_eq!(set.len(), 1);\n\n assert_eq!(set.free(e1), Some(3));\n\n assert_eq!(set.len(), 0);\n\n assert_eq!(set.get(e1), None);\n\n assert_eq!(set.free(e1), None);\n\n assert_eq!(set.len(), 0);\n\n}\n\n\n", "file_path": "tests/object_pool.rs", "rank": 92, "score": 118906.66391915554 }, { "content": "#[inline]\n\npub fn shader_state(handle: ShaderHandle) -> ResourceState {\n\n ctx().shader_state(handle)\n\n}\n\n\n\n/// Delete shader state object.\n", "file_path": "src/video/mod.rs", "rank": 93, "score": 118746.8004301533 }, { "content": "#[inline]\n\npub fn surface_state(handle: SurfaceHandle) -> ResourceState {\n\n ctx().surface_state(handle)\n\n}\n\n\n\n/// Deletes surface object.\n", "file_path": "src/video/mod.rs", "rank": 94, "score": 118746.8004301533 }, { "content": "#[inline]\n\npub fn texture_state(handle: TextureHandle) -> ResourceState {\n\n ctx().texture_state(handle)\n\n}\n\n\n\n/// Update a contiguous subregion of an existing two-dimensional texture object.\n", "file_path": "src/video/mod.rs", "rank": 95, "score": 118746.8004301533 }, { "content": "#[inline]\n\npub fn 
mesh_state(handle: MeshHandle) -> ResourceState {\n\n ctx().mesh_state(handle)\n\n}\n\n\n\n/// Update a subset of dynamic vertex buffer. Use `offset` specifies the offset\n\n/// into the buffer object's data store where data replacement will begin, measured\n\n/// in bytes.\n", "file_path": "src/video/mod.rs", "rank": 96, "score": 118746.8004301533 }, { "content": "#[inline]\n\npub fn is_mouse_double_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_double_click(button)\n\n}\n\n\n\n/// Gets the mouse position relative to the lower-left hand corner of the window.\n", "file_path": "src/input/mod.rs", "rank": 97, "score": 118746.8004301533 }, { "content": "#[inline]\n\npub fn sample_i16_to_f32(sample: i16) -> f32 {\n\n if sample < 0 {\n\n sample as f32 / -(::std::i16::MIN as f32)\n\n } else {\n\n sample as f32 / ::std::i16::MAX as f32\n\n }\n\n}\n\n\n", "file_path": "modules/audio/src/mixer/sampler.rs", "rank": 98, "score": 116921.6248180251 }, { "content": "\n\n self.remap.insert(ent, self.entities.len());\n\n self.entities.push(ent);\n\n self.nodes.push(Node::default());\n\n self.local_transforms.push(Transform::default());\n\n self.roots.insert(ent);\n\n }\n\n\n\n /// Removes a node and all of its descendants from SceneGraph.\n\n pub(crate) fn remove(&mut self, ent: Entity) -> Option<Vec<Entity>> {\n\n if self.remap.contains_key(&ent) {\n\n self.remove_from_parent(ent, false).unwrap();\n\n self.roots.remove(&ent);\n\n\n\n let removes: Vec<_> = iter::once(ent).chain(self.descendants(ent)).collect();\n\n for w in removes.iter() {\n\n let index = self.remap.remove(w).unwrap();\n\n self.entities.swap_remove(index);\n\n self.nodes.swap_remove(index);\n\n self.local_transforms.swap_remove(index);\n", "file_path": "modules/world/src/spatial/graph.rs", "rank": 99, "score": 39.47764895087121 } ]
Rust
day-11/src/main.rs
mmehrten/advent-of-code-2021
f04ac08718f4bda3d24ecf255083dac7b6b2cf8a
use std::collections::HashSet; use std::fs::File; use std::io::Error; use std::io::{BufRead, BufReader}; fn parse_file_path(args: &[String]) -> &str { if args.len() != 2 { panic!( "Expected one file path and an optional window size to run against, got: {} arguments", args.len() - 1 ); } let input_path = &args[1]; input_path.as_str() } #[cfg(test)] mod test_parse_file_path { use crate::parse_file_path; #[test] fn one_arg_ok() { assert_eq!( parse_file_path(&vec!["script_path".to_string(), "arg_text".to_string()][..]), "arg_text" ); } #[test] #[should_panic] fn no_arg_fail() { parse_file_path(&Vec::new()); } #[test] #[should_panic] fn many_arg_fail() { parse_file_path( &vec![ "script_path".to_string(), "arg_text".to_string(), "extra_arg".to_string(), ][..], ); } } fn get_buf_reader(input_path: &str) -> BufReader<File> { let contents = File::open(input_path).expect(format!("Error reading file: {}", input_path).as_str()); let reader = BufReader::new(contents); reader } #[cfg(test)] mod test_get_buf_reader { use crate::get_buf_reader; #[test] #[should_panic] fn error_file_handled() { get_buf_reader("inputs/noexist.txt"); } #[test] fn example_file_handled() { get_buf_reader("inputs/example.txt"); } } struct Field { spaces: Vec<usize>, width: usize, } static ACTIVATION_ENERGY: usize = 9; impl Field { fn len(&self) -> usize { self.spaces.len() } fn neighbors(&self, idx: usize) -> Vec<usize> { let mut neighbors = Vec::new(); let has_above = idx >= self.width; let has_left = idx % self.width != 0; let has_right = idx % self.width != self.width - 1; let has_below = idx < self.spaces.len() - self.width; if has_above { neighbors.push(idx - self.width); } if has_left { neighbors.push(idx - 1); } if has_right { neighbors.push(idx + 1); } if has_below { neighbors.push(idx + self.width); } if has_above && has_left { neighbors.push(idx - 1 - self.width) } if has_above && has_right { neighbors.push(idx + 1 - self.width) } if has_below && has_left { neighbors.push(idx - 1 + self.width) 
} if has_below && has_right { neighbors.push(idx + 1 + self.width) } neighbors } fn parse_line(line: Result<String, Error>) -> Vec<usize> { line.expect("Failed to parse line from file.") .split("") .filter(|s| s != &"") .map(|s| { s.parse::<usize>() .expect("Failed to parse integer from inputs.") }) .collect::<Vec<usize>>() } fn parse_line_into(&mut self, line: Result<String, Error>) { self.spaces.extend(Field::parse_line(line)); } fn increase_total_energy(&mut self) { for idx in 0..self.len() { self.spaces[idx] += 1; } } fn try_activate_node(&mut self, idx: usize, activations: &mut HashSet<usize>) { if self.spaces[idx] <= ACTIVATION_ENERGY || activations.contains(&idx) { return; } activations.insert(idx); for neighbor in self.neighbors(idx) { self.spaces[neighbor] += 1; self.try_activate_node(neighbor, activations); } } fn try_activate_all(&mut self, activations: &mut HashSet<usize>) { for idx in 0..self.len() { self.try_activate_node(idx, activations); } } fn deactivate_node(&mut self, idx: usize) { self.spaces[idx] = 0; } } fn solution(input_path: &str, num_iterations: usize) -> (usize, usize) { let reader = get_buf_reader(input_path); let mut lines = reader.lines(); let mut inputs = Vec::new(); inputs.extend(Field::parse_line(lines.next().expect(""))); let array_width = inputs.len(); let mut field = Field { width: array_width, spaces: inputs, }; while let Some(line) = lines.next() { field.parse_line_into(line); } let mut activation_count = 0; let mut step_num = 0; loop { step_num += 1; let mut activations = HashSet::new(); field.increase_total_energy(); field.try_activate_all(&mut activations); if step_num <= num_iterations { activation_count += activations.len(); } if activations.len() == field.len() { return (activation_count, step_num); } for idx in activations { field.deactivate_node(idx); } } } fn main() { let args: Vec<String> = std::env::args().collect(); let input_path = parse_file_path(&args); let (activation_count, sync_step_count) = 
solution(input_path, 100); println!( "Total activation count after 100 steps: {:?}", activation_count ); println!("Steps to flash synchronization: {:?}", sync_step_count); } #[cfg(test)] mod test_solution { use crate::solution; #[test] fn example_correct() { assert_eq!(solution("inputs/example.txt", 100), (1656, 195)); } #[test] fn question_correct() { assert_eq!(solution("inputs/challenge.txt", 100), (1613, 510)); } }
use std::collections::HashSet; use std::fs::File; use std::io::Error; use std::io::{BufRead, BufReader}; fn parse_file_path(args: &[String]) -> &str { if args.len() != 2 { panic!( "Expected one file path and an optional window size to run against, got: {} arguments
("inputs/challenge.txt", 100), (1613, 510)); } }
", args.len() - 1 ); } let input_path = &args[1]; input_path.as_str() } #[cfg(test)] mod test_parse_file_path { use crate::parse_file_path; #[test] fn one_arg_ok() { assert_eq!( parse_file_path(&vec!["script_path".to_string(), "arg_text".to_string()][..]), "arg_text" ); } #[test] #[should_panic] fn no_arg_fail() { parse_file_path(&Vec::new()); } #[test] #[should_panic] fn many_arg_fail() { parse_file_path( &vec![ "script_path".to_string(), "arg_text".to_string(), "extra_arg".to_string(), ][..], ); } } fn get_buf_reader(input_path: &str) -> BufReader<File> { let contents = File::open(input_path).expect(format!("Error reading file: {}", input_path).as_str()); let reader = BufReader::new(contents); reader } #[cfg(test)] mod test_get_buf_reader { use crate::get_buf_reader; #[test] #[should_panic] fn error_file_handled() { get_buf_reader("inputs/noexist.txt"); } #[test] fn example_file_handled() { get_buf_reader("inputs/example.txt"); } } struct Field { spaces: Vec<usize>, width: usize, } static ACTIVATION_ENERGY: usize = 9; impl Field { fn len(&self) -> usize { self.spaces.len() } fn neighbors(&self, idx: usize) -> Vec<usize> { let mut neighbors = Vec::new(); let has_above = idx >= self.width; let has_left = idx % self.width != 0; let has_right = idx % self.width != self.width - 1; let has_below = idx < self.spaces.len() - self.width; if has_above { neighbors.push(idx - self.width); } if has_left { neighbors.push(idx - 1); } if has_right { neighbors.push(idx + 1); } if has_below { neighbors.push(idx + self.width); } if has_above && has_left { neighbors.push(idx - 1 - self.width) } if has_above && has_right { neighbors.push(idx + 1 - self.width) } if has_below && has_left { neighbors.push(idx - 1 + self.width) } if has_below && has_right { neighbors.push(idx + 1 + self.width) } neighbors } fn parse_line(line: Result<String, Error>) -> Vec<usize> { line.expect("Failed to parse line from file.") .split("") .filter(|s| s != &"") .map(|s| { s.parse::<usize>() 
.expect("Failed to parse integer from inputs.") }) .collect::<Vec<usize>>() } fn parse_line_into(&mut self, line: Result<String, Error>) { self.spaces.extend(Field::parse_line(line)); } fn increase_total_energy(&mut self) { for idx in 0..self.len() { self.spaces[idx] += 1; } } fn try_activate_node(&mut self, idx: usize, activations: &mut HashSet<usize>) { if self.spaces[idx] <= ACTIVATION_ENERGY || activations.contains(&idx) { return; } activations.insert(idx); for neighbor in self.neighbors(idx) { self.spaces[neighbor] += 1; self.try_activate_node(neighbor, activations); } } fn try_activate_all(&mut self, activations: &mut HashSet<usize>) { for idx in 0..self.len() { self.try_activate_node(idx, activations); } } fn deactivate_node(&mut self, idx: usize) { self.spaces[idx] = 0; } } fn solution(input_path: &str, num_iterations: usize) -> (usize, usize) { let reader = get_buf_reader(input_path); let mut lines = reader.lines(); let mut inputs = Vec::new(); inputs.extend(Field::parse_line(lines.next().expect(""))); let array_width = inputs.len(); let mut field = Field { width: array_width, spaces: inputs, }; while let Some(line) = lines.next() { field.parse_line_into(line); } let mut activation_count = 0; let mut step_num = 0; loop { step_num += 1; let mut activations = HashSet::new(); field.increase_total_energy(); field.try_activate_all(&mut activations); if step_num <= num_iterations { activation_count += activations.len(); } if activations.len() == field.len() { return (activation_count, step_num); } for idx in activations { field.deactivate_node(idx); } } } fn main() { let args: Vec<String> = std::env::args().collect(); let input_path = parse_file_path(&args); let (activation_count, sync_step_count) = solution(input_path, 100); println!( "Total activation count after 100 steps: {:?}", activation_count ); println!("Steps to flash synchronization: {:?}", sync_step_count); } #[cfg(test)] mod test_solution { use crate::solution; #[test] fn example_correct() { 
assert_eq!(solution("inputs/example.txt", 100), (1656, 195)); } #[test] fn question_correct() { assert_eq!(solution
random
[ { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-7/src/main.rs", "rank": 0, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-12/src/main.rs", "rank": 1, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn 
parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-13/src/main.rs", "rank": 2, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-9/src/main.rs", "rank": 4, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n 
input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-15/src/main.rs", "rank": 5, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-3/src/main.rs", "rank": 6, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n", "file_path": "day-2/src/main.rs", "rank": 7, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # 
Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-8/src/main.rs", "rank": 8, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-10/src/main.rs", "rank": 9, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} 
arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-6/src/main.rs", "rank": 10, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-5/src/main.rs", "rank": 11, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n 
parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "template/src/main.rs", "rank": 12, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-4/src/main.rs", "rank": 13, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-16/src/main.rs", "rank": 14, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> &str {\n\n if args.len() != 2 {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n input_path.as_str()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n", "file_path": "day-14/src/main.rs", "rank": 15, "score": 183926.5035116296 }, { "content": "/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n\nfn parse_file_path(args: &[String]) -> (&str, usize) {\n\n if !(args.len() == 2 || args.len() == 3) {\n\n panic!(\n\n \"Expected one file path and an optional window size to run against, got: {} arguments\",\n\n args.len() - 1\n\n );\n\n }\n\n let input_path = &args[1];\n\n if args.len() == 2 {\n\n return (input_path.as_str(), 1);\n\n }\n\n let window_size = &args[2]\n\n .parse::<usize>()\n\n .expect(\"Failed to parse window size.\");\n\n (input_path.as_str(), *window_size)\n\n}\n\n\n", "file_path": "day-1/src/main.rs", "rank": 16, "score": 175541.21128187288 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n 
reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 17, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-4/src/main.rs", "rank": 18, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-8/src/main.rs", "rank": 19, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn 
get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-15/src/main.rs", "rank": 21, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-3/src/main.rs", "rank": 22, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n 
#[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n\nconst NEW_FISH_TTR: usize = 8;\n\nconst OLD_FISH_TTR: usize = 6;\n\n\n", "file_path": "day-6/src/main.rs", "rank": 23, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-9/src/main.rs", "rank": 24, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 25, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n 
File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-7/src/main.rs", "rank": 26, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 27, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-5/src/main.rs", 
"rank": 28, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "template/src/main.rs", "rank": 29, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n", "file_path": "day-2/src/main.rs", "rank": 30, "score": 145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n", "file_path": "day-14/src/main.rs", "rank": 31, "score": 
145247.97739020686 }, { "content": "/// Open an input path and return a buffered reader over the contents.\n\nfn get_buf_reader(input_path: &str) -> BufReader<File> {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n reader\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_get_buf_reader {\n\n use crate::get_buf_reader;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn error_file_handled() {\n\n get_buf_reader(\"inputs/noexist.txt\");\n\n }\n\n\n\n #[test]\n\n fn example_file_handled() {\n\n get_buf_reader(\"inputs/example.txt\");\n\n }\n\n}\n\n\n\nconst OPENERS: [&str; 4] = [\"(\", \"{\", \"[\", \"<\"];\n\nconst CLOSERS: [&str; 4] = [\")\", \"}\", \"]\", \">\"];\n\nconst MALFORMED_SCORES: [usize; 4] = [3, 1197, 57, 25137];\n\nconst INCOMPLETE_SCORES: [usize; 4] = [1, 3, 2, 4];\n\n\n", "file_path": "day-10/src/main.rs", "rank": 32, "score": 145247.97739020686 }, { "content": "/// Parse an input file path, counting the number of numeric increases in the file.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - the OS fully qualified path to the file containing the input data.\n\n/// * `window_size` - the number of lines to include in a sliding comparison\n\n///\n\n/// # Returns\n\n///\n\n/// The count of lines whose numeric value are greater than the preceding value.\n\n///\n\n/// # Examples\n\n///\n\n/// ## `window_size = 1`\n\n///\n\n/// For example, suppose you had the following input file:\n\n///\n\n/// ```\n\n/// 199\n\n/// 200\n\n/// 208\n\n/// 210\n\n/// 200\n\n/// 207\n\n/// 240\n\n/// 269\n\n/// 260\n\n/// 263\n\n/// ```\n\n/// We count the number of times a line increases from the previous line. 
(There is no measurement before the first measurement.)\n\n/// In this example, the changes are as follows:\n\n///\n\n/// ```\n\n/// 199 (N/A - no previous measurement)\n\n/// 200 (increased)\n\n/// 208 (increased)\n\n/// 210 (increased)\n\n/// 200 (decreased)\n\n/// 207 (increased)\n\n/// 240 (increased)\n\n/// 269 (increased)\n\n/// 260 (decreased)\n\n/// 263 (increased)\n\n/// ```\n\n///\n\n/// In this example, there are 7 lines that are larger than the previous, so we return 7.\n\n///\n\n/// ## `window_size = 3`\n\n///\n\n/// Considering a sliding window, we can compare sets of lines rather than individual lines:\n\n///\n\n/// ```\n\n/// 199 A \n\n/// 200 A B \n\n/// 208 A B C \n\n/// 210 B C D\n\n/// 200 E C D\n\n/// 207 E F D\n\n/// 240 E F G \n\n/// 269 F G H\n\n/// 260 G H\n\n/// 263 H\n\n/// ```\n\n///\n\n/// Start by comparing the first and second three-measurement windows.\n\n/// The measurements in the first window are marked A (199, 200, 208); their sum is 199 + 200 + 208 = 607.\n\n/// The second window is marked B (200, 208, 210); its sum is 618.\n\n/// The sum of measurements in the second window is larger than the sum of the first, so this first comparison increased.\n\n/// \n\n/// In this example, the sum of each three-measurement window is as follows:\n\n///\n\n/// ```\n\n/// A: 607 (N/A - no previous sum)\n\n/// B: 618 (increased)\n\n/// C: 618 (no change)\n\n/// D: 617 (decreased)\n\n/// E: 647 (increased)\n\n/// F: 716 (increased)\n\n/// G: 769 (increased)\n\n/// H: 792 (increased)\n\n/// ```\n\n///\n\n/// Leading to 5 windows with an increase.\n\nfn count_numeric_increases(input_path: &str, window_size: usize) -> i32 {\n\n // Create a buffer to read the file line by line\n\n let contents =\n\n File::open(input_path).expect(format!(\"Error reading file: {}\", input_path).as_str());\n\n let reader = BufReader::new(contents);\n\n\n\n // Read each number into a window, removing stale window elements as we traverse the file\n\n let mut window: 
VecDeque<i32> = VecDeque::new();\n\n let mut count_increases = 0;\n\n\n\n for line in reader.lines() {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n let number = line\n\n .parse::<i32>()\n\n .expect(\"Error parsing number from file.\");\n\n\n\n // If the window is the expected size, then we've parsed at least window_size numbers out of the file and can compare\n\n if window.len() == window_size {\n\n // Get the size of the old window\n\n let old_size: i32 = window.iter().sum();\n", "file_path": "day-1/src/main.rs", "rank": 33, "score": 142816.67899811428 }, { "content": "fn clean_input(line: &str) -> Vec<String> {\n\n line.split(\" \")\n\n .map(sort_string)\n\n .filter(|s| s != \"\")\n\n .collect()\n\n}\n\n\n", "file_path": "day-8/src/main.rs", "rank": 34, "score": 130174.9365721362 }, { "content": "/// Count the number of viable paths from the starting node to the ending node in a graph.\n\n///\n\n/// There are two types of graph nodes:\n\n///\n\n/// * Large nodes - can be visited any number of times in a traversal, denoted by an uppercase node name\n\n/// * Small nodes - can be visited only once in a traversal, denoted by a lowercase node name\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - The input file path containing the graph to traverse.\n\n///\n\n/// # Returns\n\n///\n\n/// The number of distinct paths from start to end.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// The following example has 10 paths:\n\n///\n\n/// ```\n\n/// start-A\n\n/// start-b\n\n/// A-c\n\n/// A-b\n\n/// b-d\n\n/// A-end\n\n/// b-end\n\n/// ```\n\n///\n\n/// ```\n\n/// start\n\n// / \\\n\n// c--A-----b--d\n\n// \\ /\n\n// end\n\n// ```\n\nfn solution(input_path: &str) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n Graph::from_lines(reader.lines()).get_paths_to_end_dfs()\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 35, "score": 114217.10247525765 }, { "content": "/// Counts the number of occurrences of digits 1, 
4, 7, and 8 in an encoded input file.\n\n///\n\n/// Encoding is a random string of characters, where each group of characters represents the representation\n\n/// of the digit in a seven-segment display:\n\n///\n\n/// ```\n\n/// aaaa \n\n/// b c\n\n/// b c \n\n/// dddd \n\n/// e f\n\n/// e f \n\n/// gggg \n\n/// ```\n\n///\n\n/// E.g. cf here would represent a one.\n\n///\n\n/// The input file contains many different encodings, where a/b/c etc. are randomly mapped to a digit segment:\n\n///\n\n/// ```\n\n/// be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb\n\n/// 1 8 9/6/0 9/6/0 4 2/3/5 9/6/0 2/3/5 2/3/5 7\n\n/// ```\n\n///\n\n/// This string\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the encoded data\n\n///\n\n/// # Returns\n\n///\n\n/// The occurences of 1, 4, 7, and 8 in the output data.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// In the following example, we see that there are 26 instances of 1, 4, 7, and 8.\n\n///\n\n/// ```\n\n/// be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe\n\n/// edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc\n\n/// fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg\n\n/// fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb\n\n/// aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea\n\n/// fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb\n\n/// dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe\n\n/// bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef\n\n/// egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb\n\n/// gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce\n\n/// ```\n\nfn solution(input_path: &str) -> i32 {\n\n let reader = 
get_buf_reader(input_path);\n\n let mut digit_sum = 0;\n\n for line in reader.lines() {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n let (digits, outputs) = line\n\n .split_once(\" | \")\n\n .expect(\"Failed to parse input line into digits.\");\n\n\n\n let digits: Vec<String> = clean_input(digits);\n\n let outputs: Vec<String> = clean_input(outputs);\n\n\n\n let mut digit_map = HashMap::new();\n\n for digit in &digits {\n\n match digit.len() {\n\n 2 => {\n\n let _ = digit_map.insert(1, digit);\n\n }\n\n 3 => {\n\n let _ = digit_map.insert(7, digit);\n", "file_path": "day-8/src/main.rs", "rank": 36, "score": 114215.80092324567 }, { "content": "/// Parse a packet of binary into hex, using an unnecessarily complex encoding scheme.\n\n/// # Arguments\n\n///\n\n/// * `input_path` - The input file path containing the packets to parse.\n\n///\n\n/// # Returns\n\n///\n\n/// The evaluated packet data.\n\nfn solution(input_path: &str) -> Vec<usize> {\n\n get_buf_reader(input_path)\n\n .lines()\n\n .map(|line| {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n println!(\"----------------\");\n\n println!(\"Starting hex: {}\", line);\n\n let mut seq = PacketSequence::new(line);\n\n seq.evaluate()\n\n })\n\n .collect::<Vec<usize>>()\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 37, "score": 108890.9565596149 }, { "content": "/// TODO\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path TODO.\n\n///\n\n/// # Returns\n\n///\n\n/// TODO.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n/// \n\n/// TODO\n\nfn solution(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let lines = reader.lines();\n\n for line in lines {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n }\n\n (0, 0)\n\n}\n\n\n", "file_path": "template/src/main.rs", "rank": 38, "score": 108889.34955450839 }, { "content": "/// Finds all local minima in an input array of values, and returns 
the sum of their risk values, as well as the product of all basin sizes around the minima.\n\n///\n\n/// A local minima is any point in the array that is lower than its adjacent up, down, left, and right points.\n\n///\n\n/// A risk value is one plus the local minima value.\n\n///\n\n/// A basin is all points that lead into a local minima.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the array of values.\n\n///\n\n/// # Returns\n\n///\n\n/// The sum of the local minima's risk values\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// The following array has 4 local minima, with a total risk value of 15:\n\n/// ```\n\n/// 2199943210 // width 10\n\n/// 3987894921\n\n/// 9856789892\n\n/// 8767896789\n\n/// 9899965678\n\n/// ```\n\nfn solution(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let mut lines = reader.lines();\n\n let mut inputs = Vec::new();\n\n\n\n // Method used to parse a single iteration of the input file\n\n let parse_line = |line: Option<Result<String, Error>>| {\n\n line.expect(\"Failed to parse line from file.\")\n\n .expect(\"Failed to parse line from file.\")\n\n .split(\"\")\n\n .filter(|s| s != &\"\")\n\n .map(|s| {\n\n s.parse::<i32>()\n\n .expect(\"Failed to parse integer from inputs.\")\n\n })\n\n .collect::<Vec<i32>>()\n\n };\n\n\n\n // Parse just the first line to determine the overall width of the inputs\n\n inputs.extend(parse_line(lines.next()));\n", "file_path": "day-9/src/main.rs", "rank": 39, "score": 108888.80297444396 }, { "content": "/// Determine the closest common value between a set of numbers, and the overall difference between the values and the common value.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing integers to align.\n\n///\n\n/// # Returns\n\n///\n\n/// The closest common value, and the total distance of the points from the common value.\n\n///\n\n/// # Examples\n\n///\n\n/// ## 
Basic\n\n///\n\n/// For examples, given the numbers 16,1,2,0,4,2,7,1,2,14, the closest common value between them is 2,\n\n/// with a total overall difference of 37 (16 - 2 + ... + 14 - 2).\n\nfn solution(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let to_align: Vec<i32> = reader\n\n .lines()\n\n .map(|line| {\n\n line.expect(\"Failed to read line from file\")\n\n .split(\",\")\n\n .map(|s| s.parse::<i32>().expect(\"Failed to parse value from file.\"))\n\n .collect::<Vec<i32>>()\n\n })\n\n .flatten()\n\n .collect();\n\n let smallest_val = *to_align\n\n .iter()\n\n .min()\n\n .expect(\"Failed to parse population data\");\n\n let largest_val = *to_align\n\n .iter()\n\n .max()\n\n .expect(\"Failed to parse population data\");\n", "file_path": "day-7/src/main.rs", "rank": 40, "score": 108886.65280814731 }, { "content": "/// Parse a bingo game as inputs and report a winning board, as well as the worst-losing board, scores.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the bingo game.\n\n///\n\n/// # Returns\n\n///\n\n/// The score of the winning board and worst-losing board.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n/// Bingo input has the format:\n\n///\n\n/// ```\n\n/// 7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1\n\n///\n\n/// 22 13 17 11 0\n\n/// 8 2 23 4 24\n\n/// 21 9 14 16 7\n\n/// 6 10 3 18 5\n\n/// 1 12 20 15 19\n\n///\n\n/// 3 15 0 2 22\n\n/// 9 18 13 17 5\n\n/// 19 8 7 25 23\n\n/// 20 11 10 24 4\n\n/// 14 21 16 12 6\n\n///\n\n/// 14 21 17 24 4\n\n/// 10 16 15 9 19\n\n/// 18 8 23 26 20\n\n/// 22 11 13 6 5\n\n/// 2 0 12 3 7\n\n/// ```\n\n///\n\n/// Where the first row indicates the order of bingo calls.\n\n///\n\n/// Returns the score of the winning board can now be calculated.\n\n/// The score is calculated by:\n\n///\n\n/// * The sum of all unmarked numbers on the winning board\n\n/// * Multiplied by the number that caused the board to 
win\n\n///\n\n/// So in this case 188 * 24 = 4512 for the best board, and 148 * 13 = 1924 for the worst\n\nfn solution(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let mut lines = reader.lines().map(|l| l.unwrap());\n\n let mut calls: Vec<String> = lines\n\n .next()\n\n .expect(\"Failed to parse moves from input\")\n\n // .expect(\"Failed to parse moves from input\")\n\n .split(\",\")\n\n .map(|x| x.to_string())\n\n .collect();\n\n\n\n let mut winning_scores: Vec<Solution> = Vec::new();\n\n\n\n let mut board_repr = Vec::new();\n\n let mut board_dim: Option<usize> = None; // Set on first iteration\n\n let mut expected_size: Option<usize> = None;\n\n for line in lines {\n\n let entry: Vec<String> = line\n\n .split(\" \")\n\n .filter(|x| x.trim() != \"\")\n", "file_path": "day-4/src/main.rs", "rank": 41, "score": 108885.38012639614 }, { "content": "/// Return the syntax error score and the \"middle\" autocomplete score in a given file of (), [], {}, <> characters.\n\n///\n\n/// A syntax error is any malformed / unclosed combination of opening and closing characters.\n\n///\n\n/// Each malformed line gets a syntax error score based on the first incorrect character, with point values being:\n\n/// \n\n/// * ): 3 points.\n\n/// * ]: 57 points.\n\n/// * }: 1197 points.\n\n/// * >: 25137 points.\n\n///\n\n/// The overall syntax error score is the sum of the scores for each line, with point values being:\n\n///\n\n/// Each incomplete line gets an autocomplete score based on the characters needed to complete the line,\n\n///\n\n/// * ): 1 point.\n\n/// * ]: 2 points.\n\n/// * }: 3 points.\n\n/// * >: 4 points.\n\n///\n\n/// Starting with a total score of 0, then, for each character, multipling the total score by 5 and increasing the total score by the point value given for the character.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the characters to check.\n\n///\n\n/// # Returns\n\n///\n\n/// 
The syntax error score.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// The syntax lines below have an overall score of 26397, and the middle-most incomplete score of 288957:\n\n/// ```\n\n/// [({(<(())[]>[[{[]{<()<>>\n\n/// [(()[<>])]({[<{<<[]>>(\n\n/// {([(<{}[<>[]}>{[]{[(<()>\n\n/// (((({<>}<{<{<>}{[]{[]{}\n\n/// [[<[([]))<([[{}[[()]]]\n\n/// [{[{({}]{}}([{[{{{}}([]\n\n/// {<[[]]>}<{[{[{[]{()[[[]\n\n/// [<(<(<(<{}))><([]([]()\n\n/// <{([([[(<>()){}]>(<<{{\n\n/// <{([{{}}[<[[[<>{}]]]>[]]\n\n/// ```\n\nfn solution(input_path: &str) -> (usize, usize) {\n\n let reader = get_buf_reader(input_path);\n\n let lines = reader.lines();\n\n let mut syntax_score = 0;\n\n let mut incomplete_scores = Vec::new();\n\n for line in lines {\n\n let line = line\n\n .expect(\"Failed to parse line from file.\")\n\n .split(\"\")\n\n .map(|s| s.trim().to_string())\n\n .filter(|s| s != &\"\")\n\n .collect::<Vec<String>>();\n\n let mut char_deque = VecDeque::new();\n\n let mut is_malformed = false;\n\n for c in line {\n\n for (idx, open) in OPENERS.iter().enumerate() {\n\n if c != *open {\n\n continue;\n\n }\n\n char_deque.push_back(CLOSERS[idx]);\n", "file_path": "day-10/src/main.rs", "rank": 42, "score": 108884.97003063193 }, { "content": "/// Record movements of forward, up, and down to retrieve the final (horizontal, depth) coordinates of the movements.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the movements\n\n///\n\n/// # Returns\n\n///\n\n/// The (horizontal, depth) coordinates of the final position.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// For example, the movements:\n\n///\n\n/// ```\n\n/// forward 5\n\n/// down 5\n\n/// forward 8\n\n/// up 3\n\n/// down 8\n\n/// forward 2\n\n/// ```\n\n///\n\n/// Would produce a final position of (15, 10).\n\n///\n\nfn record_movements(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let (mut horizontal, mut depth) = (0, 0);\n\n for 
line in reader.lines() {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n let mut parts: Vec<&str> = line.split(\" \").collect();\n\n if parts.len() != 2 {\n\n panic!(\"Got unreadable line: {}\", line);\n\n }\n\n let score = parts\n\n .pop()\n\n .unwrap()\n\n .parse::<i32>()\n\n .expect(\"Failed to parse movement size.\");\n\n let key = parts.pop().unwrap();\n\n match key {\n\n \"forward\" => horizontal += score,\n\n \"up\" => depth -= score,\n\n \"down\" => depth += score,\n\n _ => panic!(\"Unknown direction: {}\", line),\n\n }\n\n }\n\n (horizontal, depth)\n\n}\n\n\n", "file_path": "day-2/src/main.rs", "rank": 43, "score": 106440.5674334309 }, { "content": "/// Record movements of forward, up, and down to retrieve the final (horizontal, depth) coordinates of the movements.\n\n///\n\n/// Records movements using *aim* concept, where rather than simply changing directions, up/down movements just adjust\n\n/// an aim factor, with only forward movements impacting depth.\n\n///\n\n/// * down X increases aim by X units.\n\n/// * up X decreases aim by X units.\n\n/// * forward X does two things:\n\n/// * It increases horizontal position by X units.\n\n/// * It increases depth by your aim multiplied by X.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the movements\n\n///\n\n/// # Returns\n\n///\n\n/// The (horizontal, depth) coordinates of the final position.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// For example, the movements:\n\n///\n\n/// ```\n\n/// forward 5\n\n/// down 5\n\n/// forward 8\n\n/// up 3\n\n/// down 8\n\n/// forward 2\n\n/// ```\n\n///\n\n/// Would produce a final position of (15, 60).\n\n///\n\nfn record_movements_with_aim(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n let (mut horizontal, mut depth, mut aim) = (0, 0, 0);\n\n for line in reader.lines() {\n\n let line = line.expect(\"Failed to parse line from file.\");\n\n let mut parts: 
Vec<&str> = line.split(\" \").collect();\n\n if parts.len() != 2 {\n\n panic!(\"Got unreadable line: {}\", line);\n\n }\n\n let score = parts\n\n .pop()\n\n .unwrap()\n\n .parse::<i32>()\n\n .expect(\"Failed to parse movement size.\");\n\n let key = parts.pop().unwrap();\n\n match key {\n\n \"forward\" => {\n\n depth += aim * score;\n\n horizontal += score;\n\n }\n\n \"up\" => aim -= score,\n\n \"down\" => aim += score,\n\n _ => panic!(\"Unknown direction: {}\", line),\n\n }\n\n }\n\n (horizontal, depth)\n\n}\n\n\n", "file_path": "day-2/src/main.rs", "rank": 44, "score": 104151.9158336005 }, { "content": "/// Parse the gamma and epsilon power factors from a binary power report.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing the power report.\n\n///\n\n/// # Returns\n\n///\n\n/// The (gamma rate, epsilon rate) of the power report\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// For example, the power report:\n\n///\n\n/// ```\n\n/// 00100\n\n/// 11110\n\n/// 10110\n\n/// 10111\n\n/// 10101\n\n/// 01111\n\n/// 00111\n\n/// 11100\n\n/// 10000\n\n/// 11001\n\n/// 00010\n\n/// 01010\n\n/// ```\n\n///\n\n/// Considering only the first bit of each number, there are five 0 bits and seven 1 bits. Since the most common bit is 1, the first bit of the gamma rate is 1.\n\n///\n\n/// The most common second bit of the numbers in the diagnostic report is 0, so the second bit of the gamma rate is 0.\n\n///\n\n/// The most common value of the third, fourth, and fifth bits are 1, 1, and 0, respectively, and so the final three bits of the gamma rate are 110.\n\n///\n\n/// So, the gamma rate is the binary number 10110, or 22 in decimal.\n\n///\n\n/// The epsilon rate is calculated in a similar way; rather than use the most common bit, the least common bit from each position is used. 
So, the epsilon rate is 01001, or 9 in decimal.\n\n///\n\n/// Therefore, the we Would produce a final power factors of (22, 9).\n\n/// \n\n/// Both the oxygen generator rating and the CO2 scrubber rating are values that can be found in your diagnostic report - finding them is the tricky part. Both values are located using a similar process that involves filtering out values until only one remains. Before searching for either rating value, start with the full list of binary numbers from your diagnostic report and consider just the first bit of those numbers. Then:\n\n/// \n\n/// * Keep only numbers selected by the bit criteria for the type of rating value for which you are searching. Discard numbers which do not match the bit criteria.\n\n/// * If you only have one number left, stop; this is the rating value for which you are searching.\n\n/// * Otherwise, repeat the process, considering the next bit to the right.\n\n/// \n\n/// The bit criteria depends on which type of rating value you want to find:\n\n/// \n\n/// * To find oxygen generator rating, determine the most common value (0 or 1) in the current bit position, and keep only numbers with that bit in that position. If 0 and 1 are equally common, keep values with a 1 in the position being considered.\n\n/// * To find CO2 scrubber rating, determine the least common value (0 or 1) in the current bit position, and keep only numbers with that bit in that position. If 0 and 1 are equally common, keep values with a 0 in the position being considered.\n\n/// \n\n/// For example, to determine the oxygen generator rating value using the same example diagnostic report from above:\n\n/// \n\n/// * Start with all 12 numbers and consider only the first bit of each number. 
There are more 1 bits (7) than 0 bits (5), so keep only the 7 numbers with a 1 in the first position: 11110, 10110, 10111, 10101, 11100, 10000, and 11001.\n\n/// * Then, consider the second bit of the 7 remaining numbers: there are more 0 bits (4) than 1 bits (3), so keep only the 4 numbers with a 0 in the second position: 10110, 10111, 10101, and 10000.\n\n/// * In the third position, three of the four numbers have a 1, so keep those three: 10110, 10111, and 10101.\n\n/// * In the fourth position, two of the three numbers have a 1, so keep those two: 10110 and 10111.\n\n/// * In the fifth position, there are an equal number of 0 bits and 1 bits (one each). So, to find the oxygen generator rating, keep the number with a 1 in that position: 10111.\n\n/// * As there is only one number left, stop; the oxygen generator rating is 10111, or 23 in decimal.\n\n/// \n\n/// Then, to determine the CO2 scrubber rating value from the same example above:\n\n/// \n\n/// Start again with all 12 numbers and consider only the first bit of each number. There are fewer 0 bits (5) than 1 bits (7), so keep only the 5 numbers with a 0 in the first position: 00100, 01111, 00111, 00010, and 01010.\n\n/// Then, consider the second bit of the 5 remaining numbers: there are fewer 1 bits (2) than 0 bits (3), so keep only the 2 numbers with a 1 in the second position: 01111 and 01010.\n\n/// In the third position, there are an equal number of 0 bits and 1 bits (one each). 
So, to find the CO2 scrubber rating, keep the number with a 0 in that position: 01010.\n\n/// As there is only one number left, stop; the CO2 scrubber rating is 01010, or 10 in decimal.\n\n/// \n\n/// Finally, to find the life support rating, multiply the oxygen generator rating (23) by the CO2 scrubber rating (10) to get 230.\n\nfn read_power_report(input_path: &str) -> (i32, i32) {\n\n let reader = get_buf_reader(input_path);\n\n // Create an array to count zero bits in each number - only two options so if zero is more than half of the lines,\n\n // then zero is the most common bit\n\n let mut zero_byte_counts = Vec::new();\n\n let mut line_count = 0;\n\n\n\n struct ByteCounter {\n\n position: usize,\n\n followers: Vec<String>,\n\n zero_count: i32,\n\n one_count: i32,\n\n to_zero: Box<Option<ByteCounter>>,\n\n to_one: Box<Option<ByteCounter>>,\n\n }\n\n\n\n let mut starting_node = ByteCounter {\n\n position: 0,\n\n followers: Vec::new(),\n\n zero_count: 0,\n", "file_path": "day-3/src/main.rs", "rank": 45, "score": 104150.39448638272 }, { "content": "/// Calculate the lowest cost path between the top left and bottom right corners of a grid.\n\n///\n\n/// Example grid:\n\n///\n\n/// ```\n\n/// 1163751742\n\n/// 1381373672\n\n/// 2136511328\n\n/// 3694931569\n\n/// 7463417111\n\n/// 1319128137\n\n/// 1359912421\n\n/// 3125421639\n\n/// 1293138521\n\n/// 2311944581\n\n/// ```\n\n///\n\n/// Optionally repeat the input grid N times horizontally and vertically, increasing the cost\n\n/// per repitition by 1 each time (cost wrapping back to 1 when over 9).\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - The input file path containing the grid to traverse.\n\n/// * `repetitions` - Number of times to repeat the grid vertically / horizontally.\n\n///\n\n/// # Returns\n\n///\n\n/// The cost of the lowest cost path.\n\nfn solution(input_path: &str, repetitions: usize) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n let f = Field::from_reader(reader, 
repetitions);\n\n f.get_min_cost_dijkstra()\n\n}\n\n\n", "file_path": "day-15/src/main.rs", "rank": 46, "score": 104088.86845804701 }, { "content": "/// Return the number of lanternfish alive after X days given an initial population.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path containing initial lanternfish ages.\n\n/// * `days` - The number of days to count lanternfish over.\n\n///\n\n/// # Returns\n\n///\n\n/// The number of lanternfish after the given duration.\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// Rules for lanternfish growth are as follows:\n\n///\n\n/// * Each lanternfish creates a new lanternfish once every 7 days.\n\n/// * New lanternfish require two additional days for their first cycle (9 days).\n\n///\n\n/// So, suppose you have a lanternfish with an internal timer value of 3:\n\n///\n\n/// * After one day, its internal timer would become 2.\n\n/// * After another day, its internal timer would become 1.\n\n/// * After another day, its internal timer would become 0.\n\n/// * After another day, its internal timer would reset to 6, and it would create a new lanternfish with an internal timer of 8.\n\n/// * After another day, the first lanternfish would have an internal timer of 5, and the second lanternfish would have an internal timer of 7.\n\n///\n\n/// So, given initial ages of 3,4,3,1,2 - in 80 days, the population would be 5934.\n\nfn solution(input_path: &str, days: usize) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n let population: Vec<usize> = reader\n\n .lines()\n\n .map(|line| {\n\n line.expect(\"Failed to read line from file\")\n\n .split(\",\")\n\n .map(|s| s.parse::<usize>().expect(\"Failed to parse age from file.\"))\n\n .collect::<Vec<usize>>()\n\n })\n\n .flatten()\n\n .collect();\n\n\n\n fn add_key<K, V>(hash_map: &mut HashMap<K, V>, key: K, value: V)\n\n where\n\n V: std::ops::Add<Output = V>,\n\n V: std::ops::AddAssign,\n\n K: Eq,\n\n K: PartialEq,\n\n K: 
std::hash::Hash,\n", "file_path": "day-6/src/main.rs", "rank": 47, "score": 104085.48715706689 }, { "content": "/// Parse a set of points from an input, and follow a set of \"fold\" instructions to transform the points.\n\n///\n\n/// For example, the input:\n\n///\n\n/// ```\n\n/// 6,10\n\n/// 0,14\n\n/// 9,10\n\n/// 0,3\n\n/// 10,4\n\n/// 4,11\n\n/// 6,0\n\n/// 6,12\n\n/// 4,1\n\n/// 0,13\n\n/// 10,12\n\n/// 3,4\n\n/// 3,0\n\n/// 8,4\n\n/// 1,10\n\n/// 2,14\n\n/// 8,10\n\n/// 9,0\n\n///\n\n/// fold along y=7\n\n/// fold along x=5\n\n/// ```\n\n///\n\n/// Indicates mapping all points y > 7 down with a vertical reflection, followed by all points with\n\n/// x > 5 left with a horizontal reflection.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - The input file path contianing the points and fold instructions.\n\n/// * `num_folds` - The number of fold instructions to perform.\n\n///\n\n/// # Returns\n\n///\n\n/// The number of dots visible after N folds.\n\nfn solution(input_path: &str, num_folds: usize) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n\n\n let mut points = Vec::new();\n\n let mut folds = Vec::new();\n\n\n\n for line in reader.lines() {\n\n let line = line\n\n .expect(\"Failed to read line from file.\")\n\n .trim()\n\n .replace(\"fold along \", \"\");\n\n let parts = line.split_once(\",\");\n\n if !parts.is_none() {\n\n let (left, right) = parts.unwrap();\n\n points.push((\n\n left.parse::<usize>().unwrap(),\n\n right.parse::<usize>().unwrap(),\n\n ));\n\n }\n\n let parts = line.split_once(\"=\");\n", "file_path": "day-13/src/main.rs", "rank": 48, "score": 101798.34039460591 }, { "content": "/// Parse a polymer creation template and return the final polymer chain after N steps.\n\n///\n\n/// Templates have the form:\n\n///\n\n/// ```\n\n/// NNCB\n\n///\n\n/// CH -> B\n\n/// HH -> N\n\n/// CB -> H\n\n/// NH -> C\n\n/// HB -> C\n\n/// HC -> B\n\n/// HN -> C\n\n/// NN -> C\n\n/// BH -> H\n\n/// NC -> B\n\n/// NB -> B\n\n/// BN -> 
B\n\n/// BB -> N\n\n/// BC -> B\n\n/// CC -> N\n\n/// CN -> C\n\n/// ```\n\n///\n\n/// Where the first line `NNCB` is the polymer template,\n\n/// and the subsequent lines are insertion rules indicating that pairs\n\n/// of letters should have new characters inserted between them (eg. `CH` becomes `CBH`).\n\n///\n\n/// These rules can be applied multiple times to the starting string to create a final\n\n/// polymer chain.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - The input file path containing the polymer rules.\n\n/// * `num_steps` - The number of times to apply insertion rules\n\n///\n\n/// # Returns\n\n///\n\n/// The quantity of the most common element minus the quantity of the least common element after N steps.\n\nfn solution(input_path: &str, num_steps: usize) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n let mut lines = reader.lines();\n\n\n\n // Parse the polymer starting string into a list of single characters\n\n let polymer = lines\n\n .next()\n\n .expect(\"Empty file found.\")\n\n .expect(\"Empty file found.\")\n\n .split(\"\")\n\n .filter(|s| s != &\"\")\n\n .map(|s| s.to_string())\n\n .collect::<Vec<String>>();\n\n\n\n // Count all pairs in the current string\n\n // This is where we will store the running totals of character occurrences,\n\n // as well as occurences of pairs of characters\n\n let mut pair_counts = HashMap::new();\n\n for idx in 0..polymer.len() - 1 {\n\n let match_pair = polymer[idx].clone() + &polymer[idx + 1];\n", "file_path": "day-14/src/main.rs", "rank": 49, "score": 101797.41876627447 }, { "content": "/// TODO\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path - The input file path TODO\n\n///\n\n/// # Returns\n\n///\n\n/// TODO\n\n///\n\n/// # Examples\n\n///\n\n/// ## Basic\n\n///\n\n/// You come across a field of hydrothermal vents on the ocean floor!\n\n/// These vents constantly produce large, opaque clouds, so it would be best to avoid them if possible.\n\n///\n\n/// They tend to form in 
lines; the submarine helpfully produces a list of nearby lines of vents (your puzzle input)\n\n/// for you to review. For example:\n\n///\n\n/// ```\n\n/// 0,9 -> 5,9\n\n/// 8,0 -> 0,8\n\n/// 9,4 -> 3,4\n\n/// 2,2 -> 2,1\n\n/// 7,0 -> 7,4\n\n/// 6,4 -> 2,0\n\n/// 0,9 -> 2,9\n\n/// 3,4 -> 1,4\n\n/// 0,0 -> 8,8\n\n/// 5,5 -> 8,2\n\n/// ```\n\n///\n\n/// Each line of vents is given as a line segment in the format x1,y1 -> x2,y2 where x1,y1 are the coordinates\n\n/// of one end the line segment and x2,y2 are the coordinates of the other end. These line segments include the\n\n/// Bapoints at both ends. In other words:\n\n///\n\n/// * An entry like 1,1 -> 1,3 covers points 1,1, 1,2, and 1,3.\n\n/// * An entry like 9,7 -> 7,7 covers points 9,7, 8,7, and 7,7.\n\n///\n\n/// For now, only consider horizontal and vertical lines: lines where either x1 = x2 or y1 = y2.\n\n///\n\n/// So, the horizontal and vertical lines from the above list would produce the following diagram:\n\n///\n\n/// ```\n\n/// .......1..\n\n/// ..1....1..\n\n/// ..1....1..\n\n/// .......1..\n\n/// .112111211\n\n/// ..........\n\n/// ..........\n\n/// ..........\n\n/// ..........\n\n/// 222111....\n\n/// ```\n\n///\n\n/// In this diagram, the top left corner is 0,0 and the bottom right corner is 9,9.\n\n/// Each position is shown as the number of lines which cover that point or . if no line covers that point.\n\n/// The top-left pair of 1s, for example, comes from 2,2 -> 2,1; the very bottom row is formed by the overlapping\n\n/// lines 0,9 -> 5,9 and 0,9 -> 2,9.\n\n///\n\n/// To avoid the most dangerous areas, you need to determine the number of points where at least two lines overlap.\n\n/// In the above example, this is anywhere in the diagram with a 2 or larger - a total of 5 points.\n\n///\n\n/// Consider only horizontal and vertical lines. 
At how many points do at least two lines overlap?\n\nfn solution(input_path: &str, ignore_diagonal: bool) -> usize {\n\n let reader = get_buf_reader(input_path);\n\n let lines = reader.lines();\n\n\n\n let input_stream: Vec<usize> = lines\n\n .map(|line| line.unwrap())\n\n .map(|line| {\n\n line.split(\" -> \")\n\n .map(|x| x.to_string())\n\n .collect::<Vec<String>>()\n\n })\n\n .flatten()\n\n .map(|x: String| x.split(',').map(|x| x.to_string()).collect::<Vec<String>>())\n\n .flatten()\n\n .filter(|x| x.trim() != \"\")\n\n .map(|x| x.parse::<usize>().expect(\"Failed to parse input as usize.\"))\n\n .collect();\n\n\n\n let mut rays: Vec<Ray> = input_stream\n\n .iter()\n", "file_path": "day-5/src/main.rs", "rank": 50, "score": 101796.18305874921 }, { "content": "fn sort_string<S>(s: S) -> String\n\nwhere\n\n S: Into<String>,\n\n{\n\n let s = s.into();\n\n let mut parts = s.trim().split(\"\").collect::<Vec<&str>>();\n\n parts.sort();\n\n parts.join(\"\")\n\n}\n\n\n", "file_path": "day-8/src/main.rs", "rank": 51, "score": 97700.92493706549 }, { "content": "/// Print the number of valid traversals from the starting node to an ending node in a graph,\n\n/// where connections between nodes are defined in the provided input file.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Valid paths: 10\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Valid paths: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), 36);\n\n }\n\n\n\n #[test]\n\n fn example_medium_correct() {\n\n assert_eq!(solution(\"inputs/example_medium.txt\"), 103);\n\n }\n", "file_path": "day-12/src/main.rs", "rank": 53, "score": 52969.640293839824 }, { "content": "/// Print the cost of the lowest cost path of a grid 
traversal.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Cost of lowest cost path: 40\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path, 1);\n\n println!(\"Cost of lowest cost path size 1: {:?}\", sol);\n\n let sol = solution(input_path, 5);\n\n println!(\"Cost of lowest cost path size 5: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct_small() {\n\n assert_eq!(solution(\"inputs/example.txt\", 1), 40);\n\n }\n\n\n\n #[test]\n\n fn example_correct_large() {\n", "file_path": "day-15/src/main.rs", "rank": 54, "score": 52968.06809765508 }, { "content": "/// Print the syntax error score in a given input file.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Malformed score: 26397\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Malformed score: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), (26397, 288957));\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), (296535, 4245130838));\n\n }\n\n}\n", "file_path": "day-10/src/main.rs", "rank": 55, "score": 52967.48090685937 }, { "content": "/// Record the final horizontal / depth position in a file of movements.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ day-2 inputs/example.txt\n\n/// Final coordinates: (15, 10), multiplied: 150\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let (x, y) = record_movements(input_path);\n\n println!(\n\n \"Final coordinates no aim: ({}, {}), multiplied: 
{}\",\n\n x,\n\n y,\n\n x * y\n\n );\n\n\n\n let (x, y) = record_movements_with_aim(input_path);\n\n println!(\n\n \"Final coordinates with aim: ({}, {}), multiplied: {}\",\n\n x,\n\n y,\n\n x * y\n\n );\n\n}\n\n\n", "file_path": "day-2/src/main.rs", "rank": 56, "score": 52967.43313965339 }, { "content": "/// Print the packet version sums for each packet in the input file.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Packet version sums: [6, 9, 14, 16, 12, 23, 31]\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Evaluated packets: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(\n\n solution(\"inputs/example.txt\"),\n\n vec![2021, 1, 3, 15, 46, 46, 54],\n\n );\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), vec![19348959966392]);\n\n }\n\n}\n", "file_path": "day-16/src/main.rs", "rank": 57, "score": 52967.43313965339 }, { "content": "/// Count the number of lines in a file of numeric values whose value increases from the preceding line.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ day-1 inputs/challenge.txt\n\n/// Found 1446 increases\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let (input_path, window_size) = parse_file_path(&args);\n\n println!(\n\n \"Found {} increases\",\n\n count_numeric_increases(input_path, window_size)\n\n );\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n (\"arg_text\", 1)\n\n );\n\n }\n", "file_path": "day-1/src/main.rs", "rank": 58, "score": 52967.29881010731 }, { "content": "/// Print the total risk value 
of an array.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Total risk value: 15\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Total risk value: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), (15, 1134));\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), (580, 856716));\n\n }\n\n}\n", "file_path": "day-9/src/main.rs", "rank": 59, "score": 52964.57246390522 }, { "content": "/// Print the number of points visible after 1 fold.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Points after 1 fold: 17\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path, 1);\n\n println!(\"Points after 1 fold: {:?}\", sol);\n\n let sol = solution(input_path, 0);\n\n println!(\"Points after all folds: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\", 1), 17);\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\", 1), 720);\n\n }\n\n\n\n #[test]\n\n fn question_part2_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\", 0), 104);\n\n }\n\n}\n", "file_path": "day-13/src/main.rs", "rank": 60, "score": 52964.57246390522 }, { "content": "/// Parse a set of polymer building instructions, and print the quantity of the most\n\n/// common element minus the quantity of the least common element after 10 steps.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Count of most common - count of least common: 1588\n\n/// 
```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path, 10);\n\n println!(\"Count of most common - count of least common: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\", 40), 2188189693529);\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\", 40), 4807056953866);\n\n }\n\n}\n", "file_path": "day-14/src/main.rs", "rank": 61, "score": 52964.57246390522 }, { "content": "/// Output the number that is closest to a given set of numbers\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Closest number: 2, total distance: 37\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let (num, dist) = solution(input_path);\n\n println!(\"Closest number: {}, total distance: {}\", num, dist);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), (5, 168));\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), (466, 92948968));\n\n }\n\n}\n", "file_path": "day-7/src/main.rs", "rank": 62, "score": 52964.57246390522 }, { "content": "/// TODO\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ day-TODO inputs/example.txt\n\n/// TODO\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Winning score: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), (4512, 1924));\n\n }\n\n\n\n #[test]\n\n fn question_correct() 
{\n\n assert_eq!(solution(\"inputs/challenge.txt\"), (35670, 22704));\n\n }\n\n}\n", "file_path": "day-4/src/main.rs", "rank": 63, "score": 52964.57246390522 }, { "content": "/// Record the gamma / epsilon rate of the power report.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ day-3 inputs/example.txt\n\n/// Power rates: (22, 9), multiplied: 198\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let (x, y) = read_power_report(input_path);\n\n println!(\"Power rates: ({}, {}), multiplied: {}\", x, y, x * y);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_read_power_report {\n\n use crate::read_power_report;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(read_power_report(\"inputs/example.txt\"), (22, 9));\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(read_power_report(\"inputs/challenge.txt\"), (654, 3441));\n\n }\n\n}\n", "file_path": "day-3/src/main.rs", "rank": 65, "score": 52964.57246390522 }, { "content": "/// Print the number of lanternfish 80 days after an initial population.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Number of lanternfish after 80 days: 5934\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let days = 256;\n\n let sol = solution(input_path, days);\n\n println!(\"Number of lanternfish after {} days: {:?}\", days, sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\", 80), 5934);\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\", 80), 365862);\n\n }\n\n}\n", "file_path": "day-6/src/main.rs", "rank": 66, "score": 52964.57246390522 }, { "content": "/// Read an input of rays (two points in space) and output the number of integer points where horizontal or vertical rays 
overlap at least twice, as well as including diagonal lines.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Lines overlapping at least twice without diagonals: 5\n\n/// Lines overlapping at least twice with diagonals: 12\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path, true);\n\n println!(\"Lines overlapping at least twice without diagonals: {:?}\", sol);\n\n let sol = solution(input_path, false);\n\n println!(\"Lines overlapping at least twice with diagonals: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\", true), 5);\n\n assert_eq!(solution(\"inputs/example.txt\", false), 12);\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\", true), 8111);\n\n assert_eq!(solution(\"inputs/challenge.txt\", false), 22088);\n\n }\n\n}\n", "file_path": "day-5/src/main.rs", "rank": 67, "score": 52964.57246390522 }, { "content": "/// TODO\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// TODO\n\n/// ```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"TODO: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), (0, 0));\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), (0, 0));\n\n }\n\n}\n", "file_path": "template/src/main.rs", "rank": 68, "score": 52964.57246390522 }, { "content": "/// Print the count of digits in an encoded input.\n\n///\n\n/// Usage:\n\n///\n\n/// ```\n\n/// $ aoc inputs/example.txt\n\n/// Number of 1, 4, 7, 8 digits: 26\n\n/// 
```\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let input_path = parse_file_path(&args);\n\n let sol = solution(input_path);\n\n println!(\"Number of 1, 4, 7, 8 digits: {:?}\", sol);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_solution {\n\n use crate::solution;\n\n\n\n #[test]\n\n fn example_correct() {\n\n assert_eq!(solution(\"inputs/example.txt\"), 61229);\n\n }\n\n\n\n #[test]\n\n fn question_correct() {\n\n assert_eq!(solution(\"inputs/challenge.txt\"), 1073431);\n\n }\n\n}\n", "file_path": "day-8/src/main.rs", "rank": 69, "score": 52964.57246390522 }, { "content": "#[cfg(test)]\n\nmod test_parse_file_path {\n\n use crate::parse_file_path;\n\n\n\n #[test]\n\n fn one_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(&vec![\"script_path\".to_string(), \"arg_text\".to_string()][..]),\n\n \"arg_text\"\n\n );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n", "file_path": "day-2/src/main.rs", "rank": 70, "score": 17.473030124894343 }, { "content": "use std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-3/src/main.rs", "rank": 71, "score": 16.43378467102874 }, { "content": "use std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-7/src/main.rs", "rank": 72, "score": 16.43378467102874 }, { "content": "use std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from 
command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-13/src/main.rs", "rank": 73, "score": 16.433784671028743 }, { "content": "use std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "template/src/main.rs", "rank": 74, "score": 16.433784671028743 }, { "content": "\n\n #[test]\n\n #[should_panic]\n\n fn bad_window_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n", "file_path": "day-1/src/main.rs", "rank": 75, "score": 16.27380479533968 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-6/src/main.rs", "rank": 76, "score": 16.142208348600374 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", 
"file_path": "day-4/src/main.rs", "rank": 77, "score": 16.142208348600374 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-8/src/main.rs", "rank": 78, "score": 16.142208348600374 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-5/src/main.rs", "rank": 79, "score": 16.142208348600374 }, { "content": "use std::collections::VecDeque;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-10/src/main.rs", "rank": 80, "score": 16.142208348600374 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-14/src/main.rs", "rank": 81, "score": 16.142208348600377 }, { "content": "use std::collections::HashSet;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader, Error};\n\n\n\n/// Parse 
the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-9/src/main.rs", "rank": 82, "score": 15.999552148480053 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::vec::IntoIter;\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-16/src/main.rs", "rank": 83, "score": 15.902079013050143 }, { "content": "use std::collections::{HashMap, VecDeque};\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-12/src/main.rs", "rank": 85, "score": 15.85974386101069 }, { "content": "\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n fn window_arg_ok() {\n\n assert_eq!(\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"5\".to_string()\n\n ][..]\n\n ),\n\n (\"arg_text\", 5)\n\n );\n\n }\n", "file_path": "day-1/src/main.rs", "rank": 86, "score": 15.477481650438083 }, { "content": "use std::cmp::{Ord, Ordering};\n\nuse std::collections::{BinaryHeap, HashMap, HashSet};\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader, Error};\n\n\n\n/// Parse the file path from command line arguments.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - the command line arguments\n\n///\n\n/// # 
Returns\n\n///\n\n/// A single command line argument - panics if zero or more than one argument are passed.\n", "file_path": "day-15/src/main.rs", "rank": 87, "score": 15.160860187517233 }, { "content": "use std::collections::VecDeque;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n\n/// Parse an input file path, counting the number of numeric increases in the file.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input_path` - the OS fully qualified path to the file containing the input data.\n\n/// * `window_size` - the number of lines to include in a sliding comparison\n\n///\n\n/// # Returns\n\n///\n\n/// The count of lines whose numeric value are greater than the preceding value.\n\n///\n\n/// # Examples\n\n///\n\n/// ## `window_size = 1`\n\n///\n\n/// For example, suppose you had the following input file:\n", "file_path": "day-1/src/main.rs", "rank": 88, "score": 14.698763947414136 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-15/src/main.rs", "rank": 89, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-4/src/main.rs", "rank": 90, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n 
\"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 91, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-6/src/main.rs", "rank": 92, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "template/src/main.rs", "rank": 93, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-5/src/main.rs", "rank": 94, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-14/src/main.rs", "rank": 95, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n 
parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-8/src/main.rs", "rank": 96, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-3/src/main.rs", "rank": 97, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 98, "score": 13.895167328083705 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_arg_fail() {\n\n parse_file_path(&Vec::new());\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn many_arg_fail() {\n\n parse_file_path(\n\n &vec![\n\n \"script_path\".to_string(),\n\n \"arg_text\".to_string(),\n\n \"extra_arg\".to_string(),\n\n ][..],\n\n );\n\n }\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 99, "score": 13.895167328083705 } ]
Rust
src/server/snap.rs
Caoming/tikv
7c25c38965692ccfc17d175fbd529d64c6695e13
use std::fmt::{self, Formatter, Display}; use std::io; use std::fs::File; use std::net::{SocketAddr, TcpStream}; use std::io::Read; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::boxed::FnBox; use std::time::{Instant, Duration}; use threadpool::ThreadPool; use mio::Token; use super::metrics::*; use super::{Result, ConnData, Msg}; use super::transport::RaftStoreRouter; use raftstore::store::{SnapFile, SnapManager, SnapKey, SnapEntry}; use util::worker::Runnable; use util::codec::rpc; use util::buf::PipeBuffer; use util::HandyRwLock; use util::transport::SendCh; use kvproto::raft_serverpb::RaftMessage; pub type Callback = Box<FnBox(Result<()>) + Send>; const DEFAULT_SENDER_POOL_SIZE: usize = 3; const DEFAULT_READ_TIMEOUT: u64 = 30; const DEFAULT_WRITE_TIMEOUT: u64 = 30; pub enum Task { Register(Token, RaftMessage), Write(Token, PipeBuffer), Close(Token), Discard(Token), SendTo { addr: SocketAddr, data: ConnData, cb: Callback, }, } impl Display for Task { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { Task::Register(token, ref meta) => write!(f, "Register {:?} token: {:?}", meta, token), Task::Write(token, _) => write!(f, "Write snap for {:?}", token), Task::Close(token) => write!(f, "Close file {:?}", token), Task::Discard(token) => write!(f, "Discard file {:?}", token), Task::SendTo { ref addr, ref data, .. 
} => { write!(f, "SendTo Snap[to: {}, snap: {:?}]", addr, data.msg) } } } } fn send_snap(mgr: SnapManager, addr: SocketAddr, data: ConnData) -> Result<()> { assert!(data.is_snapshot()); let timer = Instant::now(); let send_timer = SEND_SNAP_HISTOGRAM.start_timer(); let snap = data.msg.get_raft().get_message().get_snapshot(); let key = try!(SnapKey::from_snap(&snap)); mgr.wl().register(key.clone(), SnapEntry::Sending); let snap_file = box_try!(mgr.rl().get_snap_file(&key, true)); defer!({ snap_file.delete(); mgr.wl().deregister(&key, &SnapEntry::Sending); }); if !snap_file.exists() { return Err(box_err!("missing snap file: {:?}", snap_file.path())); } let mut f = try!(File::open(snap_file.path())); let mut conn = try!(TcpStream::connect(&addr)); try!(conn.set_nodelay(true)); try!(conn.set_read_timeout(Some(Duration::from_secs(DEFAULT_READ_TIMEOUT)))); try!(conn.set_write_timeout(Some(Duration::from_secs(DEFAULT_WRITE_TIMEOUT)))); let res = rpc::encode_msg(&mut conn, data.msg_id, &data.msg) .and_then(|_| io::copy(&mut f, &mut conn).map_err(From::from)) .and_then(|_| conn.read(&mut [0]).map_err(From::from)) .map(|_| ()) .map_err(From::from); let size = snap_file.meta().map(|m| m.len()).unwrap_or(0); info!("[region {}] sent snapshot {} [size: {}, dur: {:?}]", key.region_id, key, size, timer.elapsed()); send_timer.observe_duration(); res } pub struct Runner<R: RaftStoreRouter + 'static> { snap_mgr: SnapManager, files: HashMap<Token, (SnapFile, RaftMessage)>, pool: ThreadPool, ch: SendCh<Msg>, raft_router: R, } impl<R: RaftStoreRouter + 'static> Runner<R> { pub fn new(snap_mgr: SnapManager, r: R, ch: SendCh<Msg>) -> Runner<R> { Runner { snap_mgr: snap_mgr, files: map![], pool: ThreadPool::new_with_name(thd_name!("snap sender"), DEFAULT_SENDER_POOL_SIZE), raft_router: r, ch: ch, } } pub fn close(&self, token: Token) { if let Err(e) = self.ch.send(Msg::CloseConn { token: token }) { error!("failed to close connection {:?}: {:?}", token, e); } } } impl<R: RaftStoreRouter + 
'static> Runnable<Task> for Runner<R> { fn run(&mut self, task: Task) { match task { Task::Register(token, meta) => { SNAP_TASK_COUNTER.with_label_values(&["register"]).inc(); let mgr = self.snap_mgr.clone(); match SnapKey::from_snap(meta.get_message().get_snapshot()) .and_then(|key| mgr.rl().get_snap_file(&key, false).map(|r| (r, key))) { Ok((f, k)) => { if f.exists() { info!("file {} already exists, skip receiving.", f.path().display()); if let Err(e) = self.raft_router.send_raft_msg(meta) { error!("send snapshot for token {:?} err {:?}", token, e); } self.close(token); return; } debug!("begin to receive snap {:?}", meta); mgr.wl().register(k.clone(), SnapEntry::Receiving); self.files.insert(token, (f, meta)); } Err(e) => error!("failed to create snap file for {:?}: {:?}", token, e), } } Task::Write(token, mut data) => { SNAP_TASK_COUNTER.with_label_values(&["write"]).inc(); let mut should_close = false; match self.files.entry(token) { Entry::Occupied(mut e) => { if let Err(err) = data.write_all_to(&mut e.get_mut().0) { error!("failed to write data to {:?}: {:?}", token, err); let (_, msg) = e.remove(); let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); should_close = true; } } Entry::Vacant(_) => error!("invalid snap token {:?}", token), } if should_close { self.close(token); } } Task::Close(token) => { SNAP_TASK_COUNTER.with_label_values(&["close"]).inc(); match self.files.remove(&token) { Some((mut writer, msg)) => { let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); info!("saving snapshot to {}", writer.path().display()); defer!({ self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); self.close(token); }); if let Err(e) = writer.save() { error!("failed to save file {:?}: {:?}", token, e); return; } if let Err(e) = self.raft_router.send_raft_msg(msg) { error!("send snapshot for token {:?} err {:?}", token, e); } } None => error!("invalid snap token 
{:?}", token), } } Task::Discard(token) => { SNAP_TASK_COUNTER.with_label_values(&["discard"]).inc(); if let Some((_, msg)) = self.files.remove(&token) { debug!("discard snapshot: {:?}", msg); let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); } } Task::SendTo { addr, data, cb } => { SNAP_TASK_COUNTER.with_label_values(&["send"]).inc(); let mgr = self.snap_mgr.clone(); self.pool.execute(move || { let res = send_snap(mgr, addr, data); if res.is_err() { error!("failed to send snap to {}: {:?}", addr, res); } cb(res) }); } } } }
use std::fmt::{self, Formatter, Display}; use std::io; use std::fs::File; use std::net::{SocketAddr, TcpStream}; use std::io::Read; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::boxed::FnBox; use std::time::{Instant, Duration}; use threadpool::ThreadPool; use mio::Token; use super::metrics::*; use super::{Result, ConnData, Msg}; use super::transport::RaftStoreRouter; use raftstore::store::{SnapFile, SnapManager, SnapKey, SnapEntry}; use util::worker::Runnable; use util::codec::rpc; use util::buf::PipeBuffer; use util::HandyRwLock; use util::transport::SendCh; use kvproto::raft_serverpb::RaftMessage; pub type Callback = Box<FnBox(Result<()>) + Send>; const DEFAULT_SENDER_POOL_SIZE: usize = 3; const DEFAULT_READ_TIMEOUT: u64 = 30; const DEFAULT_WRITE_TIMEOUT: u64 = 30; pub enum Task { Register(Token, RaftMessage), Write(Token, PipeBuffer), Close(Token), Discard(Token), SendTo { addr: SocketAddr, data: ConnData, cb: Callback, }, } impl Display for Task {
} fn send_snap(mgr: SnapManager, addr: SocketAddr, data: ConnData) -> Result<()> { assert!(data.is_snapshot()); let timer = Instant::now(); let send_timer = SEND_SNAP_HISTOGRAM.start_timer(); let snap = data.msg.get_raft().get_message().get_snapshot(); let key = try!(SnapKey::from_snap(&snap)); mgr.wl().register(key.clone(), SnapEntry::Sending); let snap_file = box_try!(mgr.rl().get_snap_file(&key, true)); defer!({ snap_file.delete(); mgr.wl().deregister(&key, &SnapEntry::Sending); }); if !snap_file.exists() { return Err(box_err!("missing snap file: {:?}", snap_file.path())); } let mut f = try!(File::open(snap_file.path())); let mut conn = try!(TcpStream::connect(&addr)); try!(conn.set_nodelay(true)); try!(conn.set_read_timeout(Some(Duration::from_secs(DEFAULT_READ_TIMEOUT)))); try!(conn.set_write_timeout(Some(Duration::from_secs(DEFAULT_WRITE_TIMEOUT)))); let res = rpc::encode_msg(&mut conn, data.msg_id, &data.msg) .and_then(|_| io::copy(&mut f, &mut conn).map_err(From::from)) .and_then(|_| conn.read(&mut [0]).map_err(From::from)) .map(|_| ()) .map_err(From::from); let size = snap_file.meta().map(|m| m.len()).unwrap_or(0); info!("[region {}] sent snapshot {} [size: {}, dur: {:?}]", key.region_id, key, size, timer.elapsed()); send_timer.observe_duration(); res } pub struct Runner<R: RaftStoreRouter + 'static> { snap_mgr: SnapManager, files: HashMap<Token, (SnapFile, RaftMessage)>, pool: ThreadPool, ch: SendCh<Msg>, raft_router: R, } impl<R: RaftStoreRouter + 'static> Runner<R> { pub fn new(snap_mgr: SnapManager, r: R, ch: SendCh<Msg>) -> Runner<R> { Runner { snap_mgr: snap_mgr, files: map![], pool: ThreadPool::new_with_name(thd_name!("snap sender"), DEFAULT_SENDER_POOL_SIZE), raft_router: r, ch: ch, } } pub fn close(&self, token: Token) { if let Err(e) = self.ch.send(Msg::CloseConn { token: token }) { error!("failed to close connection {:?}: {:?}", token, e); } } } impl<R: RaftStoreRouter + 'static> Runnable<Task> for Runner<R> { fn run(&mut self, task: Task) { 
match task { Task::Register(token, meta) => { SNAP_TASK_COUNTER.with_label_values(&["register"]).inc(); let mgr = self.snap_mgr.clone(); match SnapKey::from_snap(meta.get_message().get_snapshot()) .and_then(|key| mgr.rl().get_snap_file(&key, false).map(|r| (r, key))) { Ok((f, k)) => { if f.exists() { info!("file {} already exists, skip receiving.", f.path().display()); if let Err(e) = self.raft_router.send_raft_msg(meta) { error!("send snapshot for token {:?} err {:?}", token, e); } self.close(token); return; } debug!("begin to receive snap {:?}", meta); mgr.wl().register(k.clone(), SnapEntry::Receiving); self.files.insert(token, (f, meta)); } Err(e) => error!("failed to create snap file for {:?}: {:?}", token, e), } } Task::Write(token, mut data) => { SNAP_TASK_COUNTER.with_label_values(&["write"]).inc(); let mut should_close = false; match self.files.entry(token) { Entry::Occupied(mut e) => { if let Err(err) = data.write_all_to(&mut e.get_mut().0) { error!("failed to write data to {:?}: {:?}", token, err); let (_, msg) = e.remove(); let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); should_close = true; } } Entry::Vacant(_) => error!("invalid snap token {:?}", token), } if should_close { self.close(token); } } Task::Close(token) => { SNAP_TASK_COUNTER.with_label_values(&["close"]).inc(); match self.files.remove(&token) { Some((mut writer, msg)) => { let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); info!("saving snapshot to {}", writer.path().display()); defer!({ self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); self.close(token); }); if let Err(e) = writer.save() { error!("failed to save file {:?}: {:?}", token, e); return; } if let Err(e) = self.raft_router.send_raft_msg(msg) { error!("send snapshot for token {:?} err {:?}", token, e); } } None => error!("invalid snap token {:?}", token), } } Task::Discard(token) => { 
SNAP_TASK_COUNTER.with_label_values(&["discard"]).inc(); if let Some((_, msg)) = self.files.remove(&token) { debug!("discard snapshot: {:?}", msg); let key = SnapKey::from_snap(msg.get_message().get_snapshot()).unwrap(); self.snap_mgr.wl().deregister(&key, &SnapEntry::Receiving); } } Task::SendTo { addr, data, cb } => { SNAP_TASK_COUNTER.with_label_values(&["send"]).inc(); let mgr = self.snap_mgr.clone(); self.pool.execute(move || { let res = send_snap(mgr, addr, data); if res.is_err() { error!("failed to send snap to {}: {:?}", addr, res); } cb(res) }); } } } }
fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { Task::Register(token, ref meta) => write!(f, "Register {:?} token: {:?}", meta, token), Task::Write(token, _) => write!(f, "Write snap for {:?}", token), Task::Close(token) => write!(f, "Close file {:?}", token), Task::Discard(token) => write!(f, "Discard file {:?}", token), Task::SendTo { ref addr, ref data, .. } => { write!(f, "SendTo Snap[to: {}, snap: {:?}]", addr, data.msg) } } }
function_block-full_function
[ { "content": "pub fn new_message(from: u64, to: u64, t: MessageType, n: usize) -> Message {\n\n let mut m = new_message_with_entries(from, to, t, vec![]);\n\n if n > 0 {\n\n let mut ents = Vec::with_capacity(n);\n\n for _ in 0..n {\n\n ents.push(new_entry(0, 0, SOME_DATA));\n\n }\n\n m.set_entries(RepeatedField::from_vec(ents));\n\n }\n\n m\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 1, "score": 214851.82178917454 }, { "content": "/// Creates a callback to receive async results of write prepare from the storage engine.\n\nfn make_engine_cb(cid: u64, pr: ProcessResult, ch: SendCh<Msg>) -> EngineCallback<()> {\n\n Box::new(move |(cb_ctx, result)| {\n\n if let Err(e) = ch.send(Msg::WriteFinished {\n\n cid: cid,\n\n pr: pr,\n\n cb_ctx: cb_ctx,\n\n result: result,\n\n }) {\n\n panic!(\"send write finished to scheduler failed cid={}, err:{:?}\",\n\n cid,\n\n e);\n\n }\n\n })\n\n}\n\n\n\n/// Scheduler which schedules the execution of `storage::Command`s.\n\npub struct Scheduler {\n\n engine: Box<Engine>,\n\n\n\n // cid -> context\n", "file_path": "src/storage/txn/scheduler.rs", "rank": 2, "score": 204677.13969519865 }, { "content": "pub fn new_entry(term: u64, index: u64, data: Option<&str>) -> Entry {\n\n let mut e = Entry::new();\n\n e.set_index(index);\n\n e.set_term(term);\n\n if let Some(d) = data {\n\n e.set_data(d.as_bytes().to_vec());\n\n }\n\n e\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 3, "score": 201653.39487435407 }, { "content": "// Encodes data with message ID and any arbitrary body.\n\npub fn encode_data<T: io::Write>(w: &mut T, msg_id: u64, data: &[u8]) -> Result<()> {\n\n let header = encode_msg_header(msg_id, data.len());\n\n\n\n try!(w.write(&header));\n\n try!(w.write(data));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/util/codec/rpc.rs", "rank": 4, "score": 201049.63140155654 }, { "content": "#[inline]\n\npub fn duration_to_ms(d: Duration) -> u64 {\n\n let nanos = d.subsec_nanos() as u64;\n\n // Most of case, we can't 
have so large Duration, so here just panic if overflow now.\n\n d.as_secs() * 1_000 + (nanos / 1_000_000)\n\n}\n\n\n\n/// Convert Duration to nanoseconds.\n", "file_path": "src/util/mod.rs", "rank": 5, "score": 199129.33389632206 }, { "content": "#[inline]\n\npub fn duration_to_nanos(d: Duration) -> u64 {\n\n let nanos = d.subsec_nanos() as u64;\n\n // Most of case, we can't have so large Duration, so here just panic if overflow now.\n\n d.as_secs() * 1_000_000_000 + nanos\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 6, "score": 199129.33389632206 }, { "content": "// Encodes msg header to a 16 bytes header buffer.\n\npub fn encode_msg_header(msg_id: u64, payload_len: usize) -> Vec<u8> {\n\n let mut buf = vec![0;MSG_HEADER_LEN];\n\n\n\n BigEndian::write_u16(&mut buf[0..2], MSG_MAGIC);\n\n BigEndian::write_u16(&mut buf[2..4], MSG_VERSION_V1);\n\n BigEndian::write_u32(&mut buf[4..8], payload_len as u32);\n\n BigEndian::write_u64(&mut buf[8..16], msg_id);\n\n\n\n buf\n\n}\n\n\n", "file_path": "src/util/codec/rpc.rs", "rank": 7, "score": 195797.02378889098 }, { "content": "pub fn new_store(store_id: u64, addr: String) -> metapb::Store {\n\n let mut store = metapb::Store::new();\n\n store.set_id(store_id);\n\n store.set_address(addr);\n\n\n\n store\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 8, "score": 187305.9237707447 }, { "content": "fn send_msg(stream: &mut TcpStream, msg_id: u64, message: &Request) -> Result<(u64, Response)> {\n\n let timer = PD_SEND_MSG_HISTOGRAM.start_timer();\n\n\n\n let mut req = Message::new();\n\n\n\n req.set_msg_type(MessageType::PdReq);\n\n // TODO: optimize clone later in HTTP refactor.\n\n req.set_pd_req(message.clone());\n\n\n\n try!(stream.set_write_timeout(Some(Duration::from_secs(SOCKET_WRITE_TIMEOUT))));\n\n try!(rpc::encode_msg(stream, msg_id, &req));\n\n\n\n try!(stream.set_read_timeout(Some(Duration::from_secs(SOCKET_READ_TIMEOUT))));\n\n let mut resp = Message::new();\n\n let id = 
try!(rpc::decode_msg(stream, &mut resp));\n\n if resp.get_msg_type() != MessageType::PdResp {\n\n return Err(box_err!(\"invalid pd response type {:?}\", resp.get_msg_type()));\n\n }\n\n timer.observe_duration();\n\n\n\n Ok((id, resp.take_pd_resp()))\n\n}\n\n\n", "file_path": "src/pd/client.rs", "rank": 9, "score": 180366.76854806452 }, { "content": "enum Task {\n\n Write(Vec<Modify>, Callback<()>),\n\n Snapshot(Callback<Box<Snapshot>>),\n\n}\n\n\n\nimpl Display for Task {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n match *self {\n\n Task::Write(..) => write!(f, \"write task\"),\n\n Task::Snapshot(_) => write!(f, \"snapshot task\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/storage/engine/rocksdb.rs", "rank": 10, "score": 177924.88125435176 }, { "content": "pub fn new_test_config(id: u64, peers: Vec<u64>, election: usize, heartbeat: usize) -> Config {\n\n Config {\n\n id: id,\n\n peers: peers,\n\n election_tick: election,\n\n heartbeat_tick: heartbeat,\n\n max_size_per_msg: NO_LIMIT,\n\n max_inflight_msgs: 256,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 11, "score": 177059.05469581764 }, { "content": "#[inline]\n\npub fn has_unsigned_flag(flag: u64) -> bool {\n\n flag & UNSIGNED_FLAG > 0\n\n}\n\n\n\n/// `has_not_null_flag` checks if `NOT_NULL_FLAG` is set.\n", "file_path": "src/util/codec/mysql/types.rs", "rank": 12, "score": 174887.5646057538 }, { "content": "#[inline]\n\npub fn has_not_null_flag(flag: u64) -> bool {\n\n flag & NOT_NULL_FLAG > 0\n\n}\n\n\n\n/// `MySQL` type informations.\n\npub const UNSPECIFIED: u8 = 0;\n\npub const TINY: u8 = 1;\n\npub const SHORT: u8 = 2;\n\npub const LONG: u8 = 3;\n\npub const FLOAT: u8 = 4;\n\npub const DOUBLE: u8 = 5;\n\npub const NULL: u8 = 6;\n\npub const TIMESTAMP: u8 = 7;\n\npub const LONG_LONG: u8 = 8;\n\npub const INT24: u8 = 9;\n\npub const DATE: u8 = 10;\n\npub const DURATION: u8 = 11;\n\npub const DATETIME: u8 = 12;\n\npub const YEAR: u8 = 13;\n\npub const 
NEWDATE: u8 = 14;\n", "file_path": "src/util/codec/mysql/types.rs", "rank": 13, "score": 174887.5646057538 }, { "content": "// A helper function to parse SocketAddr for mio.\n\n// In mio example, it uses \"127.0.0.1:80\".parse() to get the SocketAddr,\n\n// but it is just ok for \"ip:port\", not \"host:port\".\n\npub fn to_socket_addr<A: ToSocketAddrs>(addr: A) -> io::Result<SocketAddr> {\n\n let addrs = try!(addr.to_socket_addrs());\n\n Ok(addrs.collect::<Vec<SocketAddr>>()[0])\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 14, "score": 174655.38486624663 }, { "content": "// Decodes msg header in header buffer, the buffer length size must be equal MSG_HEADER_LEN;\n\npub fn decode_msg_header<R: io::Read>(header: &mut R) -> Result<(u64, usize)> {\n\n let magic = try!(header.read_u16::<BigEndian>());\n\n if MSG_MAGIC != magic {\n\n return Err(other_err(format!(\"invalid magic {}, not {}\", magic, MSG_MAGIC)));\n\n }\n\n\n\n let version = try!(header.read_u16::<BigEndian>());\n\n if MSG_VERSION_V1 != version {\n\n return Err(other_err(format!(\"unsupported version {}, we need {} now\",\n\n version,\n\n MSG_VERSION_V1)));\n\n }\n\n\n\n let payload_len = try!(header.read_u32::<BigEndian>()) as usize;\n\n // TODO: check max payload\n\n\n\n let message_id = try!(header.read_u64::<BigEndian>());\n\n\n\n Ok((message_id, payload_len))\n\n}\n\n\n", "file_path": "src/util/codec/rpc.rs", "rank": 15, "score": 171673.02829159144 }, { "content": "fn entry(t: EntryType, term: u64, i: u64, data: Option<Vec<u8>>) -> Entry {\n\n let mut e = Entry::new();\n\n e.set_index(i);\n\n e.set_term(term);\n\n if let Some(d) = data {\n\n e.set_data(d);\n\n }\n\n e.set_entry_type(t);\n\n e\n\n}\n\n\n", "file_path": "tests/test_raw_node.rs", "rank": 16, "score": 169763.88466805464 }, { "content": "pub fn new_message_with_entries(from: u64, to: u64, t: MessageType, ents: Vec<Entry>) -> Message {\n\n let mut m = Message::new();\n\n m.set_from(from);\n\n m.set_to(to);\n\n m.set_msg_type(t);\n\n 
if !ents.is_empty() {\n\n m.set_entries(RepeatedField::from_vec(ents));\n\n }\n\n m\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 17, "score": 168578.49385567073 }, { "content": "pub fn hard_state(t: u64, c: u64, v: u64) -> HardState {\n\n let mut hs = HardState::new();\n\n hs.set_term(t);\n\n hs.set_commit(c);\n\n hs.set_vote(v);\n\n hs\n\n}\n\n\n", "file_path": "tests/test_raft_paper.rs", "rank": 18, "score": 161972.17354862596 }, { "content": "pub fn new_request(cluster_id: u64, cmd_type: pdpb::CommandType) -> pdpb::Request {\n\n let mut header = pdpb::RequestHeader::new();\n\n header.set_cluster_id(cluster_id);\n\n header.set_uuid(Uuid::new_v4().as_bytes().to_vec());\n\n let mut req = pdpb::Request::new();\n\n req.set_header(header);\n\n req.set_cmd_type(cmd_type);\n\n req\n\n}\n\n\n", "file_path": "src/pd/protocol.rs", "rank": 19, "score": 161399.7982174452 }, { "content": "// vote_resp_msg_type maps vote and pre_vote message types to their correspond responses.\n\npub fn vote_resp_msg_type(t: MessageType) -> MessageType {\n\n match t {\n\n MessageType::MsgRequestVote => MessageType::MsgRequestVoteResponse,\n\n MessageType::MsgRequestPreVote => MessageType::MsgRequestPreVoteResponse,\n\n _ => panic!(\"Not a vote message: {:?}\", t),\n\n }\n\n}\n\n\n", "file_path": "src/raft/raft.rs", "rank": 20, "score": 159249.2418468535 }, { "content": "pub fn make_std_tcp_conn<A: ToSocketAddrs>(addr: A) -> io::Result<TcpStream> {\n\n let stream = try!(TcpStream::connect(addr));\n\n try!(stream.set_nodelay(true));\n\n Ok(stream)\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 21, "score": 159136.80708430658 }, { "content": "pub trait Snapshot: Send {\n\n fn get(&self, key: &Key) -> Result<Option<Value>>;\n\n fn get_cf(&self, cf: CfName, key: &Key) -> Result<Option<Value>>;\n\n #[allow(needless_lifetimes)]\n\n fn iter<'a>(&'a self,\n\n upper_bound: Option<&[u8]>,\n\n fill_cache: bool,\n\n mode: ScanMode)\n\n -> Result<Cursor<'a>>;\n\n 
#[allow(needless_lifetimes)]\n\n fn iter_cf<'a>(&'a self,\n\n cf: CfName,\n\n upper_bound: Option<&[u8]>,\n\n fill_cache: bool,\n\n mode: ScanMode)\n\n -> Result<Cursor<'a>>;\n\n}\n\n\n", "file_path": "src/storage/engine/mod.rs", "rank": 22, "score": 157913.9437694981 }, { "content": "pub fn empty_entry(term: u64, index: u64) -> Entry {\n\n new_entry(term, index, None)\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 23, "score": 156190.52920335584 }, { "content": "pub fn new_snapshot(index: u64, term: u64, nodes: Vec<u64>) -> Snapshot {\n\n let mut s = Snapshot::new();\n\n s.mut_metadata().set_index(index);\n\n s.mut_metadata().set_term(term);\n\n s.mut_metadata().mut_conf_state().set_nodes(nodes);\n\n s\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Network {\n\n pub peers: HashMap<u64, Interface>,\n\n storage: HashMap<u64, MemStorage>,\n\n dropm: HashMap<Connem, f64>,\n\n ignorem: HashMap<MessageType, bool>,\n\n}\n\n\n\nimpl Network {\n\n // initializes a network from peers.\n\n // A nil node will be replaced with a new *stateMachine.\n\n // A *stateMachine will get its k, id.\n", "file_path": "tests/test_raft.rs", "rank": 24, "score": 155945.28481417318 }, { "content": "pub fn new_server_cluster(id: u64, count: usize) -> Cluster<ServerCluster> {\n\n new_server_cluster_with_cfs(id, count, ALL_CFS)\n\n}\n\n\n", "file_path": "tests/raftstore/server.rs", "rank": 25, "score": 155822.74350319395 }, { "content": "pub fn new_node_cluster(id: u64, count: usize) -> Cluster<NodeCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id));\n\n let sim = Arc::new(RwLock::new(NodeCluster::new(pd_client.clone())));\n\n Cluster::new(id, count, ALL_CFS, sim, pd_client)\n\n}\n", "file_path": "tests/raftstore/node.rs", "rank": 26, "score": 155822.74350319395 }, { "content": "pub trait Runnable<T: Display> {\n\n fn run(&mut self, t: T);\n\n}\n\n\n", "file_path": "src/util/worker/mod.rs", "rank": 27, "score": 150473.05457631822 }, { "content": "// Client to communicate with 
placement driver (pd) for special cluster.\n\n// Because now one pd only supports one cluster, so it is no need to pass\n\n// cluster id in trait interface every time, so passing the cluster id when\n\n// creating the PdClient is enough and the PdClient will use this cluster id\n\n// all the time.\n\npub trait PdClient: Send + Sync {\n\n // Return the cluster ID.\n\n fn get_cluster_id(&self) -> Result<u64>;\n\n\n\n // Create the cluster with cluster ID, node, stores and first region.\n\n // If the cluster is already bootstrapped, return ClusterBootstrapped error.\n\n // When a node starts, if it finds nothing in the node and\n\n // cluster is not bootstrapped, it begins to create node, stores, first region\n\n // and then call bootstrap_cluster to let pd know it.\n\n // It may happen that multi nodes start at same time to try to\n\n // bootstrap, and only one can success, others will fail\n\n // and must remove their created local region data themselves.\n\n fn bootstrap_cluster(&self, stores: metapb::Store, region: metapb::Region) -> Result<()>;\n\n\n\n // Return whether the cluster is bootstrapped or not.\n\n // We must use the cluster after bootstrapped, so when the\n\n // node starts, it must check it with is_cluster_bootstrapped,\n\n // and panic if not bootstrapped.\n\n fn is_cluster_bootstrapped(&self) -> Result<bool>;\n\n\n", "file_path": "src/pd/mod.rs", "rank": 28, "score": 150416.56621352636 }, { "content": "pub trait Engine: Send + Debug {\n\n fn async_write(&self, ctx: &Context, batch: Vec<Modify>, callback: Callback<()>) -> Result<()>;\n\n fn async_snapshot(&self, ctx: &Context, callback: Callback<Box<Snapshot>>) -> Result<()>;\n\n\n\n fn write(&self, ctx: &Context, batch: Vec<Modify>) -> Result<()> {\n\n let timeout = Duration::from_secs(DEFAULT_TIMEOUT_SECS);\n\n match wait_op!(|cb| self.async_write(ctx, batch, cb).unwrap(), timeout) {\n\n Some((_, res)) => res,\n\n None => Err(Error::Timeout(timeout)),\n\n }\n\n }\n\n\n\n fn snapshot(&self, ctx: 
&Context) -> Result<Box<Snapshot>> {\n\n let timeout = Duration::from_secs(DEFAULT_TIMEOUT_SECS);\n\n match wait_op!(|cb| self.async_snapshot(ctx, cb).unwrap(), timeout) {\n\n Some((_, res)) => res,\n\n None => Err(Error::Timeout(timeout)),\n\n }\n\n }\n\n\n", "file_path": "src/storage/engine/mod.rs", "rank": 29, "score": 150412.51621425667 }, { "content": "// Transports message between different raft peers.\n\npub trait Transport: Send + Clone {\n\n fn send(&self, msg: RaftMessage) -> Result<()>;\n\n}\n", "file_path": "src/raftstore/store/transport.rs", "rank": 30, "score": 150412.51621425667 }, { "content": "/// Get the region id and index from raft log key generated by `raft_log_key`.\n\npub fn decode_raft_log_key(key: &[u8]) -> Result<(u64, u64)> {\n\n let suffix_idx = REGION_RAFT_PREFIX_KEY.len() + mem::size_of::<u64>();\n\n let expect_key_len = suffix_idx + mem::size_of::<u8>() + mem::size_of::<u64>();\n\n if key.len() != expect_key_len || !key.starts_with(REGION_RAFT_PREFIX_KEY) ||\n\n key[suffix_idx] != RAFT_LOG_SUFFIX {\n\n return Err(box_err!(\"key {} is not a valid raft log key\", escape(key)));\n\n }\n\n let region_id = BigEndian::read_u64(&key[REGION_RAFT_PREFIX_KEY.len()..suffix_idx]);\n\n let index = BigEndian::read_u64(&key[suffix_idx + mem::size_of::<u8>()..]);\n\n Ok((region_id, index))\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 31, "score": 150171.2201697543 }, { "content": "pub fn sleep_ms(ms: u64) {\n\n thread::sleep(Duration::from_millis(ms));\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 32, "score": 149771.6016109523 }, { "content": "/// `check_addr` validates an address. 
Addresses are formed like \"Host:Port\".\n\n/// More details about **Host** and **Port** can be found in WHATWG URL Standard.\n\npub fn check_addr(addr: &str) -> Result<(), ConfigError> {\n\n // Try to validate \"IPv4:Port\" and \"[IPv6]:Port\".\n\n if SocketAddrV4::from_str(addr).is_ok() {\n\n return Ok(());\n\n }\n\n if SocketAddrV6::from_str(addr).is_ok() {\n\n return Ok(());\n\n }\n\n\n\n let parts: Vec<&str> = addr.split(':')\n\n .filter(|s| !s.is_empty()) // \"Host:\" or \":Port\" are invalid.\n\n .collect();\n\n\n\n // [\"Host\", \"Port\"]\n\n if parts.len() != 2 {\n\n return Err(ConfigError::Address(format!(\"invalid addr: {}\", addr)));\n\n }\n\n\n\n // Check Port.\n\n let port: u16 = try!(parts[1]\n", "file_path": "src/util/config.rs", "rank": 33, "score": 148712.9465697001 }, { "content": "pub trait BatchRunnable<T: Display> {\n\n /// run a batch of tasks.\n\n ///\n\n /// Please note that ts will be clear after invoking this method.\n\n fn run_batch(&mut self, ts: &mut Vec<T>);\n\n}\n\n\n\nimpl<T: Display, R: Runnable<T>> BatchRunnable<T> for R {\n\n fn run_batch(&mut self, ts: &mut Vec<T>) {\n\n for t in ts.drain(..) {\n\n let task_str = format!(\"{}\", t);\n\n let timer = SlowTimer::new();\n\n self.run(t);\n\n slow_log!(timer, \"handle task {}\", task_str);\n\n }\n\n }\n\n}\n\n\n\n/// Scheduler provides interface to schedule task to underlying workers.\n\npub struct Scheduler<T> {\n", "file_path": "src/util/worker/mod.rs", "rank": 34, "score": 147845.26061263465 }, { "content": "pub trait RaftStoreRouter: Send + Clone {\n\n /// Send StoreMsg, retry if failed. 
Try times may vary from implementation.\n\n fn send(&self, msg: StoreMsg) -> RaftStoreResult<()>;\n\n\n\n /// Send StoreMsg.\n\n fn try_send(&self, msg: StoreMsg) -> RaftStoreResult<()>;\n\n\n\n // Send RaftMessage to local store.\n\n fn send_raft_msg(&self, msg: RaftMessage) -> RaftStoreResult<()> {\n\n self.try_send(StoreMsg::RaftMessage(msg))\n\n }\n\n\n\n // Send RaftCmdRequest to local store.\n\n fn send_command(&self, req: RaftCmdRequest, cb: Callback) -> RaftStoreResult<()> {\n\n self.try_send(StoreMsg::RaftCmd {\n\n request: req,\n\n callback: cb,\n\n })\n\n }\n\n\n", "file_path": "src/server/transport.rs", "rank": 35, "score": 147785.69128995854 }, { "content": "pub fn new_peer(store_id: u64, peer_id: u64) -> metapb::Peer {\n\n let mut peer = metapb::Peer::new();\n\n peer.set_store_id(store_id);\n\n peer.set_id(peer_id);\n\n peer\n\n}\n\n\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 36, "score": 147274.04650442017 }, { "content": "pub fn new_request(region_id: u64,\n\n epoch: RegionEpoch,\n\n requests: Vec<Request>,\n\n read_quorum: bool)\n\n -> RaftCmdRequest {\n\n let mut req = new_base_request(region_id, epoch, read_quorum);\n\n req.set_requests(protobuf::RepeatedField::from_vec(requests));\n\n req\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 37, "score": 147148.99148151925 }, { "content": "pub fn new_test_raft(id: u64,\n\n peers: Vec<u64>,\n\n election: usize,\n\n heartbeat: usize,\n\n storage: MemStorage)\n\n -> Interface {\n\n Interface::new(Raft::new(&new_test_config(id, peers, election, heartbeat), storage))\n\n}\n\n\n", "file_path": "tests/test_raft.rs", "rank": 38, "score": 147148.99148151925 }, { "content": "/// A function to escape a byte array to a readable ascii string.\n\n/// escape rules follow golang/protobuf.\n\n/// https://github.com/golang/protobuf/blob/master/proto/text.go#L578\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tikv::util::escape;\n\n///\n\n/// assert_eq!(r\"ab\", escape(b\"ab\"));\n\n/// 
assert_eq!(r\"a\\\\023\", escape(b\"a\\\\023\"));\n\n/// assert_eq!(r\"a\\000\", escape(b\"a\\0\"));\n\n/// assert_eq!(\"a\\\\r\\\\n\\\\t '\\\\\\\"\\\\\\\\\", escape(b\"a\\r\\n\\t '\\\"\\\\\"));\n\n/// assert_eq!(r\"\\342\\235\\244\\360\\237\\220\\267\", escape(\"❤🐷\".as_bytes()));\n\n/// ```\n\npub fn escape(data: &[u8]) -> String {\n\n let mut escaped = Vec::with_capacity(data.len() * 4);\n\n for &c in data {\n\n match c {\n\n b'\\n' => escaped.extend_from_slice(br\"\\n\"),\n\n b'\\r' => escaped.extend_from_slice(br\"\\r\"),\n\n b'\\t' => escaped.extend_from_slice(br\"\\t\"),\n\n b'\"' => escaped.extend_from_slice(b\"\\\\\\\"\"),\n\n b'\\\\' => escaped.extend_from_slice(br\"\\\\\"),\n\n _ => {\n\n if c >= 0x20 && c < 0x7f {\n\n // c is printable\n\n escaped.push(c);\n\n } else {\n\n escaped.push(b'\\\\');\n\n escaped.push(b'0' + (c >> 6));\n\n escaped.push(b'0' + ((c >> 3) & 7));\n\n escaped.push(b'0' + (c & 7));\n\n }\n\n }\n\n }\n\n }\n\n escaped.shrink_to_fit();\n\n unsafe { String::from_utf8_unchecked(escaped) }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 39, "score": 146284.84230202483 }, { "content": "// a helper function to create peer easily.\n\npub fn new_peer(store_id: u64, peer_id: u64) -> metapb::Peer {\n\n let mut peer = metapb::Peer::new();\n\n peer.set_store_id(store_id);\n\n peer.set_id(peer_id);\n\n peer\n\n}\n\n\n", "file_path": "src/raftstore/store/util.rs", "rank": 40, "score": 145455.52795358934 }, { "content": "pub fn new_server_cluster_with_cfs(id: u64,\n\n count: usize,\n\n cfs: &[CfName])\n\n -> Cluster<ServerCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id));\n\n let sim = Arc::new(RwLock::new(ServerCluster::new(pd_client.clone())));\n\n Cluster::new(id, count, cfs, sim, pd_client)\n\n}\n", "file_path": "tests/raftstore/server.rs", "rank": 41, "score": 144692.43732493572 }, { "content": "pub fn new_test_raft_with_config(id: u64,\n\n peers: Vec<u64>,\n\n election: usize,\n\n heartbeat: usize,\n\n storage: 
MemStorage,\n\n pre_vote: bool)\n\n -> Interface {\n\n let mut config = new_test_config(id, peers, election, heartbeat);\n\n config.pre_vote = pre_vote;\n\n Interface::new(Raft::new(&config, storage))\n\n}\n\n\n\n\n", "file_path": "tests/test_raft.rs", "rank": 42, "score": 144692.43732493572 }, { "content": "pub fn new_admin_request(region_id: u64,\n\n epoch: &RegionEpoch,\n\n request: AdminRequest)\n\n -> RaftCmdRequest {\n\n let mut req = new_base_request(region_id, epoch.clone(), false);\n\n req.set_admin_request(request);\n\n req\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 43, "score": 144692.43732493572 }, { "content": "pub fn new_status_request(region_id: u64,\n\n peer: metapb::Peer,\n\n request: StatusRequest)\n\n -> RaftCmdRequest {\n\n let mut req = new_base_request(region_id, RegionEpoch::new(), false);\n\n req.mut_header().set_peer(peer);\n\n req.set_status_request(request);\n\n req\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 44, "score": 144692.43732493572 }, { "content": "pub fn raft_log_key(region_id: u64, log_index: u64) -> Vec<u8> {\n\n let mut key = make_region_id_key(region_id, RAFT_LOG_SUFFIX, mem::size_of::<u64>());\n\n // no need check error here, can't panic;\n\n key.write_u64::<BigEndian>(log_index).unwrap();\n\n key\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 45, "score": 143719.92262438417 }, { "content": "pub trait Channel<M>: Send + Clone {\n\n fn send(&self, m: M) -> Result<()>;\n\n}\n\n\n\nimpl Channel<RaftMessage> for ServerTransport {\n\n fn send(&self, m: RaftMessage) -> Result<()> {\n\n Transport::send(self, m)\n\n }\n\n}\n\n\n\nimpl Channel<StoreMsg> for ServerRaftStoreRouter {\n\n fn send(&self, m: StoreMsg) -> Result<()> {\n\n RaftStoreRouter::try_send(self, m)\n\n }\n\n}\n\n\n", "file_path": "tests/raftstore/transport_simulate.rs", "rank": 46, "score": 143648.7887696764 }, { "content": "pub trait Filter<M>: Send + Sync {\n\n /// `before` is run before sending the 
messages.\n\n fn before(&self, msgs: &mut Vec<M>) -> Result<()>;\n\n /// `after` is run after sending the messages,\n\n /// so that the returned value could be changed if necessary.\n\n fn after(&self, res: Result<()>) -> Result<()> {\n\n res\n\n }\n\n}\n\n\n\npub type SendFilter = Box<Filter<RaftMessage>>;\n\npub type RecvFilter = Box<Filter<StoreMsg>>;\n\n\n\n#[derive(Clone)]\n\npub struct DropPacketFilter {\n\n rate: u32,\n\n}\n\n\n\nimpl DropPacketFilter {\n\n pub fn new(rate: u32) -> DropPacketFilter {\n", "file_path": "tests/raftstore/transport_simulate.rs", "rank": 47, "score": 143648.7887696764 }, { "content": "/// Processes a write command within a worker thread, then posts either a `WritePrepareFinished`\n\n/// message if successful or a `WritePrepareFailed` message back to the event loop.\n\nfn process_write(cid: u64, cmd: Command, ch: SendCh<Msg>, snapshot: Box<Snapshot>) {\n\n SCHED_WORKER_COUNTER_VEC.with_label_values(&[cmd.tag(), \"write\"]).inc();\n\n if let Err(e) = process_write_impl(cid, cmd, ch.clone(), snapshot.as_ref()) {\n\n if let Err(err) = ch.send(Msg::WritePrepareFailed { cid: cid, err: e }) {\n\n // Todo: if this happens, lock will hold for ever\n\n panic!(\"send WritePrepareFailed message to channel failed. 
cid={}, err={:?}\",\n\n cid,\n\n err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/storage/txn/scheduler.rs", "rank": 48, "score": 141948.32919566223 }, { "content": "// Decodes encoded data, returns message ID and body.\n\npub fn decode_data<T: io::Read>(r: &mut T) -> Result<(u64, Vec<u8>)> {\n\n let mut header = vec![0;MSG_HEADER_LEN];\n\n try!(r.read_exact(&mut header));\n\n let mut reader = header.as_slice();\n\n let (msg_id, payload_len) = try!(decode_msg_header(&mut reader));\n\n let mut payload = vec![0;payload_len];\n\n try!(r.read_exact(&mut payload));\n\n\n\n Ok((msg_id, payload))\n\n}\n\n\n", "file_path": "src/util/codec/rpc.rs", "rank": 49, "score": 141223.2596675921 }, { "content": "// Bootstrap the store, the DB for this store must be empty and has no data.\n\npub fn bootstrap_store(engine: &DB, cluster_id: u64, store_id: u64) -> Result<()> {\n\n let mut ident = StoreIdent::new();\n\n\n\n let mut count: u32 = 0;\n\n try!(engine.scan(keys::MIN_KEY,\n\n keys::MAX_KEY,\n\n false,\n\n &mut |_, _| {\n\n count += 1;\n\n Ok(false)\n\n }));\n\n\n\n if count > 0 {\n\n return Err(box_err!(\"store is not empty and has already had data.\"));\n\n }\n\n\n\n let ident_key = keys::store_ident_key();\n\n\n\n ident.set_cluster_id(cluster_id);\n\n ident.set_store_id(store_id);\n\n\n\n engine.put_msg(&ident_key, &ident)\n\n}\n\n\n", "file_path": "src/raftstore/store/bootstrap.rs", "rank": 50, "score": 141065.3368202021 }, { "content": "/// Processes a read command within a worker thread, then posts `ReadFinished` message back to the\n\n/// event loop.\n\nfn process_read(cid: u64, mut cmd: Command, ch: SendCh<Msg>, snapshot: Box<Snapshot>) {\n\n debug!(\"process read cmd(cid={}) in worker pool.\", cid);\n\n SCHED_WORKER_COUNTER_VEC.with_label_values(&[cmd.tag(), \"read\"]).inc();\n\n\n\n let pr = match cmd {\n\n // Gets from the snapshot.\n\n Command::Get { ref key, start_ts, .. 
} => {\n\n let snap_store = SnapshotStore::new(snapshot.as_ref(), start_ts);\n\n let res = snap_store.get(key);\n\n match res {\n\n Ok(val) => ProcessResult::Value { value: val },\n\n Err(e) => ProcessResult::Failed { err: StorageError::from(e) },\n\n }\n\n }\n\n // Batch gets from the snapshot.\n\n Command::BatchGet { ref keys, start_ts, .. } => {\n\n let snap_store = SnapshotStore::new(snapshot.as_ref(), start_ts);\n\n match snap_store.batch_get(keys) {\n\n Ok(results) => {\n\n let mut res = vec![];\n", "file_path": "src/storage/txn/scheduler.rs", "rank": 51, "score": 137548.9805519314 }, { "content": "pub fn bind(addr: &str) -> Result<TcpListener> {\n\n let laddr = try!(addr.parse());\n\n let listener = try!(TcpListener::bind(&laddr));\n\n Ok(listener)\n\n}\n\n\n\n// A helper structure to bundle all senders for messages to raftstore.\n\npub struct ServerChannel<T: RaftStoreRouter + 'static> {\n\n pub raft_router: T,\n\n pub snapshot_status_sender: Sender<SnapshotStatusMsg>,\n\n}\n\n\n\npub struct Server<T: RaftStoreRouter + 'static, S: StoreAddrResolver> {\n\n listener: TcpListener,\n\n // We use HashMap instead of common use mio slab to avoid token reusing.\n\n // In our raft server, a client with token 1 sends a raft command, we will\n\n // propose this command, execute it then send the response to the client with\n\n // token 1. But before the response, the client connection is broken and another\n\n // new client connects, mio slab may reuse the token 1 for it. 
So the subsequent\n\n // response will be sent to the new client.\n", "file_path": "src/server/server.rs", "rank": 52, "score": 137542.60078623876 }, { "content": "pub fn new_server_config(cluster_id: u64) -> ServerConfig {\n\n let store_cfg = new_store_cfg();\n\n\n\n ServerConfig {\n\n cluster_id: cluster_id,\n\n addr: \"127.0.0.1:0\".to_owned(),\n\n raft_store: store_cfg,\n\n storage: StorageConfig::default(),\n\n send_buffer_size: 64 * 1024,\n\n recv_buffer_size: 64 * 1024,\n\n ..ServerConfig::default()\n\n }\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 53, "score": 136111.49577913422 }, { "content": "pub fn new_compact_log_cmd(index: u64) -> AdminRequest {\n\n let mut cmd = AdminRequest::new();\n\n cmd.set_cmd_type(AdminCmdType::CompactLog);\n\n cmd.mut_compact_log().set_compact_index(index);\n\n cmd\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 54, "score": 136111.49577913422 }, { "content": "#[cfg(test)]\n\npub fn dummy_scheduler<T: Display>() -> Scheduler<T> {\n\n let (tx, _) = mpsc::channel();\n\n Scheduler::new(\"dummy scheduler\", AtomicUsize::new(0), tx)\n\n}\n\n\n\n/// A worker that can schedule time consuming tasks.\n\npub struct Worker<T: Display> {\n\n scheduler: Scheduler<T>,\n\n receiver: Mutex<Option<Receiver<Option<T>>>>,\n\n handle: Option<JoinHandle<()>>,\n\n}\n\n\n", "file_path": "src/util/worker/mod.rs", "rank": 55, "score": 135257.53433625997 }, { "content": "pub fn new_snap_mgr<T: Into<String>>(path: T, ch: Option<SendCh<Msg>>) -> SnapManager {\n\n Arc::new(RwLock::new(SnapManagerCore::new(path, ch)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::path::Path;\n\n use std::fs::{File, OpenOptions};\n\n use std::io::*;\n\n use std::sync::*;\n\n\n\n use tempdir::TempDir;\n\n\n\n use util::HandyRwLock;\n\n use super::*;\n\n\n\n #[test]\n\n fn test_snap_mgr() {\n\n let path = TempDir::new(\"test-snap-mgr\").unwrap();\n\n\n", "file_path": "src/raftstore/store/snap.rs", "rank": 56, "score": 134604.9833759053 }, { 
"content": "fn new_message(to: u64, field_type: MessageType, from: Option<u64>) -> Message {\n\n let mut m = Message::new();\n\n m.set_to(to);\n\n if let Some(id) = from {\n\n m.set_from(id);\n\n }\n\n m.set_msg_type(field_type);\n\n m\n\n}\n\n\n", "file_path": "src/raft/raft.rs", "rank": 57, "score": 134202.14253448287 }, { "content": "#[derive(PartialEq)]\n\nenum ConnType {\n\n Handshake,\n\n Rpc,\n\n Snapshot,\n\n}\n\n\n\nconst SNAPSHOT_PAYLOAD_BUF: usize = 4 * 1024 * 1024;\n\nconst DEFAULT_SEND_BUFFER_SIZE: usize = 8 * 1024;\n\nconst DEFAULT_RECV_BUFFER_SIZE: usize = 8 * 1024;\n\nconst DEFAULT_BUFFER_SHRINK_THRESHOLD: usize = 1024 * 1024;\n\n\n\npub struct Conn {\n\n pub sock: TcpStream,\n\n pub token: Token,\n\n pub interest: EventSet,\n\n\n\n conn_type: ConnType,\n\n\n\n // store id is for remote store, we only set this\n\n // when we connect to the remote store.\n", "file_path": "src/server/conn.rs", "rank": 58, "score": 133828.5039021343 }, { "content": "/// Get the log index from raft log key generated by `raft_log_key`.\n\npub fn raft_log_index(key: &[u8]) -> Result<u64> {\n\n let expect_key_len = REGION_RAFT_PREFIX_KEY.len() + mem::size_of::<u64>() +\n\n mem::size_of::<u8>() + mem::size_of::<u64>();\n\n if key.len() != expect_key_len {\n\n return Err(box_err!(\"key {} is not a valid raft log key\", escape(key)));\n\n }\n\n Ok(BigEndian::read_u64(&key[expect_key_len - mem::size_of::<u64>()..]))\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 59, "score": 132462.72930993498 }, { "content": "pub fn region_state_key(region_id: u64) -> Vec<u8> {\n\n make_region_meta_key(region_id, REGION_STATE_SUFFIX)\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 60, "score": 130419.6385952044 }, { "content": "pub fn region_raft_prefix(region_id: u64) -> Vec<u8> {\n\n let mut key = Vec::with_capacity(REGION_RAFT_PREFIX_KEY.len() + mem::size_of::<u64>());\n\n key.extend_from_slice(REGION_RAFT_PREFIX_KEY);\n\n // no need check error here, 
can't panic;\n\n key.write_u64::<BigEndian>(region_id).unwrap();\n\n key\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 61, "score": 130419.6385952044 }, { "content": "pub fn raft_state_key(region_id: u64) -> Vec<u8> {\n\n make_region_id_key(region_id, RAFT_STATE_SUFFIX, 0)\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 62, "score": 130419.6385952044 }, { "content": "#[cfg(not(unix))]\n\npub fn check_max_open_fds(expect: u64) -> Result<(), ConfigError> {\n\n Ok(())\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\nmod check_kernel {\n\n use std::fs;\n\n use std::io::Read;\n\n\n\n use super::ConfigError;\n\n\n\n // pub for tests.\n\n pub type Checker = Fn(i64, i64) -> bool;\n\n\n\n // pub for tests.\n\n pub fn check_kernel_params(param_path: &str,\n\n expect: i64,\n\n checker: Box<Checker>)\n\n -> Result<(), ConfigError> {\n\n let mut buffer = String::new();\n", "file_path": "src/util/config.rs", "rank": 63, "score": 130419.6385952044 }, { "content": "pub fn region_meta_prefix(region_id: u64) -> Vec<u8> {\n\n let mut key = Vec::with_capacity(REGION_META_PREFIX_KEY.len() + mem::size_of::<u64>());\n\n key.extend_from_slice(REGION_META_PREFIX_KEY);\n\n key.write_u64::<BigEndian>(region_id).unwrap();\n\n key\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 64, "score": 130419.6385952044 }, { "content": "pub fn raft_log_prefix(region_id: u64) -> Vec<u8> {\n\n make_region_id_key(region_id, RAFT_LOG_SUFFIX, 0)\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 65, "score": 130419.6385952044 }, { "content": "pub fn apply_state_key(region_id: u64) -> Vec<u8> {\n\n make_region_id_key(region_id, APPLY_STATE_SUFFIX, 0)\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 66, "score": 130419.6385952044 }, { "content": "// Decodes encoded message, returns message ID.\n\npub fn decode_msg<T: io::Read, M: protobuf::Message>(r: &mut T, m: &mut M) -> Result<u64> {\n\n let (message_id, payload) = 
try!(decode_data(r));\n\n let mut reader = payload.as_slice();\n\n try!(decode_body(&mut reader, m));\n\n\n\n Ok(message_id)\n\n}\n\n\n", "file_path": "src/util/codec/rpc.rs", "rank": 67, "score": 127539.74014900035 }, { "content": "// Clear first region meta.\n\npub fn clear_region(engine: &DB, region_id: u64) -> Result<()> {\n\n try!(engine.delete(&keys::region_state_key(region_id)));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/raftstore/store/bootstrap.rs", "rank": 68, "score": 127156.23577780704 }, { "content": "// Calculate the quorum of a Raft cluster with the specified total nodes.\n\npub fn quorum(total: usize) -> usize {\n\n total / 2 + 1\n\n}\n\n\n\nimpl<T: Storage> Raft<T> {\n\n pub fn new(c: &Config, store: T) -> Raft<T> {\n\n c.validate().expect(\"configuration is invalid\");\n\n let rs = store.initial_state().expect(\"\");\n\n let raft_log = RaftLog::new(store, c.tag.clone());\n\n let mut peers: &[u64] = &c.peers;\n\n if !rs.conf_state.get_nodes().is_empty() {\n\n if !peers.is_empty() {\n\n // TODO: the peers argument is always nil except in\n\n // tests; the argument should be removed and these tests should be\n\n // updated to specify their nodes through a snap\n\n panic!(\"{} cannot specify both new(peers) and ConfState.Nodes\",\n\n c.tag)\n\n }\n\n peers = rs.conf_state.get_nodes();\n\n }\n", "file_path": "src/raft/raft.rs", "rank": 69, "score": 125773.20524910853 }, { "content": "// Decode region meta key, return the region key and meta suffix type.\n\npub fn decode_region_meta_key(key: &[u8]) -> Result<(u64, u8)> {\n\n if REGION_META_PREFIX_KEY.len() + mem::size_of::<u64>() + mem::size_of::<u8>() != key.len() {\n\n return Err(box_err!(\"invalid region meta key length for key {}\", escape(key)));\n\n }\n\n\n\n if !key.starts_with(REGION_META_PREFIX_KEY) {\n\n return Err(box_err!(\"invalid region meta prefix for key {}\", escape(key)));\n\n }\n\n\n\n let region_id =\n\n 
BigEndian::read_u64(&key[REGION_META_PREFIX_KEY.len()..REGION_META_PREFIX_KEY.len() +\n\n mem::size_of::<u64>()]);\n\n\n\n Ok((region_id, key[key.len() - 1]))\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 70, "score": 125233.19436403742 }, { "content": "type SubTmp = (usize, usize, u8);\n\n\n\n/// calculate the carry for lhs - rhs, returns the carry and needed temporary results for\n\n/// begining a subtraction.\n\n///\n\n/// The new carry can be None if lhs is equals to rhs.\n", "file_path": "src/util/codec/mysql/decimal.rs", "rank": 71, "score": 124645.02698178389 }, { "content": "#[inline]\n\nfn parse_data_at<T: Message + MessageStatic>(data: &[u8], index: u64, tag: &str) -> T {\n\n protobuf::parse_from_bytes::<T>(data).unwrap_or_else(|e| {\n\n panic!(\"{} data is corrupted at {}: {:?}\", tag, index, e);\n\n })\n\n}\n\n\n\npub struct ConsistencyState {\n\n pub last_check_time: Instant,\n\n // (computed_result_or_to_be_verified, index, hash)\n\n pub index: u64,\n\n pub hash: Vec<u8>,\n\n}\n\n\n\npub struct Peer {\n\n engine: Arc<DB>,\n\n peer_cache: Rc<RefCell<HashMap<u64, metapb::Peer>>>,\n\n pub peer: metapb::Peer,\n\n region_id: u64,\n\n pub raft_group: RawNode<PeerStorage>,\n\n pending_cmds: PendingCmdQueue,\n", "file_path": "src/raftstore/store/peer.rs", "rank": 72, "score": 124608.37902197833 }, { "content": "pub fn bind_term(resp: &mut RaftCmdResponse, term: u64) {\n\n if term == 0 {\n\n return;\n\n }\n\n\n\n resp.mut_header().set_current_term(term);\n\n}\n\n\n", "file_path": "src/raftstore/store/cmd_resp.rs", "rank": 73, "score": 123405.10569494739 }, { "content": "// StoreAddrResolver resolves the store address.\n\npub trait StoreAddrResolver {\n\n // Resolve resolves the store address asynchronously.\n\n fn resolve(&self, store_id: u64, cb: Callback) -> Result<()>;\n\n}\n\n\n", "file_path": "src/server/resolve.rs", "rank": 74, "score": 122474.0741165567 }, { "content": "pub fn init_log<W: LogWriter + Sync + Send + 'static>(writer: 
W,\n\n level: LogLevelFilter)\n\n -> Result<(), SetLoggerError> {\n\n log::set_logger(|filter| {\n\n filter.set(level);\n\n Box::new(Logger {\n\n level: level,\n\n writer: writer,\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/util/logger.rs", "rank": 75, "score": 120133.2860872927 }, { "content": "pub trait MsgSender {\n\n fn send(&self, msg: Msg) -> raftstore::Result<()>;\n\n // same as send, but with retry.\n\n fn try_send(&self, msg: Msg) -> raftstore::Result<()>;\n\n}\n\n\n\nimpl MsgSender for SendCh<Msg> {\n\n fn send(&self, msg: Msg) -> raftstore::Result<()> {\n\n SendCh::send(self, msg).map_err(|e| box_err!(\"{:?}\", e))\n\n }\n\n\n\n fn try_send(&self, msg: Msg) -> raftstore::Result<()> {\n\n SendCh::try_send(self, msg).map_err(|e| box_err!(\"{:?}\", e))\n\n }\n\n}\n\n\n\nimpl MsgSender for Sender<Msg> {\n\n fn send(&self, msg: Msg) -> raftstore::Result<()> {\n\n Sender::send(self, msg).unwrap();\n\n Ok(())\n", "file_path": "src/raftstore/store/worker/mod.rs", "rank": 76, "score": 119597.99235674931 }, { "content": "// returns the maximum encoded bytes size.\n\npub fn max_encoded_bytes_size(n: usize) -> usize {\n\n (n / ENC_GROUP_SIZE + 1) * (ENC_GROUP_SIZE + 1)\n\n}\n\n\n", "file_path": "src/util/codec/bytes.rs", "rank": 77, "score": 118726.41433150333 }, { "content": "/// Delivers the process result of a command to the storage callback.\n\nfn execute_callback(callback: StorageCb, pr: ProcessResult) {\n\n match callback {\n\n StorageCb::Boolean(cb) => {\n\n match pr {\n\n ProcessResult::Res => cb(Ok(())),\n\n ProcessResult::Failed { err } => cb(Err(err)),\n\n _ => panic!(\"process result mismatch\"),\n\n }\n\n }\n\n StorageCb::Booleans(cb) => {\n\n match pr {\n\n ProcessResult::MultiRes { results } => cb(Ok(results)),\n\n ProcessResult::Failed { err } => cb(Err(err)),\n\n _ => panic!(\"process result mismatch\"),\n\n }\n\n }\n\n StorageCb::SingleValue(cb) => {\n\n match pr {\n\n ProcessResult::Value { value } => cb(Ok(value)),\n\n ProcessResult::Failed { 
err } => cb(Err(err)),\n", "file_path": "src/storage/txn/scheduler.rs", "rank": 78, "score": 118326.93706152843 }, { "content": "pub fn new_pd_change_peer(change_type: ConfChangeType,\n\n peer: metapb::Peer)\n\n -> RegionHeartbeatResponse {\n\n let mut change_peer = ChangePeer::new();\n\n change_peer.set_change_type(change_type);\n\n change_peer.set_peer(peer);\n\n\n\n let mut resp = RegionHeartbeatResponse::new();\n\n resp.set_change_peer(change_peer);\n\n resp\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 79, "score": 116624.72298995062 }, { "content": "pub fn err_resp(e: Error, uuid: Uuid, term: u64) -> RaftCmdResponse {\n\n let mut resp = new_error(e);\n\n bind_term(&mut resp, term);\n\n bind_uuid(&mut resp, uuid);\n\n resp\n\n}\n\n\n", "file_path": "src/raftstore/store/cmd_resp.rs", "rank": 80, "score": 116087.65052289618 }, { "content": "fn process_write_impl(cid: u64,\n\n mut cmd: Command,\n\n ch: SendCh<Msg>,\n\n snapshot: &Snapshot)\n\n -> Result<()> {\n\n let (pr, modifies) = match cmd {\n\n Command::Prewrite { ref mutations, ref primary, start_ts, ref options, .. } => {\n\n let mut txn = MvccTxn::new(snapshot, start_ts, None);\n\n let mut results = vec![];\n\n for m in mutations {\n\n match txn.prewrite(m.clone(), primary, options) {\n\n Ok(_) => results.push(Ok(())),\n\n e @ Err(MvccError::KeyIsLocked { .. 
}) => results.push(e.map_err(Error::from)),\n\n Err(e) => return Err(Error::from(e)),\n\n }\n\n }\n\n let res = results.drain(..).map(|x| x.map_err(StorageError::from)).collect();\n\n let pr = ProcessResult::MultiRes { results: res };\n\n (pr, txn.modifies())\n\n }\n", "file_path": "src/storage/txn/scheduler.rs", "rank": 81, "score": 115516.24003519287 }, { "content": "/// `run_prometheus` runs a background prometheus client.\n\npub fn run_prometheus(interval: Duration,\n\n address: &str,\n\n job: &str)\n\n -> Option<thread::JoinHandle<()>> {\n\n if interval == Duration::from_secs(0) {\n\n return None;\n\n }\n\n\n\n let job = job.to_owned();\n\n let address = address.to_owned();\n\n Some(thread::spawn(move || {\n\n loop {\n\n let metric_familys = prometheus::gather();\n\n\n\n let res = prometheus::push_metrics(&job,\n\n prometheus::hostname_grouping_key(),\n\n &address,\n\n metric_familys);\n\n if let Err(e) = res {\n\n error!(\"fail to push metrics: {}\", e);\n", "file_path": "src/util/mod.rs", "rank": 82, "score": 114702.45406516409 }, { "content": "pub fn find_peer(region: &metapb::Region, store_id: u64) -> Option<&metapb::Peer> {\n\n for peer in region.get_peers() {\n\n if peer.get_store_id() == store_id {\n\n return Some(peer);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/raftstore/store/util.rs", "rank": 83, "score": 113674.20058820205 }, { "content": "pub fn limit_size<T: Message + Clone>(entries: &mut Vec<T>, max: u64) {\n\n if entries.is_empty() {\n\n return;\n\n }\n\n\n\n let mut size = Message::compute_size(&entries[0]) as u64;\n\n let mut limit = 1usize;\n\n while limit < entries.len() {\n\n size += Message::compute_size(&entries[limit]) as u64;\n\n if size > max {\n\n break;\n\n }\n\n limit += 1;\n\n }\n\n entries.truncate(limit);\n\n}\n\n\n\npub struct DefaultRng {\n\n rng: ThreadRng,\n\n}\n", "file_path": "src/util/mod.rs", "rank": 84, "score": 113197.0620212486 }, { "content": "fn tm_to_secs(t: Tm) -> u64 {\n\n t.tm_hour as u64 * 
SECS_PER_HOUR + t.tm_min as u64 * SECS_PER_MINUTE + t.tm_sec as u64\n\n}\n\n\n\n/// `Duration` is the type for `MySQL` time type.\n\n#[derive(Debug, Clone)]\n\npub struct Duration {\n\n pub dur: StdDuration,\n\n neg: bool,\n\n // Fsp is short for Fractional Seconds Precision.\n\n // See http://dev.mysql.com/doc/refman/5.7/en/fractional-seconds.html\n\n fsp: u8,\n\n}\n\n\n\nimpl Duration {\n\n pub fn zero() -> Duration {\n\n Duration {\n\n dur: StdDuration::from_secs(0),\n\n neg: false,\n\n fsp: 0,\n", "file_path": "src/util/codec/mysql/duration.rs", "rank": 85, "score": 113125.89279219467 }, { "content": "/// Creates a new key from raw bytes.\n\npub fn make_key(k: &[u8]) -> Key {\n\n Key::from_raw(k)\n\n}\n", "file_path": "src/storage/types.rs", "rank": 86, "score": 112647.84758164002 }, { "content": "pub fn conf_change_type_str(conf_type: &eraftpb::ConfChangeType) -> &'static str {\n\n match *conf_type {\n\n ConfChangeType::AddNode => STR_CONF_CHANGE_ADD_NODE,\n\n ConfChangeType::RemoveNode => STR_CONF_CHANGE_REMOVE_NODE,\n\n }\n\n}\n\n\n", "file_path": "src/raftstore/store/util.rs", "rank": 87, "score": 111662.03327622937 }, { "content": "// Create a base request.\n\npub fn new_base_request(region_id: u64, epoch: RegionEpoch, read_quorum: bool) -> RaftCmdRequest {\n\n let mut req = RaftCmdRequest::new();\n\n req.mut_header().set_region_id(region_id);\n\n req.mut_header().set_region_epoch(epoch);\n\n req.mut_header().set_uuid(Uuid::new_v4().as_bytes().to_vec());\n\n req.mut_header().set_read_quorum(read_quorum);\n\n req\n\n}\n\n\n", "file_path": "tests/raftstore/util.rs", "rank": 88, "score": 111420.3195720783 }, { "content": "fn new_split_check_result(region_id: u64, epoch: RegionEpoch, split_key: Vec<u8>) -> Msg {\n\n Msg::SplitCheckResult {\n\n region_id: region_id,\n\n epoch: epoch,\n\n split_key: split_key,\n\n }\n\n}\n", "file_path": "src/raftstore/store/worker/split_check.rs", "rank": 89, "score": 109916.45665796267 }, { "content": "pub fn 
remove_peer(region: &mut metapb::Region, store_id: u64) -> Option<metapb::Peer> {\n\n region.get_peers()\n\n .iter()\n\n .position(|x| x.get_store_id() == store_id)\n\n .map(|i| region.mut_peers().remove(i))\n\n}\n\n\n", "file_path": "src/raftstore/store/util.rs", "rank": 90, "score": 109834.78041954231 }, { "content": "pub fn create_event_loop(notify_capacity: usize,\n\n messages_per_tick: usize)\n\n -> Result<EventLoop<Scheduler>> {\n\n let mut builder = EventLoopBuilder::new();\n\n builder.notify_capacity(notify_capacity);\n\n builder.messages_per_tick(messages_per_tick);\n\n let el = try!(builder.build());\n\n Ok(el)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::sync::mpsc::{channel, Sender};\n\n use kvproto::kvrpcpb::Context;\n\n\n\n fn expect_get_none(done: Sender<i32>) -> Callback<Option<Value>> {\n\n Box::new(move |x: Result<Option<Value>>| {\n\n assert_eq!(x.unwrap(), None);\n\n done.send(1).unwrap();\n", "file_path": "src/storage/mod.rs", "rank": 91, "score": 109601.49903008637 }, { "content": "// When we bootstrap the region or handling split new region, we must\n\n// call this to initialize region local state first.\n\npub fn write_initial_state<T: Mutable>(engine: &DB, w: &T, region_id: u64) -> Result<()> {\n\n let mut raft_state = RaftLocalState::new();\n\n raft_state.set_last_index(RAFT_INIT_LOG_INDEX);\n\n raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM);\n\n raft_state.mut_hard_state().set_commit(RAFT_INIT_LOG_INDEX);\n\n\n\n let mut apply_state = RaftApplyState::new();\n\n apply_state.set_applied_index(RAFT_INIT_LOG_INDEX);\n\n apply_state.mut_truncated_state().set_index(RAFT_INIT_LOG_INDEX);\n\n apply_state.mut_truncated_state().set_term(RAFT_INIT_LOG_TERM);\n\n\n\n let raft_cf = try!(rocksdb::get_cf_handle(engine, CF_RAFT));\n\n try!(w.put_msg_cf(raft_cf, &keys::raft_state_key(region_id), &raft_state));\n\n try!(w.put_msg_cf(raft_cf, &keys::apply_state_key(region_id), &apply_state));\n\n\n\n Ok(())\n\n}\n\n\n", 
"file_path": "src/raftstore/store/peer_storage.rs", "rank": 92, "score": 108281.04039527851 }, { "content": "pub fn do_snapshot(mgr: SnapManager, snap: &DbSnapshot, region_id: u64) -> raft::Result<Snapshot> {\n\n debug!(\"[region {}] begin to generate a snapshot\", region_id);\n\n\n\n let apply_state: RaftApplyState =\n\n match try!(snap.get_msg_cf(CF_RAFT, &keys::apply_state_key(region_id))) {\n\n None => return Err(box_err!(\"could not load raft state of region {}\", region_id)),\n\n Some(state) => state,\n\n };\n\n\n\n let idx = apply_state.get_applied_index();\n\n let term = if idx == apply_state.get_truncated_state().get_index() {\n\n apply_state.get_truncated_state().get_term()\n\n } else {\n\n match try!(snap.get_msg_cf::<Entry>(CF_RAFT, &keys::raft_log_key(region_id, idx))) {\n\n None => return Err(box_err!(\"entry {} of {} not found.\", idx, region_id)),\n\n Some(entry) => entry.get_term(),\n\n }\n\n };\n\n\n\n let key = SnapKey::new(region_id, term, idx);\n", "file_path": "src/raftstore/store/peer_storage.rs", "rank": 93, "score": 106803.86368747358 }, { "content": "// `cut_row` cut encoded row into byte slices and return interested columns' byte slice.\n\n// Row layout: colID1, value1, colID2, value2, .....\n\npub fn cut_row<'a>(mut data: &'a [u8], cols: &HashSet<i64>) -> Result<ValDict<'a>> {\n\n // hack: HashMap will still allocate memeory when capacity is 0, need to use new instead.\n\n if cols.is_empty() {\n\n return Ok(HashMap::new());\n\n }\n\n let mut res = HashMap::with_capacity(cols.len());\n\n if data.is_empty() || data.len() == 1 && data[0] == datum::NIL_FLAG {\n\n return Ok(res);\n\n }\n\n while !data.is_empty() && res.len() < cols.len() {\n\n let id = try!(data.decode_datum()).i64();\n\n let (val, rem) = try!(datum::split_datum(data, false));\n\n if cols.contains(&id) {\n\n res.insert(id, val);\n\n }\n\n data = rem;\n\n }\n\n Ok(res)\n\n}\n\n\n\npub type ValDict<'a> = HashMap<i64, &'a [u8]>;\n\n\n", "file_path": "src/util/codec/table.rs", 
"rank": 94, "score": 106389.13765757711 }, { "content": "pub fn validate_data_key(key: &[u8]) -> bool {\n\n key.starts_with(DATA_PREFIX_KEY)\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 95, "score": 105285.67591221364 }, { "content": "// `create_mem_buf` creates the buffer with fixed capacity s.\n\npub fn create_mem_buf(s: usize) -> MutByteBuf {\n\n unsafe {\n\n ByteBuf::from_mem_ref(alloc::heap(s.next_power_of_two()), s as u32, 0, s as u32).flip()\n\n }\n\n}\n\n\n\n/// `PipeBuffer` is useful when you want to move data from `Write` to a `Read` or vice versa.\n\npub struct PipeBuffer {\n\n // the index of the first byte of written data.\n\n start: usize,\n\n // the index of buf that new data should be written in.\n\n end: usize,\n\n buf: RawVec<u8>,\n\n}\n\n\n\nimpl PipeBuffer {\n\n pub fn new(capacity: usize) -> PipeBuffer {\n\n PipeBuffer {\n\n start: 0,\n\n end: 0,\n", "file_path": "src/util/buf.rs", "rank": 96, "score": 105263.85607330712 }, { "content": "pub fn data_key(key: &[u8]) -> Vec<u8> {\n\n let mut v = Vec::with_capacity(DATA_PREFIX_KEY.len() + key.len());\n\n v.extend_from_slice(DATA_PREFIX_KEY);\n\n v.extend_from_slice(key);\n\n v\n\n}\n\n\n", "file_path": "src/raftstore/store/keys.rs", "rank": 97, "score": 103483.19667771237 }, { "content": "fn test_recv_msg_request_vote_for_type(msg_type: MessageType) {\n\n let mut tests = vec![\n\n (StateRole::Follower, 0, 0, INVALID_ID, true),\n\n (StateRole::Follower, 0, 1, INVALID_ID, true),\n\n (StateRole::Follower, 0, 2, INVALID_ID, true),\n\n (StateRole::Follower, 0, 3, INVALID_ID, false),\n\n\n\n (StateRole::Follower, 1, 0, INVALID_ID, true),\n\n (StateRole::Follower, 1, 1, INVALID_ID, true),\n\n (StateRole::Follower, 1, 2, INVALID_ID, true),\n\n (StateRole::Follower, 1, 3, INVALID_ID, false),\n\n\n\n (StateRole::Follower, 2, 0, INVALID_ID, true),\n\n (StateRole::Follower, 2, 1, INVALID_ID, true),\n\n (StateRole::Follower, 2, 2, INVALID_ID, false),\n\n (StateRole::Follower, 2, 3, 
INVALID_ID, false),\n\n\n\n (StateRole::Follower, 3, 0, INVALID_ID, true),\n\n (StateRole::Follower, 3, 1, INVALID_ID, true),\n\n (StateRole::Follower, 3, 2, INVALID_ID, false),\n", "file_path": "tests/test_raft.rs", "rank": 98, "score": 103334.57149490282 }, { "content": "pub use self::node::{Node, create_raft_storage};\n\npub use self::resolve::{StoreAddrResolver, PdStoreAddrResolver};\n\n\n\npub type OnResponse = Box<FnBox(msgpb::Message) + Send>;\n\n\n\npub struct ConnData {\n\n msg_id: u64,\n\n msg: msgpb::Message,\n\n}\n\n\n\nimpl ConnData {\n\n pub fn new(msg_id: u64, msg: msgpb::Message) -> ConnData {\n\n ConnData {\n\n msg_id: msg_id,\n\n msg: msg,\n\n }\n\n }\n\n\n\n pub fn is_snapshot(&self) -> bool {\n\n if !self.msg.has_raft() {\n", "file_path": "src/server/mod.rs", "rank": 99, "score": 39.32218130243474 } ]
Rust
aml/src/test_utils.rs
Dentosal/acpi
2273964aef430a51afee8735526e7323f597d2ca
use crate::{parser::Propagate, AmlContext, AmlValue, Handler}; use alloc::boxed::Box; struct TestHandler; impl Handler for TestHandler { fn read_u8(&self, _address: usize) -> u8 { unimplemented!() } fn read_u16(&self, _address: usize) -> u16 { unimplemented!() } fn read_u32(&self, _address: usize) -> u32 { unimplemented!() } fn read_u64(&self, _address: usize) -> u64 { unimplemented!() } fn write_u8(&mut self, _address: usize, _value: u8) { unimplemented!() } fn write_u16(&mut self, _address: usize, _value: u16) { unimplemented!() } fn write_u32(&mut self, _address: usize, _value: u32) { unimplemented!() } fn write_u64(&mut self, _address: usize, _value: u64) { unimplemented!() } fn read_io_u8(&self, _port: u16) -> u8 { unimplemented!() } fn read_io_u16(&self, _port: u16) -> u16 { unimplemented!() } fn read_io_u32(&self, _port: u16) -> u32 { unimplemented!() } fn write_io_u8(&self, _port: u16, _value: u8) { unimplemented!() } fn write_io_u16(&self, _port: u16, _value: u16) { unimplemented!() } fn write_io_u32(&self, _port: u16, _value: u32) { unimplemented!() } fn read_pci_u8(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u8 { unimplemented!() } fn read_pci_u16(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u16 { unimplemented!() } fn read_pci_u32(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u32 { unimplemented!() } fn write_pci_u8(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u8) { unimplemented!() } fn write_pci_u16(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u16) { unimplemented!() } fn write_pci_u32(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u32) { unimplemented!() } } pub(crate) fn make_test_context() -> AmlContext { AmlContext::new(Box::new(TestHandler), crate::DebugVerbosity::None) } pub(crate) macro check_err($parse: expr, $error: pat, $remains: expr) { match 
$parse { Ok((remains, _, result)) => panic!("Expected Err, got {:#?}. Remaining = {:#x?}", result, remains), Err((remains, _, Propagate::Err($error))) if *remains == *$remains => (), Err((remains, _, Propagate::Err($error))) => { panic!("Correct error, incorrect stream returned: {:#x?}", remains) } Err((_, _, err)) => panic!("Got wrong error: {:?}", err), } } pub(crate) macro check_ok($parse: expr, $expected: expr, $remains: expr) { match $parse { Ok((remains, _, ref result)) if remains == *$remains && result == &$expected => (), Ok((remains, _, ref result)) if result == &$expected => { panic!("Correct result, incorrect slice returned: {:x?}", remains) } Ok((_, _, ref result)) => panic!("Successfully parsed Ok, but it was wrong: {:#?}", result), Err((_, _, err)) => panic!("Expected Ok, got {:#?}", err), } } pub(crate) macro check_ok_value($parse: expr, $expected: expr, $remains: expr) { match $parse { Ok((remains, _, ref result)) if remains == *$remains && crudely_cmp_values(result, &$expected) => (), Ok((remains, _, ref result)) if crudely_cmp_values(result, &$expected) => { panic!("Correct result, incorrect slice returned: {:x?}", remains) } Ok((_, _, ref result)) => panic!("Successfully parsed Ok, but it was wrong: {:#?}", result), Err((_, _, err)) => panic!("Expected Ok, got {:#?}", err), } } pub(crate) fn crudely_cmp_values(a: &AmlValue, b: &AmlValue) -> bool { use crate::value::MethodCode; match a { AmlValue::Boolean(a) => match b { AmlValue::Boolean(b) => a == b, _ => false, }, AmlValue::Integer(a) => match b { AmlValue::Integer(b) => a == b, _ => false, }, AmlValue::String(ref a) => match b { AmlValue::String(ref b) => a == b, _ => false, }, AmlValue::OpRegion { region, offset, length, parent_device } => match b { AmlValue::OpRegion { region: b_region, offset: b_offset, length: b_length, parent_device: b_parent_device, } => { region == b_region && offset == b_offset && length == b_length && parent_device == b_parent_device } _ => false, }, AmlValue::Field { 
region, flags, offset, length } => match b { AmlValue::Field { region: b_region, flags: b_flags, offset: b_offset, length: b_length } => { region == b_region && flags == b_flags && offset == b_offset && length == b_length } _ => false, }, AmlValue::Device => match b { AmlValue::Device => true, _ => false, }, AmlValue::Method { flags, code } => match b { AmlValue::Method { flags: b_flags, code: b_code } => { if flags != b_flags { return false; } match (code, b_code) { (MethodCode::Aml(a), MethodCode::Aml(b)) => a == b, (MethodCode::Aml(_), MethodCode::Native(_)) => false, (MethodCode::Native(_), MethodCode::Aml(_)) => false, (MethodCode::Native(_), MethodCode::Native(_)) => panic!("Can't compare two native methods"), } } _ => false, }, AmlValue::Buffer(a) => match b { AmlValue::Buffer(b) => *a.lock() == *b.lock(), _ => false, }, AmlValue::BufferField { buffer_data, offset, length } => match b { AmlValue::BufferField { buffer_data: b_buffer_data, offset: b_offset, length: b_length } => { alloc::sync::Arc::as_ptr(buffer_data) == alloc::sync::Arc::as_ptr(b_buffer_data) && offset == b_offset && length == b_length } _ => false, }, AmlValue::Processor { id, pblk_address, pblk_len } => match b { AmlValue::Processor { id: b_id, pblk_address: b_pblk_address, pblk_len: b_pblk_len } => { id == b_id && pblk_address == b_pblk_address && pblk_len == b_pblk_len } _ => false, }, AmlValue::Mutex { sync_level } => match b { AmlValue::Mutex { sync_level: b_sync_level } => sync_level == b_sync_level, _ => false, }, AmlValue::Package(a) => match b { AmlValue::Package(b) => { for (a, b) in a.iter().zip(b) { if crudely_cmp_values(a, b) == false { return false; } } true } _ => false, }, AmlValue::PowerResource { system_level, resource_order } => match b { AmlValue::PowerResource { system_level: b_system_level, resource_order: b_resource_order } => { system_level == b_system_level && resource_order == b_resource_order } _ => false, }, AmlValue::ThermalZone => match b { AmlValue::ThermalZone 
=> true, _ => false, }, } }
use crate::{parser::Propagate, AmlContext, AmlValue, Handler}; use alloc::boxed::Box; struct TestHandler; impl Handler for TestHandler { fn read_u8(&self, _address: usize) -> u8 { unimplemented!() } fn read_u16(&self, _address: usize) -> u16 { unimplemented!() } fn read_u32(&self, _address: usize) -> u32 { unimplemented!() } fn read_u64(&self, _address: usize) -> u64 { unimplemented!() } fn write_u8(&mut self, _address: usize, _value: u8) { unimplemented!() } fn write_u16(&mut self, _address: usize, _value: u16) { unimplemented!() } fn write_u32(&mut self, _address: usize, _value: u32) { unimplemented!() } fn write_u64(&mut self, _address: usize, _value: u64) { unimplemented!() } fn read_io_u8(&self, _port: u16) -> u8 { unimplemented!() } fn read_io_u16(&self, _port: u16) -> u16 { unimplemented!() } fn read_io_u32(&self, _port: u16) -> u32 { unimplemented!() } fn write_io_u8(&self, _port: u16, _value: u8) { unimplemented!() } fn write_io_u16(&self, _port: u16, _value: u16) { unimplemented!() } fn write_io_u32(&self, _port: u16, _value: u32) { unimplemented!() } fn read_pci_u8(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u8 { unimplemented!() } fn read_pci_u16(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u16 { unimplemented!() } fn read_pci_u32(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16) -> u32 { unimplemented!() } fn write_pci_u8(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u8) { unimplemented!() } fn write_pci_u16(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u16) { unimplemented!() } fn write_pci_u32(&self, _segment: u16, _bus: u8, device: u8, _function: u8, _offset: u16, _value: u32) { unimplemented!() } } pub(crate) fn make_test_context() -> AmlContext { AmlContext::new(Box::new(TestHandler), crate::DebugVerbosity::None) } pub(crate) macro check_err($parse: expr, $error: pat, $remains: expr) { match 
$parse { Ok((remains, _, result)) => panic!("Expected Err, got {:#?}. Remaining = {:#x?}", result, remains), Err((remains, _, Propagate::Err($error))) if *remains == *$remains => (), Err((remains, _, Propagate::Err($error))) => { panic!("Correct error, incorrect stream returned: {:#x?}", remains) } Err((_, _, err)) => panic!("Got wrong error: {:?}", err), } } pub(crate) macro check_ok($parse: expr, $expected: expr, $remains: expr) { match $parse { Ok((remains, _, ref result)) if remains == *$remains && result == &$expected => (), Ok((remains, _, ref result)) if result == &$expected => { panic!("Correct result, incorrect slice returned: {:x?}", remains) } Ok((_, _, ref result)) => panic!("Successfully parsed Ok, but it was wrong: {:#?}", result), Err((_, _, err)) => panic!("Expected Ok, got {:#?}", err), } } pub(crate) macro check_ok_value($parse: expr, $expected: expr, $remains: expr) { match $parse { Ok((remains, _, ref result)) if remains == *$remains && crudely_cmp_values(result, &$expected) => (), Ok((remains, _, ref result)) if crudely_cmp_values(result, &$expected) => { panic!("Correct result, incorrect slice returned: {:x?}", remains) } Ok((_, _, ref result)) => panic!("Successfully parsed Ok, but it was wrong: {:#?}", result), Err((_, _, err)) => panic!("Expe
_parent_device } _ => false, }, AmlValue::Field { region, flags, offset, length } => match b { AmlValue::Field { region: b_region, flags: b_flags, offset: b_offset, length: b_length } => { region == b_region && flags == b_flags && offset == b_offset && length == b_length } _ => false, }, AmlValue::Device => match b { AmlValue::Device => true, _ => false, }, AmlValue::Method { flags, code } => match b { AmlValue::Method { flags: b_flags, code: b_code } => { if flags != b_flags { return false; } match (code, b_code) { (MethodCode::Aml(a), MethodCode::Aml(b)) => a == b, (MethodCode::Aml(_), MethodCode::Native(_)) => false, (MethodCode::Native(_), MethodCode::Aml(_)) => false, (MethodCode::Native(_), MethodCode::Native(_)) => panic!("Can't compare two native methods"), } } _ => false, }, AmlValue::Buffer(a) => match b { AmlValue::Buffer(b) => *a.lock() == *b.lock(), _ => false, }, AmlValue::BufferField { buffer_data, offset, length } => match b { AmlValue::BufferField { buffer_data: b_buffer_data, offset: b_offset, length: b_length } => { alloc::sync::Arc::as_ptr(buffer_data) == alloc::sync::Arc::as_ptr(b_buffer_data) && offset == b_offset && length == b_length } _ => false, }, AmlValue::Processor { id, pblk_address, pblk_len } => match b { AmlValue::Processor { id: b_id, pblk_address: b_pblk_address, pblk_len: b_pblk_len } => { id == b_id && pblk_address == b_pblk_address && pblk_len == b_pblk_len } _ => false, }, AmlValue::Mutex { sync_level } => match b { AmlValue::Mutex { sync_level: b_sync_level } => sync_level == b_sync_level, _ => false, }, AmlValue::Package(a) => match b { AmlValue::Package(b) => { for (a, b) in a.iter().zip(b) { if crudely_cmp_values(a, b) == false { return false; } } true } _ => false, }, AmlValue::PowerResource { system_level, resource_order } => match b { AmlValue::PowerResource { system_level: b_system_level, resource_order: b_resource_order } => { system_level == b_system_level && resource_order == b_resource_order } _ => false, }, 
AmlValue::ThermalZone => match b { AmlValue::ThermalZone => true, _ => false, }, } }
cted Ok, got {:#?}", err), } } pub(crate) fn crudely_cmp_values(a: &AmlValue, b: &AmlValue) -> bool { use crate::value::MethodCode; match a { AmlValue::Boolean(a) => match b { AmlValue::Boolean(b) => a == b, _ => false, }, AmlValue::Integer(a) => match b { AmlValue::Integer(b) => a == b, _ => false, }, AmlValue::String(ref a) => match b { AmlValue::String(ref b) => a == b, _ => false, }, AmlValue::OpRegion { region, offset, length, parent_device } => match b { AmlValue::OpRegion { region: b_region, offset: b_offset, length: b_length, parent_device: b_parent_device, } => { region == b_region && offset == b_offset && length == b_length && parent_device == b
random
[ { "content": "pub fn take_n<'a, 'c>(n: u32) -> impl Parser<'a, 'c, &'a [u8]>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context| {\n\n if (input.len() as u32) < n {\n\n return Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream)));\n\n }\n\n\n\n let (result, new_input) = input.split_at(n as usize);\n\n Ok((new_input, context, result))\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 0, "score": 172328.03961998818 }, { "content": "/// Parse a `ResourceDescriptor`. Returns `AmlError::IncompatibleValueConversion` if the passed value is not a\n\n/// `Buffer`.\n\nfn resource_descriptor(bytes: &[u8]) -> Result<(Option<Resource>, &[u8]), AmlError> {\n\n /*\n\n * If bit 7 of Byte 0 is set, it's a large descriptor. If not, it's a small descriptor.\n\n */\n\n if bytes[0].get_bit(7) {\n\n /*\n\n * We're parsing a large item. The descriptor type is encoded in Bits 0-6 of Byte 0. Valid types:\n\n * 0x00: Reserved\n\n * 0x01: 24-bit Memory Range Descriptor\n\n * 0x02: Generic Register Descriptor\n\n * 0x03: Reserved\n\n * 0x04: Vendor-defined Descriptor\n\n * 0x05: 32-bit Memory Range Descriptor\n\n * 0x06: 32-bit Fixed Memory Range Descriptor\n\n * 0x07: Address Space Resource Descriptor\n\n * 0x08: Word Address Space Descriptor\n\n * 0x09: Extended Interrupt Descriptor\n\n * 0x0a: QWord Address Space Descriptor\n\n * 0x0b: Extended Address Space Descriptor\n\n * 0x0c: GPIO Connection Descriptor\n", "file_path": "aml/src/resource.rs", "rank": 1, "score": 171066.82008657296 }, { "content": "fn irq_format_descriptor(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * IRQ Descriptor Definition\n\n *\n\n * Offset Field Name\n\n * Byte 0 Value = 0x22 or 0x23 (0010001nB)– Type = 0, Small item name = 0x4, Length = 2 or 3\n\n * Byte 1 IRQ mask bits[7:0], _INT\n\n * Bit [0] represents IRQ0, bit[1] is IRQ1, and so on.\n\n * Byte 2 IRQ mask bits[15:8], _INT\n\n * Bit [0] represents IRQ8, bit[1] is IRQ9, and so on.\n\n * Byte 3 IRQ 
Information. Each bit, when set, indicates this device is capable of driving a certain type of interrupt.\n\n * (Optional—if not included then assume edge sensitive, high true interrupts.)\n\n * These bits can be used both for reporting and setting IRQ resources.\n\n * Note: This descriptor is meant for describing interrupts that are connected to PIC-compatible interrupt controllers, which can only be programmed for Active-High-Edge-Triggered or Active-Low-Level-Triggered interrupts. Any other combination is invalid. The Extended Interrupt Descriptor can be used to describe other combinations.\n\n * Bit [7:6] Reserved (must be 0)\n\n * Bit [5] Wake Capability, _WKC\n\n * 0x0 = Not Wake Capable: This interrupt is not capable of waking the system.\n\n * 0x1 = Wake Capable: This interrupt is capable of waking the system from a\n\n * low-power idle state or a system sleep state.\n\n * Bit [4] Interrupt Sharing, _SHR\n", "file_path": "aml/src/resource.rs", "rank": 2, "score": 161281.81306133393 }, { "content": "fn fixed_memory_descriptor(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * -- 32-bit Fixed Memory Descriptor ---\n\n * Offset Field Name Definition\n\n * Byte 0 32-bit Fixed Memory Range Descriptor Value = 0x86 (10000110B) – Type = 1, Large item name = 0x06\n\n * Byte 1 Length, bits [7:0] Value = 0x09 (9)\n\n * Byte 2 Length, bits [15:8] Value = 0x00\n\n * Byte 3 Information This field provides extra information about this memory.\n\n * Bit [7:1] Ignored\n\n * Bit [0] Write status, _RW\n\n * 1 writeable (read/write)\n\n * 0 non-writeable (read-only)\n\n * Byte 4 Range base address, _BAS bits [7:0] Address bits [7:0] of the base memory address for which the card may be configured.\n\n * Byte 5 Range base address, _BAS bits [15:8] Address bits [15:8] of the base memory address for which the card may be configured.\n\n * Byte 6 Range base address, _BAS bits [23:16] Address bits [23:16] of the base memory address for which the card may be configured.\n\n * 
Byte 7 Range base address, _BAS bits [31:24] Address bits [31:24] of the base memory address for which the card may be configured.\n\n * Byte 8 Range length, _LEN bits [7:0] This field contains bits [7:0] of the memory range length. The range length provides the length of the memory range in 1-byte blocks.\n\n * Byte 9 Range length, _LEN bits [15:8] This field contains bits [15:8] of the memory range length. The range length provides the length of the memory range in 1-byte blocks.\n\n * Byte 10 Range length, _LEN bits [23:16] This field contains bits [23:16] of the memory range length. The range length provides the length of the memory range in 1-byte blocks.\n\n * Byte 11 Range length, _LEN bits [31:24] This field contains bits [31:24] of the memory range length. The range length provides the length of the memory range in 1-byte blocks.\n", "file_path": "aml/src/resource.rs", "rank": 3, "score": 161281.81306133396 }, { "content": "fn io_port_descriptor(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * I/O Port Descriptor Definition\n\n * Offset Field Name Definition\n\n * Byte 0 I/O Port Descriptor Value = 0x47 (01000111B) –\n\n * Type = 0, Small item name = 0x8, Length = 7\n\n * Byte 1 Information Bits [7:1] Reserved and must be 0\n\n * Bit [0] (_DEC)\n\n * 1 The logical device decodes 16-bit addresses\n\n * 0 The logical device only decodes address bits[9:0]\n\n * Byte 2 Range minimum base address, _MIN bits[7:0] Address bits [7:0] of the minimum base I/O address that the card may be configured for.\n\n * Byte 3 Range minimum base address, _MIN bits[15:8] Address bits [15:8] of the minimum base I/O address that the card may be configured for.\n\n * Byte 4 Range maximum base address, _MAX bits[7:0] Address bits [7:0] of the maximum base I/O address that the card may be configured for.\n\n * Byte 5 Range maximum base address, _MAX bits[15:8] Address bits [15:8] of the maximum base I/O address that the card may be configured for.\n\n * Byte 6 Base 
alignment, _ALN Alignment for minimum base address, increment in 1-byte blocks.\n\n * Byte 7 Range length, _LEN The number of contiguous I/O ports requested.\n\n */\n\n if bytes.len() < 8 {\n\n return Err(AmlError::ResourceDescriptorTooShort);\n\n }\n", "file_path": "aml/src/resource.rs", "rank": 4, "score": 161281.81306133393 }, { "content": "fn extended_interrupt_descriptor(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * --- Extended Interrupt Descriptor ---\n\n * Byte 3 contains the Interrupt Vector Flags:\n\n * Bit 0: 1 if device consumes the resource, 0 if it produces it\n\n * Bit 1: 1 if edge-triggered, 0 if level-triggered\n\n * Bit 2: 1 = active-high, 0 = active-low\n\n * Bit 3: 1 if interrupt is shared with other devices\n\n * Bit 4: 1 if this interrupt is capable of waking the system, 0 if it is not\n\n * Byte 4 contains the number of interrupt numbers that follow. When this descriptor is\n\n * returned from `_CRS` or send to `_SRS`, this field must be 1.\n\n *\n\n * From Byte 5 onwards, there are `n` interrupt numbers, each of which is encoded as a\n\n * 4-byte little-endian number.\n\n *\n\n * NOTE: We only support the case where there is a single interrupt number.\n\n */\n\n if bytes.len() < 9 {\n\n return Err(AmlError::ResourceDescriptorTooShort);\n\n }\n", "file_path": "aml/src/resource.rs", "rank": 5, "score": 161281.81306133393 }, { "content": "fn address_space_descriptor<T>(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * WORD Address Space Descriptor Definition\n\n * Note: The definitions for DWORD and QWORD are the same other than the width of the address fields.\n\n *\n\n * Offset Field Name Definition\n\n * Byte 0 WORD Address Space Descriptor Value = 0x88 (10001000B) – Type = 1, Large item name = 0x08\n\n * Byte 1 Length, bits [7:0] Variable length, minimum value = 0x0D (13)\n\n * Byte 2 Length, bits [15:8] Variable length, minimum value = 0x00\n\n * Byte 3 Resource Type Indicates which type of resource this descriptor 
describes. Defined values are:\n\n * 0 Memory range\n\n * 1 I/O range\n\n * 2 Bus number range\n\n * 3–191 Reserved\n\n * 192-255 Hardware Vendor Defined\n\n * Byte 4 General Flags Flags that are common to all resource types:\n\n * Bits [7:4] Reserved (must be 0)\n\n * Bit [3] Max Address Fixed, _MAF:\n\n * 1 The specified maximum address is fixed\n\n * 0 The specified maximum address is not fixed\n", "file_path": "aml/src/resource.rs", "rank": 6, "score": 155205.5421150146 }, { "content": "pub fn dma_format_descriptor(bytes: &[u8]) -> Result<Resource, AmlError> {\n\n /*\n\n * DMA Descriptor Definition\n\n * Offset Field Name\n\n * Byte 0 Value = 0x2A (00101010B) – Type = 0, Small item name = 0x5, Length = 2\n\n * Byte 1 DMA channel mask bits [7:0] (channels 0 – 7), _DMA\n\n * Bit [0] is channel 0, etc.\n\n * Byte 2 Bit [7] Reserved (must be 0)\n\n * Bits [6:5] DMA channel speed supported, _TYP\n\n * 00 Indicates compatibility mode\n\n * 01 Indicates Type A DMA as described in the EISA\n\n * 10 Indicates Type B DMA\n\n * 11 Indicates Type F\n\n * Bits [4:3] Ignored\n\n * Bit [2] Logical device bus master status, _BM\n\n * 0 Logical device is not a bus master\n\n * 1 Logical device is a bus master\n\n * Bits [1:0] DMA transfer type preference, _SIZ\n\n * 00 8-bit only\n\n * 01 8- and 16-bit\n", "file_path": "aml/src/resource.rs", "rank": 7, "score": 155205.5421150146 }, { "content": "fn parse_mps_inti_flags(flags: u16) -> Result<(Polarity, TriggerMode), AcpiError> {\n\n let polarity = match flags.get_bits(0..2) {\n\n 0b00 => Polarity::SameAsBus,\n\n 0b01 => Polarity::ActiveHigh,\n\n 0b11 => Polarity::ActiveLow,\n\n _ => return Err(AcpiError::InvalidMadt(MadtError::MpsIntiInvalidPolarity)),\n\n };\n\n\n\n let trigger_mode = match flags.get_bits(2..4) {\n\n 0b00 => TriggerMode::SameAsBus,\n\n 0b01 => TriggerMode::Edge,\n\n 0b11 => TriggerMode::Level,\n\n _ => return Err(AcpiError::InvalidMadt(MadtError::MpsIntiInvalidTriggerMode)),\n\n };\n\n\n\n Ok((polarity, 
trigger_mode))\n\n}\n", "file_path": "acpi/src/madt.rs", "rank": 8, "score": 149993.95198844094 }, { "content": "fn compile_asl_files(dir_path: &Path) -> std::io::Result<(u32, u32)> {\n\n let mut asl_files = fs::read_dir(dir_path)?\n\n .filter(|entry| entry.is_ok() && entry.as_ref().unwrap().path().extension() == Some(OsStr::new(\"asl\")))\n\n .map(Result::unwrap)\n\n .peekable();\n\n\n\n if !asl_files.peek().is_none() {\n\n // Test if `iasl` is installed, so we can give a good error if it's not\n\n if Command::new(\"iasl\").arg(\"-v\").status().unwrap().success().not() {\n\n panic!(\"`iasl` is not installed, but we want to compile some ASL files! Pass --no-compile, or install `iasl`\");\n\n }\n\n }\n\n\n\n let mut passed = 0;\n\n let mut failed = 0;\n\n\n\n for file in asl_files {\n\n let aml_path = file.path().with_extension(OsStr::new(\"aml\"));\n\n\n\n /*\n", "file_path": "aml_tester/src/main.rs", "rank": 9, "score": 142987.39866475575 }, { "content": "pub fn take_u64<'a, 'c>() -> impl Parser<'a, 'c, u64>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| {\n\n if input.len() < 8 {\n\n return Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream)));\n\n }\n\n\n\n Ok((&input[8..], context, u64::from_le_bytes(input[0..8].try_into().unwrap())))\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 10, "score": 142354.71619412384 }, { "content": "pub fn take_u16<'a, 'c>() -> impl Parser<'a, 'c, u16>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| {\n\n if input.len() < 2 {\n\n return Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream)));\n\n }\n\n\n\n Ok((&input[2..], context, u16::from_le_bytes(input[0..2].try_into().unwrap())))\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 11, "score": 142296.8624392446 }, { "content": "pub fn take_u32<'a, 'c>() -> impl Parser<'a, 'c, u32>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut 
AmlContext| {\n\n if input.len() < 4 {\n\n return Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream)));\n\n }\n\n\n\n Ok((&input[4..], context, u32::from_le_bytes(input[0..4].try_into().unwrap())))\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 12, "score": 142189.0088302712 }, { "content": "pub fn take<'a, 'c>() -> impl Parser<'a, 'c, u8>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| match input.first() {\n\n Some(&byte) => Ok((&input[1..], context, byte)),\n\n None => Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream))),\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 13, "score": 134253.49845069367 }, { "content": "struct Handler;\n\n\n\nimpl aml::Handler for Handler {\n\n fn read_u8(&self, _address: usize) -> u8 {\n\n unimplemented!()\n\n }\n\n fn read_u16(&self, _address: usize) -> u16 {\n\n unimplemented!()\n\n }\n\n fn read_u32(&self, _address: usize) -> u32 {\n\n unimplemented!()\n\n }\n\n fn read_u64(&self, _address: usize) -> u64 {\n\n unimplemented!()\n\n }\n\n\n\n fn write_u8(&mut self, _address: usize, _value: u8) {\n\n unimplemented!()\n\n }\n\n fn write_u16(&mut self, _address: usize, _value: u16) {\n", "file_path": "aml_tester/src/main.rs", "rank": 14, "score": 129342.735697131 }, { "content": "/// Parses a `PkgLength` and returns the *raw length*. 
If you want an instance of `PkgLength`, use\n\n/// `pkg_length` instead.\n\npub fn raw_pkg_length<'a, 'c>() -> impl Parser<'a, 'c, u32>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * PkgLength := PkgLeadByte |\n\n * <PkgLeadByte ByteData> |\n\n * <PkgLeadByte ByteData ByteData> |\n\n * <PkgLeadByte ByteData ByteData ByteData>\n\n *\n\n * The length encoded by the PkgLength includes the number of bytes used to encode it.\n\n */\n\n move |input: &'a [u8], context: &'c mut AmlContext| {\n\n let (new_input, context, lead_byte) = take().parse(input, context)?;\n\n let byte_count = lead_byte.get_bits(6..8);\n\n\n\n if byte_count == 0 {\n\n let length = u32::from(lead_byte.get_bits(0..6));\n\n return Ok((new_input, context, length));\n\n }\n", "file_path": "aml/src/pkg_length.rs", "rank": 15, "score": 125887.78322387855 }, { "content": "struct Handler;\n\n\n\nimpl aml::Handler for Handler {\n\n fn read_u8(&self, _address: usize) -> u8 {\n\n 0\n\n }\n\n fn read_u16(&self, _address: usize) -> u16 {\n\n 0\n\n }\n\n fn read_u32(&self, _address: usize) -> u32 {\n\n 0\n\n }\n\n fn read_u64(&self, _address: usize) -> u64 {\n\n 0\n\n }\n\n\n\n fn write_u8(&mut self, _address: usize, _value: u8) {}\n\n fn write_u16(&mut self, _address: usize, _value: u16) {}\n\n fn write_u32(&mut self, _address: usize, _value: u32) {}\n\n fn write_u64(&mut self, _address: usize, _value: u64) {}\n", "file_path": "aml/fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 16, "score": 124012.36468375815 }, { "content": "fn def_return<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefReturn := 0xa4 ArgObject\n\n * ArgObject := TermArg => DataRefObject\n\n */\n\n opcode(opcode::DEF_RETURN_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefReturn\",\n\n term_arg().map(|return_arg| -> Result<(), Propagate> {\n\n /*\n\n * To return a value, we want to halt execution of the method and propagate the\n\n * return value all the way up to the start of the method invocation. 
To do this,\n\n * we emit a special error that is intercepted during method invocation and turned\n\n * into a valid result.\n\n */\n\n Err(Propagate::Return(return_arg))\n\n }),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 17, "score": 119857.57835925429 }, { "content": "pub fn consume<'a, 'c, F>(condition: F) -> impl Parser<'a, 'c, u8>\n\nwhere\n\n 'c: 'a,\n\n F: Fn(u8) -> bool,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| match input.first() {\n\n Some(&byte) if condition(byte) => Ok((&input[1..], context, byte)),\n\n Some(&byte) => Err((input, context, Propagate::Err(AmlError::UnexpectedByte(byte)))),\n\n None => Err((input, context, Propagate::Err(AmlError::UnexpectedEndOfStream))),\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 18, "score": 119854.19223720356 }, { "content": "pub fn take_to_end_of_pkglength<'a, 'c>(length: PkgLength) -> impl Parser<'a, 'c, &'a [u8]>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context| {\n\n /*\n\n * TODO: fuzzing manages to find PkgLengths that correctly parse during construction, but later crash here.\n\n * I would've thought we would pick up all invalid lengths there, so have a look at why this is needed.\n\n */\n\n let bytes_to_take = match (input.len() as u32).checked_sub(length.end_offset) {\n\n Some(bytes_to_take) => bytes_to_take,\n\n None => return Err((input, context, Propagate::Err(AmlError::InvalidPkgLength))),\n\n };\n\n take_n(bytes_to_take).parse(input, context)\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 19, "score": 114870.73511670076 }, { "content": "pub fn def_device<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefDevice := ExtOpPrefix 0x82 PkgLength NameString TermList\n\n */\n\n ext_opcode(opcode::EXT_DEF_DEVICE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefDevice\",\n\n pkg_length()\n\n .then(name_string())\n\n .map_with_context(|(length, name), context| 
{\n\n let resolved_name = try_with_context!(context, name.resolve(&context.current_scope));\n\n try_with_context!(\n\n context,\n\n context.namespace.add_value(resolved_name.clone(), AmlValue::Device)\n\n );\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 20, "score": 111837.76025395074 }, { "content": "/// `extract` observes another parser consuming part of the stream, and returns the result of the parser, and the\n\n/// section of the stream that was parsed by the parser. This is useful for re-parsing that section of the stream,\n\n/// which allows the result of a piece of AML to be reevaluated with a new context, for example.\n\n///\n\n/// Note that reparsing the stream is not idempotent - the context is changed by this parse.\n\npub fn extract<'a, 'c, P, R>(parser: P) -> impl Parser<'a, 'c, (R, &'a [u8])>\n\nwhere\n\n 'c: 'a,\n\n P: Parser<'a, 'c, R>,\n\n{\n\n move |input, context: &'c mut AmlContext| {\n\n let before = input;\n\n let (after, context, result) = parser.parse(input, context)?;\n\n let bytes_parsed = before.len() - after.len();\n\n let parsed = &before[..bytes_parsed];\n\n\n\n Ok((after, context, (result, parsed)))\n\n }\n\n}\n\n\n\npub struct Or<'a, 'c, P1, P2, R>\n\nwhere\n\n 'c: 'a,\n\n P1: Parser<'a, 'c, R>,\n\n P2: Parser<'a, 'c, R>,\n", "file_path": "aml/src/parser.rs", "rank": 21, "score": 110518.01333174264 }, { "content": "/// Find the areas we should search for the RSDP in.\n\npub fn find_search_areas<H>(handler: H) -> [Range<usize>; 2]\n\nwhere\n\n H: AcpiHandler,\n\n{\n\n /*\n\n * Read the base address of the EBDA from its location in the BDA (BIOS Data Area). Not all BIOSs fill this out\n\n * unfortunately, so we might not get a sensible result. 
We shift it left 4, as it's a segment address.\n\n */\n\n let ebda_start_mapping =\n\n unsafe { handler.map_physical_region::<u16>(EBDA_START_SEGMENT_PTR, mem::size_of::<u16>()) };\n\n let ebda_start = (*ebda_start_mapping as usize) << 4;\n\n\n\n [\n\n /*\n\n * The main BIOS area below 1MiB. In practice, from my [Restioson's] testing, the RSDP is more often here\n\n * than the EBDA. We also don't want to search the entire possibele EBDA range, if we've failed to find it\n\n * from the BDA.\n\n */\n\n RSDP_BIOS_AREA_START..(RSDP_BIOS_AREA_END + 1),\n\n // Check if base segment ptr is in valid range for EBDA base\n", "file_path": "rsdp/src/lib.rs", "rank": 22, "score": 109596.42799747054 }, { "content": "/// Parse a `ResourceDescriptor` into a list of resources. Returns `AmlError::IncompatibleValueConversion` if the passed value is not a\n\n/// `Buffer`.\n\npub fn resource_descriptor_list(descriptor: &AmlValue) -> Result<Vec<Resource>, AmlError> {\n\n if let AmlValue::Buffer(bytes) = descriptor {\n\n let mut descriptors = Vec::new();\n\n let buffer_data = bytes.lock();\n\n let mut bytes = buffer_data.as_slice();\n\n\n\n while bytes.len() > 0 {\n\n let (descriptor, remaining_bytes) = resource_descriptor(bytes)?;\n\n\n\n if let Some(descriptor) = descriptor {\n\n descriptors.push(descriptor);\n\n bytes = remaining_bytes;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n Ok(descriptors)\n\n } else {\n\n Err(AmlError::IncompatibleValueConversion { current: descriptor.type_of(), target: AmlType::Buffer })\n\n }\n\n}\n\n\n", "file_path": "aml/src/resource.rs", "rank": 23, "score": 102407.26567613955 }, { "content": "pub fn data_ref_object<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DataRefObject := DataObject | ObjectReference | DDBHandle\n\n */\n\n comment_scope(DebugVerbosity::AllScopes, \"DataRefObject\", choice!(data_object()))\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 24, "score": 102280.50693007385 }, { "content": "fn 
main() -> std::io::Result<()> {\n\n log::set_logger(&Logger).unwrap();\n\n log::set_max_level(log::LevelFilter::Trace);\n\n\n\n let matches = App::new(\"aml_tester\")\n\n .version(\"v0.1.0\")\n\n .author(\"Isaac Woods\")\n\n .about(\"Compiles and tests ASL files\")\n\n .arg(Arg::with_name(\"path\").short(\"p\").long(\"path\").required(true).takes_value(true))\n\n .arg(Arg::with_name(\"no_compile\").long(\"no-compile\"))\n\n .get_matches();\n\n\n\n let dir_path = Path::new(matches.value_of(\"path\").unwrap());\n\n println!(\"Running tests in directory: {:?}\", dir_path);\n\n\n\n if !matches.is_present(\"no_compile\") {\n\n let (passed, failed) = compile_asl_files(dir_path)?;\n\n println!(\"Compiled {} ASL files: {} passed, {} failed.\", passed + failed, passed, failed);\n\n }\n\n\n", "file_path": "aml_tester/src/main.rs", "rank": 25, "score": 99135.48663633951 }, { "content": "pub fn take_while<'a, 'c, P, R>(parser: P) -> impl Parser<'a, 'c, usize>\n\nwhere\n\n 'c: 'a,\n\n P: Parser<'a, 'c, R>,\n\n{\n\n move |mut input: &'a [u8], mut context: &'c mut AmlContext| {\n\n let mut num_passed = 0;\n\n loop {\n\n match parser.parse(input, context) {\n\n Ok((new_input, new_context, _)) => {\n\n input = new_input;\n\n context = new_context;\n\n num_passed += 1;\n\n }\n\n Err((_, context, Propagate::Err(AmlError::WrongParser))) => {\n\n return Ok((input, context, num_passed))\n\n }\n\n Err((_, context, err)) => return Err((input, context, err)),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 26, "score": 98199.68497719441 }, { "content": "fn is_name_char(byte: u8) -> bool {\n\n is_lead_name_char(byte) || is_digit_char(byte)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{parser::Parser, test_utils::*, AmlError};\n\n\n\n #[test]\n\n fn test_name_seg() {\n\n let mut context = crate::test_utils::make_test_context();\n\n\n\n check_ok!(\n\n name_seg().parse(&[b'A', b'F', b'3', b'Z'], &mut context),\n\n NameSeg([b'A', b'F', b'3', 
b'Z']),\n\n &[]\n\n );\n\n check_ok!(\n\n name_seg().parse(&[b'A', b'F', b'3', b'Z', 0xff], &mut context),\n", "file_path": "aml/src/name_object.rs", "rank": 27, "score": 96740.50561630003 }, { "content": "fn is_digit_char(byte: u8) -> bool {\n\n byte >= b'0' && byte <= b'9'\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 28, "score": 96740.50561630003 }, { "content": "fn is_lead_name_char(byte: u8) -> bool {\n\n (byte >= b'A' && byte <= b'Z') || byte == b'_'\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 30, "score": 94756.2737372366 }, { "content": "pub fn n_of<'a, 'c, P, R>(parser: P, n: usize) -> impl Parser<'a, 'c, Vec<R>>\n\nwhere\n\n 'c: 'a,\n\n P: Parser<'a, 'c, R>,\n\n{\n\n // TODO: can we write this more nicely?\n\n move |mut input, mut context| {\n\n let mut results = Vec::with_capacity(n);\n\n\n\n for _ in 0..n {\n\n let (new_input, new_context, result) = match parser.parse(input, context) {\n\n Ok((input, context, result)) => (input, context, result),\n\n Err((_, context, propagate)) => return Err((input, context, propagate)),\n\n };\n\n results.push(result);\n\n input = new_input;\n\n context = new_context;\n\n }\n\n\n\n Ok((input, context, results))\n\n }\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 31, "score": 93435.38330363443 }, { "content": "fn def_while<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefWhile := 0xa2 PkgLength Predicate TermList\n\n * Predicate := TermArg => Integer (0 = false, >0 = true)\n\n *\n\n * Parsing this does something a little unusual - it 'extracts' the predicate when it's first parsed, which\n\n * allows us to reevaluate it to see if we should break out of the while yet. 
This is required, to make sure\n\n * we're observing changes to the context between the iterations of the loop.\n\n */\n\n opcode(opcode::DEF_WHILE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefWhile\",\n\n pkg_length()\n\n .then(extract(term_arg()))\n\n .feed(move |(length, (first_predicate, predicate_stream))| {\n\n take_to_end_of_pkglength(length)\n", "file_path": "aml/src/statement.rs", "rank": 32, "score": 91972.80448484507 }, { "content": "fn def_fatal<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefFatal := ExtOpPrefix 0x32 FatalType FatalCode FatalArg\n\n * FatalType := ByteData\n\n * FatalCode := DWordData\n\n * FatalArg := TermArg => Integer\n\n */\n\n ext_opcode(opcode::EXT_DEF_FATAL_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefFatal\",\n\n take().then(take_u32()).then(term_arg()).map_with_context(\n\n |((fatal_type, fatal_code), fatal_arg), context| -> (Result<(), Propagate>, &'c mut AmlContext) {\n\n let fatal_arg = try_with_context!(context, fatal_arg.as_integer(context));\n\n context.handler.handle_fatal_error(fatal_type, fatal_code, fatal_arg);\n\n (Err(Propagate::Err(AmlError::FatalError)), context)\n\n },\n\n ),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 33, "score": 89497.48690704558 }, { "content": "fn def_noop<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefNoop := 0xa3\n\n */\n\n opcode(opcode::DEF_NOOP_OP).then(comment_scope(DebugVerbosity::AllScopes, \"DefNoop\", id())).discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 34, "score": 89497.48690704558 }, { "content": "fn def_if_else<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefIfElse := 0xa0 PkgLength Predicate TermList DefElse\n\n * Predicate := TermArg => Integer (0 = false, >0 = true)\n\n * DefElse := Nothing | <0xa1 PkgLength TermList>\n\n */\n\n opcode(opcode::DEF_IF_ELSE_OP)\n\n 
.then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefIfElse\",\n\n pkg_length()\n\n .then(term_arg())\n\n .feed(|(length, predicate_arg)| {\n\n take_to_end_of_pkglength(length)\n\n .map(move |then_branch| Ok((predicate_arg.as_bool()?, then_branch)))\n\n })\n\n .then(choice!(\n", "file_path": "aml/src/statement.rs", "rank": 35, "score": 89497.48690704558 }, { "content": "fn def_continue<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefContinue := 0x9f\n\n */\n\n opcode(opcode::DEF_CONTINUE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefContinue\",\n\n id().map(|()| -> Result<(), Propagate> { Err(Propagate::Continue) }),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 36, "score": 89497.48690704558 }, { "content": "fn def_break<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefBreak := 0xa5\n\n */\n\n opcode(opcode::DEF_BREAK_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefBreak\",\n\n id().map(|()| -> Result<(), Propagate> { Err(Propagate::Break) }),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 37, "score": 89497.48690704558 }, { "content": "fn def_breakpoint<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefBreakPoint := 0xcc\n\n * TODO: there is no debugger, so this doesn't do anything. If there was, this should stop execution and enter\n\n * the AML debugger.\n\n */\n\n opcode(opcode::DEF_BREAKPOINT_OP)\n\n .then(comment_scope(DebugVerbosity::AllScopes, \"DefBreakPoint\", id()))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 38, "score": 89497.48690704558 }, { "content": "/// The identity parser - returns the stream and context unchanged. 
Useful for producing parsers\n\n/// that produce a result without parsing anything by doing: `id().map(|()| Ok(foo))`.\n\npub fn id<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| Ok((input, context, ()))\n\n}\n\n\n", "file_path": "aml/src/parser.rs", "rank": 39, "score": 88161.88345633681 }, { "content": "pub fn statement_opcode<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * StatementOpcode := DefBreak | DefBreakPoint | DefContinue | DefFatal | DefIfElse | DefLoad | DefNoop |\n\n * DefNotify | DefRelease | DefReset | DefReturn | DefSignal | DefSleep | DefStall | DefWhile\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"StatementOpcode\",\n\n choice!(\n\n def_break(),\n\n def_breakpoint(),\n\n def_continue(),\n\n def_fatal(),\n\n def_if_else(),\n\n def_noop(),\n\n def_return(),\n\n def_while()\n\n ),\n\n )\n\n}\n\n\n", "file_path": "aml/src/statement.rs", "rank": 40, "score": 85861.49347830827 }, { "content": "fn def_l_or<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLOr := 0x91 Operand Operand\n\n * Operand := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_L_OR_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLOr\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let left = try_with_context!(context, left_arg.as_bool());\n\n let right = try_with_context!(context, right_arg.as_bool());\n\n (Ok(AmlValue::Boolean(left || right)), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 41, "score": 85861.49347830827 }, { "content": "pub fn debug_obj<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DebugObj := ExtOpPrefix 0x31\n\n */\n\n ext_opcode(opcode::EXT_DEBUG_OP)\n\n}\n\n\n\n/// Takes a value between `0` and `7`, where 0 represents `Local0` etc.\n\npub type 
LocalNum = u8;\n\n\n", "file_path": "aml/src/misc.rs", "rank": 42, "score": 85861.49347830827 }, { "content": "pub fn def_processor<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefProcessor := ExtOpPrefix 0x83 PkgLength NameString ProcID PblkAddress PblkLen TermList\n\n * ProcID := ByteData\n\n * PblkAddress := DWordData\n\n * PblkLen := ByteData\n\n */\n\n ext_opcode(opcode::EXT_DEF_PROCESSOR_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefProcessor\",\n\n pkg_length()\n\n .then(name_string())\n\n .then(take())\n\n .then(take_u32())\n\n .then(take())\n\n .map_with_context(|((((pkg_length, name), proc_id), pblk_address), pblk_len), context| {\n", "file_path": "aml/src/term_object.rs", "rank": 43, "score": 83749.41698430915 }, { "content": "fn def_increment<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefIncrement := 0x75 SuperName\n\n */\n\n opcode(opcode::DEF_INCREMENT_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefIncrement\",\n\n super_name().map_with_context(|addend, context| {\n\n let value = try_with_context!(context, context.read_target(&addend));\n\n let value = try_with_context!(context, value.as_integer(context));\n\n let new_value = AmlValue::Integer(value + 1);\n\n try_with_context!(context, context.store(addend, new_value.clone()));\n\n (Ok(new_value), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 44, "score": 83749.41698430915 }, { "content": "pub fn namespace_modifier<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NamespaceModifierObj := DefAlias | DefName | DefScope\n\n */\n\n choice!(def_name(), def_scope())\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 45, "score": 83749.41698430915 }, { "content": "fn def_l_equal<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLEqual := 0x93 Operand 
Operand\n\n * Operand := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_L_EQUAL_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLEqual\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord == Ordering::Equal)), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 46, "score": 83749.41698430915 }, { "content": "fn def_to_integer<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefToInteger := 0x99 Operand Target\n\n * Operand := TermArg\n\n */\n\n opcode(opcode::DEF_TO_INTEGER_OP)\n\n .then(comment_scope(DebugVerbosity::AllScopes, \"DefToInteger\", term_arg().then(target())))\n\n .map_with_context(|((), (operand, target)), context| {\n\n let result = match operand {\n\n AmlValue::Integer(value) => AmlValue::Integer(value),\n\n AmlValue::Buffer(data) => {\n\n AmlValue::Integer(try_with_context!(context, AmlValue::Buffer(data).as_integer(context)))\n\n }\n\n AmlValue::String(string) => AmlValue::Integer(try_with_context!(\n\n context,\n\n if string.starts_with(\"0x\") {\n\n u64::from_str_radix(string.trim_start_matches(\"0x\"), 16)\n", "file_path": "aml/src/expression.rs", "rank": 47, "score": 83749.41698430915 }, { "content": "fn method_invocation<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * MethodInvocation := NameString TermArgList\n\n *\n\n * MethodInvocation is the worst of the AML structures, because you're meant to figure out how much you're\n\n * meant to parse using the name of the method (by knowing from its definition how how many arguments it\n\n * takes). 
However, the definition of a method can in theory appear after an invocation of that method, and\n\n * so parsing them properly can be very difficult.\n\n * NOTE: We don't support the case of the definition appearing after the invocation.\n\n */\n\n comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"MethodInvocation\",\n\n name_string()\n\n .map_with_context(move |name, context| {\n\n let (full_path, handle) =\n\n try_with_context!(context, context.namespace.search(&name, &context.current_scope)).clone();\n", "file_path": "aml/src/expression.rs", "rank": 48, "score": 83749.41698430915 }, { "content": "fn def_l_greater<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLGreater := 0x94 Operand Operand\n\n */\n\n opcode(opcode::DEF_L_GREATER_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLGreater\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord == Ordering::Greater)), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 49, "score": 83749.41698430915 }, { "content": "fn def_decrement<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefDecrement := 0x76 SuperName\n\n */\n\n opcode(opcode::DEF_DECREMENT_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefDecrement\",\n\n super_name().map_with_context(|minuend, context| {\n\n let value = try_with_context!(context, context.read_target(&minuend));\n\n let value = try_with_context!(context, value.as_integer(context));\n\n let new_value = AmlValue::Integer(value - 1);\n\n try_with_context!(context, context.store(minuend, new_value.clone()));\n\n (Ok(new_value), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 50, "score": 83749.41698430915 }, { 
"content": "pub fn def_mutex<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefMutex := ExtOpPrefix 0x01 NameString SyncFlags\n\n * SyncFlags := ByteData (where bits 0-3: SyncLevel\n\n * bits 4-7: Reserved)\n\n */\n\n ext_opcode(opcode::EXT_DEF_MUTEX_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefMutex\",\n\n name_string().then(take()).map_with_context(|(name, sync_level), context| {\n\n try_with_context!(\n\n context,\n\n context.namespace.add_value_at_resolved_path(\n\n name,\n\n &context.current_scope,\n\n AmlValue::Mutex { sync_level }\n\n )\n\n );\n\n (Ok(()), context)\n\n }),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 51, "score": 83749.41698430915 }, { "content": "pub fn named_obj<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NamedObj := DefBankField | DefCreateBitField | DefCreateByteField | DefCreateWordField | DefCreateDWordField |\n\n * DefCreateQWordField | DefCreateField | DefDataRegion | DefExternal | DefOpRegion | DefPowerRes |\n\n * DefProcessor | DefThermalZone | DefMethod | DefMutex\n\n *\n\n * XXX: DefMethod and DefMutex (at least) are not included in any rule in the AML grammar,\n\n * but are defined in the NamedObj section so we assume they're part of NamedObj\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"NamedObj\",\n\n choice!(\n\n def_create_bit_field(),\n\n def_create_byte_field(),\n\n def_create_word_field(),\n\n def_create_dword_field(),\n", "file_path": "aml/src/term_object.rs", "rank": 52, "score": 83749.41698430915 }, { "content": "pub fn def_name<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefName := 0x08 NameString DataRefObject\n\n */\n\n opcode(opcode::DEF_NAME_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefName\",\n\n name_string().then(data_ref_object()).map_with_context(|(name, data_ref_object), context| {\n\n try_with_context!(\n\n 
context,\n\n context.namespace.add_value_at_resolved_path(name, &context.current_scope, data_ref_object)\n\n );\n\n (Ok(()), context)\n\n }),\n\n ))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 53, "score": 83749.41698430915 }, { "content": "fn def_store<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefStore := 0x70 TermArg SuperName\n\n *\n\n * Implicit conversion is only applied when the destination target is a `Name` - not when we\n\n * are storing into a method local or argument (these stores are semantically identical to\n\n * CopyObject). We must also make sure to return a copy of the data that is in the destination\n\n * after the store (as opposed to the data we think we put into it), because some stores can\n\n * alter the data during the store.\n\n */\n\n opcode(opcode::DEF_STORE_OP)\n\n .then(comment_scope(DebugVerbosity::Scopes, \"DefStore\", term_arg().then(super_name())))\n\n .map_with_context(|((), (value, target)), context| {\n\n (Ok(try_with_context!(context, context.store(target, value))), context)\n\n })\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 54, "score": 83749.41698430915 }, { "content": "fn def_l_not_equal<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLNotEqual := LNotOp(0x92) LEqualOp(0x93) Operand Operand\n\n */\n\n opcode(opcode::DEF_L_NOT_OP)\n\n .then(opcode(opcode::DEF_L_EQUAL_OP))\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLNotEqual\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord != Ordering::Equal)), context)\n\n }),\n\n ))\n\n .map(|(((), ()), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 55, "score": 83749.41698430915 }, { "content": "fn def_mid<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 
'a,\n\n{\n\n /*\n\n * DefMid := 0x9e MidObj TermArg TermArg Target\n\n * MidObj := TermArg => Buffer | String\n\n */\n\n opcode(opcode::DEF_MID_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefMid\",\n\n term_arg().then(term_arg()).then(term_arg()).then(target()).map_with_context(\n\n |(((source, index), length), target), context| {\n\n let index = try_with_context!(context, index.as_integer(context)) as usize;\n\n let length = try_with_context!(context, length.as_integer(context)) as usize;\n\n\n\n let result = try_with_context!(\n\n context,\n\n match source {\n", "file_path": "aml/src/expression.rs", "rank": 56, "score": 83749.41698430915 }, { "content": "pub fn def_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefField = ExtOpPrefix 0x81 PkgLength NameString FieldFlags FieldList\n\n * FieldFlags := ByteData\n\n */\n\n let opregion_as_handle = name_string().map_with_context(|region_name, context| {\n\n /*\n\n * We search for the opregion that this field is referencing here as we already have the correct starting\n\n * scope. 
If we leave this to later, it becomes much harder as we also need to know the field's scope.\n\n */\n\n let (_, handle) =\n\n try_with_context!(context, context.namespace.search(&region_name, &context.current_scope));\n\n (Ok(handle), context)\n\n });\n\n\n\n ext_opcode(opcode::EXT_DEF_FIELD_OP)\n\n .then(comment_scope(\n", "file_path": "aml/src/term_object.rs", "rank": 57, "score": 83749.41698430915 }, { "content": "pub fn def_scope<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefScope := 0x10 PkgLength NameString TermList\n\n */\n\n opcode(opcode::DEF_SCOPE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefScope\",\n\n pkg_length()\n\n .then(name_string())\n\n .map_with_context(|(length, name), context| {\n\n let previous_scope = context.current_scope.clone();\n\n context.current_scope = try_with_context!(context, name.resolve(&context.current_scope));\n\n\n\n context.comment(\n\n DebugVerbosity::Scopes,\n\n &(String::from(\"Scope name: \") + &context.current_scope.as_string()),\n", "file_path": "aml/src/term_object.rs", "rank": 58, "score": 83749.41698430915 }, { "content": "pub fn def_method<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefMethod := 0x14 PkgLength NameString MethodFlags TermList\n\n * MethodFlags := ByteData (where bits 0-2: ArgCount (0 to 7)\n\n * bit 3: SerializeFlag (0 = Not Serialized, 1 = Serialized)\n\n * bits 4-7: SyncLevel (0x00 to 0x0f))\n\n */\n\n opcode(opcode::DEF_METHOD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefMethod\",\n\n pkg_length()\n\n .then(name_string())\n\n .then(take())\n\n .feed(|((length, name), flags)| {\n\n take_to_end_of_pkglength(length).map(move |code| Ok((name.clone(), flags, code)))\n\n })\n", "file_path": "aml/src/term_object.rs", "rank": 59, "score": 83749.41698430915 }, { "content": "pub fn def_external<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefExternal = 0x15 NameString 
ObjectType ArgumentCount\n\n * ObjectType := ByteData\n\n * ArgumentCount := ByteData (0 to 7)\n\n */\n\n opcode(opcode::DEF_EXTERNAL_OP)\n\n .then(comment_scope(DebugVerbosity::Scopes, \"DefExternal\", name_string().then(take()).then(take())))\n\n .discard_result()\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 60, "score": 83749.41698430915 }, { "content": "fn def_l_less<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLLess := 0x95 Operand Operand\n\n */\n\n opcode(opcode::DEF_L_LESS_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLLess\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord == Ordering::Less)), context)\n\n }),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 61, "score": 83749.41698430915 }, { "content": "pub fn def_and<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefAnd := 0x7b Operand Operand Target\n\n * Operand := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_AND_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefAnd\",\n\n term_arg().then(term_arg()).then(target()).map_with_context(\n\n |((left_arg, right_arg), target), context| {\n\n let left = try_with_context!(context, left_arg.as_integer(context));\n\n let right = try_with_context!(context, right_arg.as_integer(context));\n\n let result = AmlValue::Integer(left & right);\n\n\n\n try_with_context!(context, context.store(target, result.clone()));\n\n (Ok(result), context)\n\n },\n\n ),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 62, "score": 82510.34042746105 }, { "content": "pub fn target<'a, 'c>() -> impl Parser<'a, 'c, Target>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * Target := SuperName | NullName\n\n * NullName := 
0x00\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"Target\",\n\n choice!(null_name().map(|_| Ok(Target::Null)), super_name()),\n\n )\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 63, "score": 82510.34042746105 }, { "content": "pub fn def_power_res<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefPowerRes := ExtOpPrefix 0x84 PkgLength NameString SystemLevel ResourceOrder TermList\n\n * SystemLevel := ByteData\n\n * ResourceOrder := WordData\n\n */\n\n ext_opcode(opcode::EXT_DEF_POWER_RES_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefPowerRes\",\n\n pkg_length()\n\n .then(name_string())\n\n .then(take())\n\n .then(take_u16())\n\n .map_with_context(|(((pkg_length, name), system_level), resource_order), context| {\n\n /*\n\n * `PowerResource` objects contain data within themselves, and can also have sub-objects,\n", "file_path": "aml/src/term_object.rs", "rank": 64, "score": 81789.68082513992 }, { "content": "fn def_shift_left<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefShiftLeft := 0x79 Operand ShiftCount Target\n\n * Operand := TermArg => Integer\n\n * ShiftCount := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_SHIFT_LEFT)\n\n .then(comment_scope(DebugVerbosity::Scopes, \"DefShiftLeft\", term_arg().then(term_arg()).then(target())))\n\n .map_with_context(|((), ((operand, shift_count), target)), context| {\n\n let operand = try_with_context!(context, operand.as_integer(context));\n\n let shift_count = try_with_context!(context, shift_count.as_integer(context));\n\n let shift_count =\n\n try_with_context!(context, shift_count.try_into().map_err(|_| AmlError::InvalidShiftLeft));\n\n\n\n let result = AmlValue::Integer(try_with_context!(\n\n context,\n\n operand.checked_shl(shift_count).ok_or(AmlError::InvalidShiftLeft)\n\n ));\n\n\n\n try_with_context!(context, context.store(target, result.clone()));\n\n (Ok(result), context)\n\n })\n\n}\n\n\n", 
"file_path": "aml/src/expression.rs", "rank": 65, "score": 81789.68082513992 }, { "content": "pub fn def_create_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateField := ExtOpPrefix 0x13 SourceBuf BitIndex NumBits NameString\n\n * SourceBuf := TermArg => Buffer\n\n * BitIndex := TermArg => Integer\n\n * NumBits := TermArg => Integer\n\n */\n\n ext_opcode(opcode::EXT_DEF_CREATE_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefCreateField\",\n\n term_arg().then(term_arg()).then(term_arg()).then(name_string()).map_with_context(\n\n |(((source, index), num_bits), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n let num_bits = try_with_context!(context, num_bits.as_integer(context));\n", "file_path": "aml/src/term_object.rs", "rank": 66, "score": 81789.68082513992 }, { "content": "fn def_shift_right<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefShiftRight := 0x7a Operand ShiftCount Target\n\n * Operand := TermArg => Integer\n\n * ShiftCount := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_SHIFT_RIGHT)\n\n .then(comment_scope(DebugVerbosity::Scopes, \"DefShiftRight\", term_arg().then(term_arg()).then(target())))\n\n .map_with_context(|((), ((operand, shift_count), target)), context| {\n\n let operand = try_with_context!(context, operand.as_integer(context));\n\n let shift_count = try_with_context!(context, shift_count.as_integer(context));\n\n let shift_count =\n\n try_with_context!(context, shift_count.try_into().map_err(|_| AmlError::InvalidShiftRight));\n\n\n\n let result = AmlValue::Integer(try_with_context!(\n\n context,\n\n operand.checked_shr(shift_count).ok_or(AmlError::InvalidShiftRight)\n\n ));\n\n\n\n try_with_context!(context, context.store(target, result.clone()));\n\n (Ok(result), 
context)\n\n })\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 67, "score": 81789.68082513992 }, { "content": "pub fn def_thermal_zone<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefThermalZone := ExtOpPrefix 0x85 PkgLength NameString TermList\n\n * TODO: we use this pattern a lot (move into scope, parse a term_list, move back out). Could we simplify into\n\n * just a `feed` by passing a scope into term_list?\n\n */\n\n ext_opcode(opcode::EXT_DEF_THERMAL_ZONE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::Scopes,\n\n \"DefThermalZone\",\n\n pkg_length()\n\n .then(name_string())\n\n .map_with_context(|(pkg_length, name), context| {\n\n let resolved_name = try_with_context!(context, name.resolve(&context.current_scope));\n\n try_with_context!(\n\n context,\n\n context.namespace.add_value(resolved_name.clone(), AmlValue::ThermalZone)\n", "file_path": "aml/src/term_object.rs", "rank": 68, "score": 81789.68082513992 }, { "content": "pub fn def_op_region<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefOpRegion := ExtOpPrefix 0x80 NameString RegionSpace RegionOffset RegionLen\n\n * RegionSpace := ByteData (where 0x00 = SystemMemory\n\n * 0x01 = SystemIO\n\n * 0x02 = PciConfig\n\n * 0x03 = EmbeddedControl\n\n * 0x04 = SMBus\n\n * 0x05 = SystemCMOS\n\n * 0x06 = PciBarTarget\n\n * 0x07 = IPMI\n\n * 0x08 = GeneralPurposeIO\n\n * 0x09 = GenericSerialBus\n\n * 0x80-0xff = OEM Defined)\n\n * ByteData := 0x00 - 0xff\n\n * RegionOffset := TermArg => Integer\n\n * RegionLen := TermArg => Integer\n", "file_path": "aml/src/term_object.rs", "rank": 69, "score": 81789.68082513992 }, { "content": "fn def_l_less_equal<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLLessEqual := LNotOp(0x92) LGreaterOp(0x94) Operand Operand\n\n */\n\n opcode(opcode::DEF_L_NOT_OP)\n\n .then(opcode(opcode::DEF_L_GREATER_OP))\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n 
\"DefLLessEqual\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord != Ordering::Greater)), context)\n\n }),\n\n ))\n\n .map(|(((), ()), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 70, "score": 81789.68082513992 }, { "content": "fn def_l_greater_equal<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefLGreaterEqual := LNotOp(0x92) LLessOp(0x95) Operand Operand\n\n */\n\n opcode(opcode::DEF_L_NOT_OP)\n\n .then(opcode(opcode::DEF_L_LESS_OP))\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefLGreaterEqual\",\n\n term_arg().then(term_arg()).map_with_context(|(left_arg, right_arg), context| {\n\n let ord = try_with_context!(context, left_arg.cmp(right_arg, context));\n\n (Ok(AmlValue::Boolean(ord != Ordering::Less)), context)\n\n }),\n\n ))\n\n .map(|(((), ()), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 71, "score": 81789.68082513992 }, { "content": "pub fn expression_opcode<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * ExpressionOpcode := DefAquire | DefAdd | DefAnd | DefBuffer | DefConcat | DefConcatRes |\n\n * DefCondRefOf | DefCopyObject | DefDecrement | DefDerefOf | DefDivide |\n\n * DefFindSetLeftBit | DefFindSetRightBit | DefFromBCD | DefIncrement | DefIndex |\n\n * DefLAnd | DefLEqual | DefLGreater | DefLGreaterEqual | DefLLess | DefLLessEqual |\n\n * DefMid | DefLNot | DefLNotEqual | DefLoad | DefLoadTable | DefLOr | DefMatch | DefMod |\n\n * DefMultiply | DefNAnd | DefNOr | DefNot | DefObjectType | DefOr | DefPackage |\n\n * DefVarPackage | DefRefOf | DefShiftLeft | DefShiftRight | DefSizeOf | DefStore |\n\n * DefSubtract | DefTimer | DefToBCD | DefToBuffer | DefToDecimalString |\n\n * DefToHexString | DefToInteger | DefToString | DefWait | DefXOr | 
MethodInvocation\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"ExpressionOpcode\",\n\n choice!(\n\n def_add(),\n", "file_path": "aml/src/expression.rs", "rank": 72, "score": 80550.60426829182 }, { "content": "pub fn def_concat<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefConcat := 0x73 Data Data Target\n\n * Data := TermArg => ComputationalData\n\n */\n\n opcode(opcode::DEF_CONCAT_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefConcat\",\n\n term_arg().then(term_arg()).then(target()).map_with_context(|((left, right), target), context| {\n\n let result = match left.as_concat_type() {\n\n AmlValue::Integer(left) => {\n\n let right = try_with_context!(context, right.as_integer(context));\n\n\n\n let mut buffer = Vec::with_capacity(mem::size_of::<u64>() * 2);\n\n buffer.extend_from_slice(&left.to_le_bytes());\n\n buffer.extend_from_slice(&right.to_le_bytes());\n", "file_path": "aml/src/expression.rs", "rank": 73, "score": 80550.60426829182 }, { "content": "pub fn package_element<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n choice!(data_ref_object(), name_string().map(|string| Ok(AmlValue::String(string.as_string()))))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 74, "score": 80550.60426829182 }, { "content": "pub fn simple_name<'a, 'c>() -> impl Parser<'a, 'c, Target>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * SimpleName := NameString | ArgObj | LocalObj\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"SimpleName\",\n\n choice!(\n\n arg_obj().map(|arg_num| Ok(Target::Arg(arg_num))),\n\n local_obj().map(|local_num| Ok(Target::Local(local_num))),\n\n name_string().map(move |name| Ok(Target::Name(name)))\n\n ),\n\n )\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 75, "score": 80550.60426829182 }, { "content": "pub fn def_buffer<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefBuffer := 0x11 
PkgLength BufferSize ByteList\n\n * BufferSize := TermArg => Integer\n\n *\n\n * XXX: The spec says that zero-length buffers (e.g. the PkgLength is 0) are illegal, but\n\n * we've encountered them in QEMU-generated tables, so we return an empty buffer in these\n\n * cases.\n\n *\n\n * Uninitialized elements are initialized to zero.\n\n */\n\n opcode(opcode::DEF_BUFFER_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefBuffer\",\n\n pkg_length().then(term_arg()).feed(|(pkg_length, buffer_size)| {\n\n take_to_end_of_pkglength(pkg_length).map_with_context(move |bytes, context| {\n", "file_path": "aml/src/expression.rs", "rank": 76, "score": 80550.60426829182 }, { "content": "pub fn arg_obj<'a, 'c>() -> impl Parser<'a, 'c, ArgNum>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * ArgObj := Arg0Op | Arg1Op | Arg2Op | Arg3Op | Arg4Op | Arg5Op | Arg6Op\n\n * Arg0Op = 0x68\n\n * Arg1Op = 0x69\n\n * Arg2Op = 0x6a\n\n * Arg3Op = 0x6b\n\n * Arg4Op = 0x6c\n\n * Arg5Op = 0x6d\n\n * Arg6Op = 0x6e\n\n */\n\n let arg_parser = |i, arg_opcode| {\n\n opcode(arg_opcode).then(comment_scope(DebugVerbosity::AllScopes, \"ArgObj\", id())).map(move |((), _)| Ok(i))\n\n };\n\n\n\n choice!(\n\n arg_parser(0, opcode::ARG0_OP),\n\n arg_parser(1, opcode::ARG1_OP),\n\n arg_parser(2, opcode::ARG2_OP),\n\n arg_parser(3, opcode::ARG3_OP),\n\n arg_parser(4, opcode::ARG4_OP),\n\n arg_parser(5, opcode::ARG5_OP),\n\n arg_parser(6, opcode::ARG6_OP)\n\n )\n\n}\n", "file_path": "aml/src/misc.rs", "rank": 77, "score": 80550.60426829182 }, { "content": "pub fn super_name<'a, 'c>() -> impl Parser<'a, 'c, Target>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * SuperName := SimpleName | DebugObj | ReferenceTypeOpcode\n\n * TODO: this doesn't cover ReferenceTypeOpcode yet\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"SuperName\",\n\n choice!(debug_obj().map(|()| Ok(Target::Debug)), simple_name()),\n\n )\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 78, "score": 80550.60426829182 }, { 
"content": "pub fn def_add<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefAdd := 0x72 Operand Operand Target\n\n * Operand := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_ADD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefAdd\",\n\n term_arg().then(term_arg()).then(target()).map_with_context(\n\n |((left_arg, right_arg), target), context| {\n\n let left = try_with_context!(context, left_arg.as_integer(context));\n\n let right = try_with_context!(context, right_arg.as_integer(context));\n\n let result = AmlValue::Integer(left.wrapping_add(right));\n\n\n\n try_with_context!(context, context.store(target, result.clone()));\n\n (Ok(result), context)\n\n },\n\n ),\n\n ))\n\n .map(|((), result)| Ok(result))\n\n}\n\n\n", "file_path": "aml/src/expression.rs", "rank": 79, "score": 80550.60426829182 }, { "content": "pub fn def_package<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefPackage := 0x12 PkgLength NumElements PackageElementList\n\n * NumElements := ByteData\n\n * PackageElementList := Nothing | <PackageElement PackageElementList>\n\n * PackageElement := DataRefObject | NameString\n\n */\n\n opcode(opcode::DEF_PACKAGE_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefPackage\",\n\n pkg_length().then(take()).feed(|(pkg_length, num_elements)| {\n\n move |mut input, mut context| {\n\n let mut package_contents = Vec::new();\n\n\n\n while pkg_length.still_parsing(input) {\n\n let (new_input, new_context, value) = package_element().parse(input, context)?;\n", "file_path": "aml/src/expression.rs", "rank": 80, "score": 80550.60426829182 }, { "content": "pub fn local_obj<'a, 'c>() -> impl Parser<'a, 'c, LocalNum>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * LocalObj := Local0Op | Local1Op | Local2Op | Local3Op | Local4Op | Local5Op | Local6Op | Local7Op\n\n * Local0Op := 0x60\n\n * Local1Op := 0x61\n\n * Local2Op := 0x62\n\n * Local3Op := 0x63\n\n * Local4Op := 
0x64\n\n * Local5Op := 0x65\n\n * Local6Op := 0x66\n\n * Local7Op := 0x67\n\n */\n\n let local_parser = |i, local_opcode| {\n\n opcode(local_opcode)\n\n .then(comment_scope(DebugVerbosity::AllScopes, \"LocalObj\", id()))\n\n .map(move |((), _)| Ok(i))\n\n };\n", "file_path": "aml/src/misc.rs", "rank": 81, "score": 80550.60426829182 }, { "content": "pub fn def_create_byte_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateByteField := 0x8c SourceBuf ByteIndex NameString\n\n * SourceBuf := TermArg => Buffer\n\n * ByteIndex := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_CREATE_BYTE_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefCreateByteField\",\n\n term_arg().then(term_arg()).then(name_string()).map_with_context(\n\n |((source, index), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 82, "score": 79966.37666330002 }, { "content": "pub fn def_create_dword_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateDWordField := 0x8a SourceBuf ByteIndex NameString\n\n * SourceBuf := TermArg => Buffer\n\n * ByteIndex := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_CREATE_DWORD_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefCreateDWordField\",\n\n term_arg().then(term_arg()).then(name_string()).map_with_context(\n\n |((source, index), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 83, "score": 79966.37666330002 }, { "content": "pub fn 
def_create_bit_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateBitField := 0x8d SourceBuf BitIndex NameString\n\n * SourceBuf := TermArg => Buffer\n\n * BitIndex := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_CREATE_BIT_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefCreateBitField\",\n\n term_arg().then(term_arg()).then(name_string()).map_with_context(\n\n |((source, index), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 84, "score": 79966.37666330002 }, { "content": "pub fn def_create_qword_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateQWordField := 0x8f SourceBuf ByteIndex NameString\n\n * SourceBuf := TermArg => Buffer\n\n * ByteIndex := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_CREATE_QWORD_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefCreateQWordField\",\n\n term_arg().then(term_arg()).then(name_string()).map_with_context(\n\n |((source, index), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 85, "score": 79966.37666330002 }, { "content": "pub fn def_create_word_field<'a, 'c>() -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefCreateWordField := 0x8b SourceBuf ByteIndex NameString\n\n * SourceBuf := TermArg => Buffer\n\n * ByteIndex := TermArg => Integer\n\n */\n\n opcode(opcode::DEF_CREATE_WORD_FIELD_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefCreateWordField\",\n\n 
term_arg().then(term_arg()).then(name_string()).map_with_context(\n\n |((source, index), name), context| {\n\n let source_data: Arc<spinning_top::Spinlock<Vec<u8>>> =\n\n try_with_context!(context, source.as_buffer(context)).clone();\n\n let index = try_with_context!(context, index.as_integer(context));\n\n\n\n try_with_context!(\n", "file_path": "aml/src/term_object.rs", "rank": 86, "score": 79966.37666330002 }, { "content": "pub fn name_string<'a, 'c>() -> impl Parser<'a, 'c, AmlName>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NameString := <RootChar('\\') NamePath> | <PrefixPath NamePath>\n\n * PrefixPath := Nothing | <'^' PrefixPath>\n\n */\n\n let root_name_string = opcode(ROOT_CHAR).then(name_path()).map(|((), ref name_path)| {\n\n let mut name = alloc::vec![NameComponent::Root];\n\n name.extend_from_slice(name_path);\n\n Ok(AmlName::from_components(name))\n\n });\n\n\n\n let prefix_path =\n\n take_while(opcode(PREFIX_CHAR)).then(name_path()).map(|(num_prefix_chars, ref name_path)| {\n\n let mut name = alloc::vec![NameComponent::Prefix; num_prefix_chars];\n\n name.extend_from_slice(name_path);\n\n Ok(AmlName::from_components(name))\n\n });\n", "file_path": "aml/src/name_object.rs", "rank": 87, "score": 78727.30010645193 }, { "content": "pub fn term_arg<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * TermArg := ExpressionOpcode | DataObject | ArgObj | LocalObj\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"TermArg\",\n\n choice!(\n\n data_object(),\n\n arg_obj().map_with_context(|arg_num, context| {\n\n (Ok(try_with_context!(context, context.current_arg(arg_num)).clone()), context)\n\n }),\n\n local_obj().map_with_context(|local_num, context| {\n\n (Ok(try_with_context!(context, context.local(local_num)).clone()), context)\n\n }),\n\n expression_opcode()\n\n ),\n\n )\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 88, "score": 78727.30010645193 }, { "content": "pub fn name_seg<'a, 'c>() -> impl 
Parser<'a, 'c, NameSeg>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NameSeg := <LeadNameChar NameChar NameChar NameChar>\n\n */\n\n // TODO: can we write this better?\n\n move |input, context: &'c mut AmlContext| {\n\n let (input, context, char_1) = consume(is_lead_name_char).parse(input, context)?;\n\n let (input, context, char_2) = consume(is_name_char).parse(input, context)?;\n\n let (input, context, char_3) = consume(is_name_char).parse(input, context)?;\n\n let (input, context, char_4) = consume(is_name_char).parse(input, context)?;\n\n Ok((input, context, NameSeg([char_1, char_2, char_3, char_4])))\n\n }\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 89, "score": 78727.30010645193 }, { "content": "pub fn def_concat_res<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DefConcatRes := 0x84 BufData BufData Target\n\n * BufData := TermArg => Buffer\n\n */\n\n opcode(opcode::DEF_CONCAT_RES_OP)\n\n .then(comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"DefConcatRes\",\n\n term_arg().then(term_arg()).then(target()).map_with_context(|((left, right), target), context| {\n\n let left = try_with_context!(context, left.as_buffer(context));\n\n let right = try_with_context!(context, right.as_buffer(context));\n\n\n\n let left_len = left.lock().len();\n\n let right_len = right.lock().len();\n\n\n\n if left_len == 1 || right_len == 1 {\n", "file_path": "aml/src/expression.rs", "rank": 90, "score": 78727.30010645193 }, { "content": "pub fn computational_data<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * ComputationalData := ByteConst | WordConst | DWordConst | QWordConst | String |\n\n * ConstObj | RevisionOp | DefBuffer\n\n * ByteConst := 0x0a ByteData\n\n * WordConst := 0x0b WordData\n\n * DWordConst := 0x0c DWordData\n\n * QWordConst := 0x0e QWordData\n\n * String := 0x0d AsciiCharList NullChar\n\n * ConstObj := ZeroOp(0x00) | OneOp(0x01) | OnesOp(0xff)\n\n * RevisionOp := ExtOpPrefix(0x5b) 
0x30\n\n */\n\n let const_parser = |input: &'a [u8], context: &'c mut AmlContext| {\n\n let string_parser = |input: &'a [u8], context| -> ParseResult<'a, 'c, AmlValue> {\n\n /*\n\n * Using `position` isn't very efficient here, but is probably fine because the\n\n * strings are usually quite short.\n", "file_path": "aml/src/term_object.rs", "rank": 91, "score": 78727.30010645193 }, { "content": "pub fn pkg_length<'a, 'c>() -> impl Parser<'a, 'c, PkgLength>\n\nwhere\n\n 'c: 'a,\n\n{\n\n move |input: &'a [u8], context: &'c mut AmlContext| -> crate::parser::ParseResult<'a, 'c, PkgLength> {\n\n let (new_input, context, raw_length) = raw_pkg_length().parse(input, context)?;\n\n\n\n /*\n\n * NOTE: we use the original input here, because `raw_length` includes the length of the\n\n * `PkgLength`.\n\n */\n\n match PkgLength::from_raw_length(input, raw_length) {\n\n Ok(pkg_length) => Ok((new_input, context, pkg_length)),\n\n Err(err) => Err((input, context, Propagate::Err(err))),\n\n }\n\n }\n\n}\n\n\n", "file_path": "aml/src/pkg_length.rs", "rank": 92, "score": 78727.30010645193 }, { "content": "pub fn data_object<'a, 'c>() -> impl Parser<'a, 'c, AmlValue>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DataObject := DefPackage | DefVarPackage | ComputationalData\n\n *\n\n * The order of the parsers are important here, as DefPackage and DefVarPackage can be\n\n * accidently parsed as ComputationalDatas.\n\n */\n\n // TODO: this doesn't yet parse DefVarPackage\n\n comment_scope(DebugVerbosity::AllScopes, \"DataObject\", choice!(def_package(), computational_data()))\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 93, "score": 78727.30010645193 }, { "content": "pub fn term_object<'a, 'c>() -> impl Parser<'a, 'c, Option<AmlValue>>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * TermObj := NamespaceModifierObj | NamedObj | StatementOpcode | ExpressionOpcode\n\n */\n\n comment_scope(\n\n DebugVerbosity::AllScopes,\n\n \"TermObj\",\n\n choice!(\n\n namespace_modifier().map(|()| 
Ok(None)),\n\n named_obj().map(|()| Ok(None)),\n\n statement_opcode().map(|()| Ok(None)),\n\n expression_opcode().map(|value| Ok(Some(value)))\n\n ),\n\n )\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 94, "score": 75887.77297592435 }, { "content": "pub fn name_path<'a, 'c>() -> impl Parser<'a, 'c, Vec<NameComponent>>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NamePath := NullName | DualNamePath | MultiNamePath | NameSeg\n\n */\n\n choice!(\n\n null_name(),\n\n dual_name_path(),\n\n multi_name_path(),\n\n name_seg().map(|seg| Ok(alloc::vec![NameComponent::Segment(seg)]))\n\n )\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 95, "score": 75887.77297592435 }, { "content": "pub fn null_name<'a, 'c>() -> impl Parser<'a, 'c, Vec<NameComponent>>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * NullName := 0x00\n\n */\n\n opcode(NULL_NAME).map(|_| Ok(Vec::with_capacity(0)))\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 96, "score": 75887.77297592435 }, { "content": "/// `TermList`s are usually found within explicit-length objects (so they have a `PkgLength`\n\n/// elsewhere in the structure), so this takes a number of bytes to parse.\n\npub fn term_list<'a, 'c>(list_length: PkgLength) -> impl Parser<'a, 'c, ()>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * TermList := Nothing | <TermObj TermList>\n\n */\n\n // TODO: why does this use still_parsing, instead of just taking the whole thing and parsing it til it's empty?\n\n move |mut input: &'a [u8], mut context: &'c mut AmlContext| {\n\n while list_length.still_parsing(input) {\n\n // TODO: currently, we ignore the value of the expression. 
We may need to propagate\n\n // this.\n\n let (new_input, new_context, _) = term_object().parse(input, context)?;\n\n input = new_input;\n\n context = new_context;\n\n }\n\n\n\n Ok((input, context, ()))\n\n }\n\n}\n\n\n", "file_path": "aml/src/term_object.rs", "rank": 97, "score": 74297.82146665655 }, { "content": "pub fn multi_name_path<'a, 'c>() -> impl Parser<'a, 'c, Vec<NameComponent>>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * MultiNamePath := 0x2f ByteData{SegCount} NameSeg(SegCount)\n\n */\n\n move |input, context| {\n\n let (new_input, context, ((), seg_count)) =\n\n opcode(MULTI_NAME_PREFIX).then(take()).parse(input, context)?;\n\n match n_of(name_seg(), usize::from(seg_count)).parse(new_input, context) {\n\n Ok((new_input, context, name_segs)) => {\n\n Ok((new_input, context, name_segs.iter().map(|&seg| NameComponent::Segment(seg)).collect()))\n\n }\n\n // Correct returned input to the one we haven't touched\n\n Err((_, context, err)) => Err((input, context, err)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 98, "score": 74297.82146665655 }, { "content": "pub fn dual_name_path<'a, 'c>() -> impl Parser<'a, 'c, Vec<NameComponent>>\n\nwhere\n\n 'c: 'a,\n\n{\n\n /*\n\n * DualNamePath := 0x2e NameSeg NameSeg\n\n */\n\n opcode(DUAL_NAME_PREFIX).then(name_seg()).then(name_seg()).map(|(((), first), second)| {\n\n Ok(alloc::vec![NameComponent::Segment(first), NameComponent::Segment(second)])\n\n })\n\n}\n\n\n", "file_path": "aml/src/name_object.rs", "rank": 99, "score": 74297.82146665655 } ]
Rust
src/setting_serial.rs
opendoor-labs/libnm-rs
ddd1c6c4599fb9d4c62f44f5eb17a0286eeabe1a
use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::Value; use glib_sys; use gobject_sys; use nm_sys; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; use Setting; use SettingSerialParity; glib_wrapper! { pub struct SettingSerial(Object<nm_sys::NMSettingSerial, nm_sys::NMSettingSerialClass, SettingSerialClass>) @extends Setting; match fn { get_type => || nm_sys::nm_setting_serial_get_type(), } } impl SettingSerial { pub fn new() -> SettingSerial { unsafe { Setting::from_glib_full(nm_sys::nm_setting_serial_new()).unsafe_cast() } } } impl Default for SettingSerial { fn default() -> Self { Self::new() } } pub const NONE_SETTING_SERIAL: Option<&SettingSerial> = None; pub trait SettingSerialExt: 'static { fn get_baud(&self) -> u32; fn get_bits(&self) -> u32; fn get_parity(&self) -> SettingSerialParity; fn get_send_delay(&self) -> u64; fn get_stopbits(&self) -> u32; fn set_property_baud(&self, baud: u32); fn set_property_bits(&self, bits: u32); fn set_property_parity(&self, parity: SettingSerialParity); fn set_property_send_delay(&self, send_delay: u64); fn set_property_stopbits(&self, stopbits: u32); fn connect_property_baud_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_bits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_parity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_send_delay_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_stopbits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<SettingSerial>> SettingSerialExt for O { fn get_baud(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_baud(self.as_ref().to_glib_none().0) } } fn get_bits(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_bits(self.as_ref().to_glib_none().0) } } fn get_parity(&self) -> SettingSerialParity 
{ unsafe { from_glib(nm_sys::nm_setting_serial_get_parity( self.as_ref().to_glib_none().0, )) } } fn get_send_delay(&self) -> u64 { unsafe { nm_sys::nm_setting_serial_get_send_delay(self.as_ref().to_glib_none().0) } } fn get_stopbits(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_stopbits(self.as_ref().to_glib_none().0) } } fn set_property_baud(&self, baud: u32) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"baud\0".as_ptr() as *const _, Value::from(&baud).to_glib_none().0, ); } } fn set_property_bits(&self, bits: u32) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"bits\0".as_ptr() as *const _, Value::from(&bits).to_glib_none().0, ); } } fn set_property_parity(&self, parity: SettingSerialParity) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"parity\0".as_ptr() as *const _, Value::from(&parity).to_glib_none().0, ); } } fn set_property_send_delay(&self, send_delay: u64) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"send-delay\0".as_ptr() as *const _, Value::from(&send_delay).to_glib_none().0, ); } } fn set_property_stopbits(&self, stopbits: u32) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"stopbits\0".as_ptr() as *const _, Value::from(&stopbits).to_glib_none().0, ); } } fn connect_property_baud_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_baud_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::baud\0".as_ptr() as *const _, 
Some(transmute(notify_baud_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_bits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_bits_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::bits\0".as_ptr() as *const _, Some(transmute(notify_bits_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_parity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_parity_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::parity\0".as_ptr() as *const _, Some(transmute(notify_parity_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_send_delay_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_send_delay_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::send-delay\0".as_ptr() as *const _, Some(transmute(notify_send_delay_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_stopbits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_stopbits_trampoline<P, F: 
Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::stopbits\0".as_ptr() as *const _, Some(transmute(notify_stopbits_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } } impl fmt::Display for SettingSerial { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "SettingSerial") } }
use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::Value; use glib_sys; use gobject_sys; use nm_sys; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; use Setting; use SettingSerialParity; glib_wrapper! { pub struct SettingSerial(Object<nm_sys::NMSettingSerial, nm_sys::NMSettingSerialClass, SettingSerialClass>) @extends Setting; match fn { get_type => || nm_sys::nm_setting_serial_get_type(), } } impl SettingSerial { pub fn new() -> SettingSerial { unsafe { Setting::from_glib_full(nm_sys::nm_setting_serial_new()).unsafe_cast() } } } impl Default for SettingSerial { fn default() -> Self { Self::new() } } pub const NONE_SETTING_SERIAL: Option<&SettingSerial> = None; pub trait SettingSerialExt: 'static { fn get_baud(&self) -> u32; fn get_bits(&self) -> u32; fn get_parity(&self) -> SettingSerialParity; fn get_send_delay(&self) -> u64; fn get_stopbits(&self) -> u32; fn set_property_baud(&self, baud: u32); fn set_property_bits(&self, bits: u32); fn set_property_parity(&self, parity: SettingSerialParity); fn set_property_send_delay(&self, send_delay: u64); fn set_property_stopbits(&self, stopbits: u32); fn connect_property_baud_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_bits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_parity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_send_delay_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_stopbits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<SettingSerial>> SettingSerialExt for O { fn get_baud(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_baud(self.as_ref().to_glib_none().0) } } fn get_bits(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_bits(self.as_ref().to_glib_none().0) } } fn get_parity(&self) -> SettingSerialParity 
{ unsafe { from_glib(nm_sys::nm_setting_serial_get_parity( self.as_ref().to_glib_none().0, )) } } fn get_send_delay(&self) -> u64 { unsafe { nm_sys::nm_setting_serial_get_send_delay(self.as_ref().to_glib_none().0) } } fn get_stopbits(&self) -> u32 { unsafe { nm_sys::nm_setting_serial_get_stopbits(self.as_ref().to_glib_none().0) } } fn set_property_baud(&self, baud: u32) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"baud\0".as_ptr() as *const _, Value::from(&baud).to_glib_none().0, ); } } fn set_property_bits(&self, bits: u32) { unsafe { gobject_sys::g_object_set_property(
fn set_property_parity(&self, parity: SettingSerialParity) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"parity\0".as_ptr() as *const _, Value::from(&parity).to_glib_none().0, ); } } fn set_property_send_delay(&self, send_delay: u64) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"send-delay\0".as_ptr() as *const _, Value::from(&send_delay).to_glib_none().0, ); } } fn set_property_stopbits(&self, stopbits: u32) { unsafe { gobject_sys::g_object_set_property( self.to_glib_none().0 as *mut gobject_sys::GObject, b"stopbits\0".as_ptr() as *const _, Value::from(&stopbits).to_glib_none().0, ); } } fn connect_property_baud_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_baud_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::baud\0".as_ptr() as *const _, Some(transmute(notify_baud_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_bits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_bits_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::bits\0".as_ptr() as *const _, Some(transmute(notify_bits_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_parity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn 
notify_parity_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::parity\0".as_ptr() as *const _, Some(transmute(notify_parity_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_send_delay_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_send_delay_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::send-delay\0".as_ptr() as *const _, Some(transmute(notify_send_delay_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_stopbits_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_stopbits_trampoline<P, F: Fn(&P) + 'static>( this: *mut nm_sys::NMSettingSerial, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<SettingSerial>, { let f: &F = &*(f as *const F); f(&SettingSerial::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::stopbits\0".as_ptr() as *const _, Some(transmute(notify_stopbits_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } } impl fmt::Display for SettingSerial { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "SettingSerial") } }
self.to_glib_none().0 as *mut gobject_sys::GObject, b"bits\0".as_ptr() as *const _, Value::from(&bits).to_glib_none().0, ); } }
function_block-function_prefix_line
[ { "content": "pub trait SettingExt: 'static {\n\n fn compare<P: IsA<Setting>>(&self, b: &P, flags: SettingCompareFlags) -> bool;\n\n\n\n //fn diff<P: IsA<Setting>>(&self, b: &P, flags: SettingCompareFlags, invert_results: bool, results: /*Unknown conversion*//*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 0, id: 7 }) -> bool;\n\n\n\n fn duplicate(&self) -> Option<Setting>;\n\n\n\n //fn enumerate_values(&self, func: /*Unimplemented*/FnMut(&Setting, &str, /*Ignored*/glib::Value, /*Ignored*/glib::ParamFlags), user_data: /*Unimplemented*/Option<Fundamental: Pointer>);\n\n\n\n fn get_dbus_property_type(&self, property_name: &str) -> Option<glib::VariantType>;\n\n\n\n fn get_name(&self) -> Option<GString>;\n\n\n\n fn set_secret_flags(\n\n &self,\n\n secret_name: &str,\n\n flags: SettingSecretFlags,\n\n ) -> Result<(), glib::Error>;\n\n\n\n fn to_string(&self) -> GString;\n", "file_path": "src/setting.rs", "rank": 1, "score": 262620.30565425224 }, { "content": "pub trait SettingVpnExt: 'static {\n\n fn add_data_item(&self, key: &str, item: &str);\n\n\n\n fn add_secret(&self, key: &str, secret: &str);\n\n\n\n fn foreach_data_item<P: FnMut(&str, &str)>(&self, func: P);\n\n\n\n fn foreach_secret<P: FnMut(&str, &str)>(&self, func: P);\n\n\n\n fn get_data_item(&self, key: &str) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_data_keys(&self) -> Vec<GString>;\n\n\n\n fn get_num_data_items(&self) -> u32;\n\n\n\n fn get_num_secrets(&self) -> u32;\n\n\n\n fn get_persistent(&self) -> bool;\n\n\n", "file_path": "src/setting_vpn.rs", "rank": 2, "score": 252130.38571050842 }, { "content": "pub trait SettingDcbExt: 'static {\n\n fn get_app_fcoe_flags(&self) -> SettingDcbFlags;\n\n\n\n fn get_app_fcoe_mode(&self) -> Option<GString>;\n\n\n\n fn get_app_fcoe_priority(&self) -> i32;\n\n\n\n fn get_app_fip_flags(&self) -> SettingDcbFlags;\n\n\n\n fn get_app_fip_priority(&self) -> i32;\n\n\n\n fn get_app_iscsi_flags(&self) 
-> SettingDcbFlags;\n\n\n\n fn get_app_iscsi_priority(&self) -> i32;\n\n\n\n fn get_priority_bandwidth(&self, user_priority: u32) -> u32;\n\n\n\n fn get_priority_flow_control(&self, user_priority: u32) -> bool;\n\n\n\n fn get_priority_flow_control_flags(&self) -> SettingDcbFlags;\n", "file_path": "src/setting_dcb.rs", "rank": 3, "score": 252130.38571050842 }, { "content": "pub trait SettingGsmExt: 'static {\n\n fn get_apn(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_22\", feature = \"dox\"))]\n\n fn get_auto_config(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_device_id(&self) -> Option<GString>;\n\n\n\n fn get_home_only(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_8\", feature = \"dox\"))]\n\n fn get_mtu(&self) -> u32;\n\n\n\n fn get_network_id(&self) -> Option<GString>;\n\n\n\n #[cfg_attr(feature = \"v1_16\", deprecated)]\n\n fn get_number(&self) -> Option<GString>;\n\n\n\n fn get_password(&self) -> Option<GString>;\n", "file_path": "src/setting_gsm.rs", "rank": 4, "score": 252130.38571050842 }, { "content": "pub trait SettingVlanExt: 'static {\n\n fn add_priority(&self, map: VlanPriorityMap, from: u32, to: u32) -> bool;\n\n\n\n fn add_priority_str(&self, map: VlanPriorityMap, str: &str) -> bool;\n\n\n\n fn clear_priorities(&self, map: VlanPriorityMap);\n\n\n\n fn get_flags(&self) -> u32;\n\n\n\n fn get_id(&self) -> u32;\n\n\n\n fn get_num_priorities(&self, map: VlanPriorityMap) -> i32;\n\n\n\n fn get_parent(&self) -> Option<GString>;\n\n\n\n fn get_priority(&self, map: VlanPriorityMap, idx: u32) -> Option<(u32, u32)>;\n\n\n\n fn remove_priority(&self, map: VlanPriorityMap, idx: u32);\n\n\n\n fn remove_priority_by_value(&self, map: VlanPriorityMap, from: u32, to: u32) -> bool;\n", "file_path": "src/setting_vlan.rs", "rank": 5, "score": 252130.38571050842 }, { "content": "pub trait SettingVxlanExt: 'static {\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_ageing(&self) -> u32;\n\n\n\n 
#[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_destination_port(&self) -> u32;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_id(&self) -> u32;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_l2_miss(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_l3_miss(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_learning(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": "src/setting_vxlan.rs", "rank": 6, "score": 252130.38571050842 }, { "content": "pub trait SettingBridgeExt: 'static {\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn add_vlan(&self, vlan: &BridgeVlan);\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn clear_vlans(&self);\n\n\n\n fn get_ageing_time(&self) -> u32;\n\n\n\n fn get_forward_delay(&self) -> u16;\n\n\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn get_group_forward_mask(&self) -> u16;\n\n\n\n fn get_hello_time(&self) -> u16;\n\n\n\n fn get_mac_address(&self) -> Option<GString>;\n\n\n\n fn get_max_age(&self) -> u16;\n\n\n", "file_path": "src/setting_bridge.rs", "rank": 7, "score": 252130.38571050842 }, { "content": "pub trait SettingMacvlanExt: 'static {\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_mode(&self) -> SettingMacvlanMode;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_parent(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_promiscuous(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_tap(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn set_property_mode(&self, mode: u32);\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn set_property_parent(&self, parent: Option<&str>);\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": 
"src/setting_macvlan.rs", "rank": 8, "score": 252130.38571050842 }, { "content": "pub trait SettingConnectionExt: 'static {\n\n fn add_permission(&self, ptype: &str, pitem: &str, detail: Option<&str>) -> bool;\n\n\n\n fn add_secondary(&self, sec_uuid: &str) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn get_auth_retries(&self) -> i32;\n\n\n\n fn get_autoconnect(&self) -> bool;\n\n\n\n fn get_autoconnect_priority(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_autoconnect_retries(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_autoconnect_slaves(&self) -> SettingConnectionAutoconnectSlaves;\n\n\n\n fn get_connection_type(&self) -> Option<GString>;\n\n\n", "file_path": "src/setting_connection.rs", "rank": 9, "score": 252130.38571050842 }, { "content": "pub trait SettingBluetoothExt: 'static {\n\n fn get_bdaddr(&self) -> Option<GString>;\n\n\n\n fn get_connection_type(&self) -> Option<GString>;\n\n\n\n fn set_property_bdaddr(&self, bdaddr: Option<&str>);\n\n\n\n fn get_property_type(&self) -> Option<GString>;\n\n\n\n fn set_property_type(&self, type_: Option<&str>);\n\n\n\n fn connect_property_bdaddr_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<SettingBluetooth>> SettingBluetoothExt for O {\n\n fn get_bdaddr(&self) -> Option<GString> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_setting_bluetooth_get_bdaddr(\n", "file_path": "src/setting_bluetooth.rs", "rank": 10, "score": 252130.38571050842 }, { "content": "pub trait SettingCdmaExt: 'static {\n\n #[cfg(any(feature = \"v1_8\", feature = \"dox\"))]\n\n fn get_mtu(&self) -> u32;\n\n\n\n fn get_number(&self) -> Option<GString>;\n\n\n\n fn get_password(&self) -> Option<GString>;\n\n\n\n fn get_password_flags(&self) -> SettingSecretFlags;\n\n\n\n fn get_username(&self) -> 
Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_8\", feature = \"dox\"))]\n\n fn set_property_mtu(&self, mtu: u32);\n\n\n\n fn set_property_number(&self, number: Option<&str>);\n\n\n\n fn set_property_password(&self, password: Option<&str>);\n\n\n\n fn set_property_password_flags(&self, password_flags: SettingSecretFlags);\n", "file_path": "src/setting_cdma.rs", "rank": 11, "score": 252130.38571050842 }, { "content": "pub trait SettingTunExt: 'static {\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_group(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_mode(&self) -> SettingTunMode;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_multi_queue(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_owner(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_pi(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_vnet_hdr(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": "src/setting_tun.rs", "rank": 12, "score": 252130.38571050842 }, { "content": "pub trait SettingPppoeExt: 'static {\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn get_parent(&self) -> Option<GString>;\n\n\n\n fn get_password(&self) -> Option<GString>;\n\n\n\n fn get_password_flags(&self) -> SettingSecretFlags;\n\n\n\n fn get_service(&self) -> Option<GString>;\n\n\n\n fn get_username(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn set_property_parent(&self, parent: Option<&str>);\n\n\n\n fn set_property_password(&self, password: Option<&str>);\n\n\n\n fn set_property_password_flags(&self, password_flags: SettingSecretFlags);\n\n\n\n fn set_property_service(&self, service: Option<&str>);\n", "file_path": "src/setting_pppoe.rs", "rank": 13, "score": 252130.38571050842 }, { "content": "pub trait 
SettingWirelessExt: 'static {\n\n fn add_mac_blacklist_item(&self, mac: &str) -> bool;\n\n\n\n fn add_seen_bssid(&self, bssid: &str) -> bool;\n\n\n\n fn ap_security_compatible<P: IsA<SettingWirelessSecurity>>(\n\n &self,\n\n s_wireless_sec: &P,\n\n ap_flags: _80211ApFlags,\n\n ap_wpa: _80211ApSecurityFlags,\n\n ap_rsn: _80211ApSecurityFlags,\n\n ap_mode: _80211Mode,\n\n ) -> bool;\n\n\n\n fn clear_mac_blacklist_items(&self);\n\n\n\n fn get_band(&self) -> Option<GString>;\n\n\n\n fn get_bssid(&self) -> Option<GString>;\n\n\n", "file_path": "src/setting_wireless.rs", "rank": 14, "score": 252130.38571050842 }, { "content": "pub trait SettingPppExt: 'static {\n\n fn get_baud(&self) -> u32;\n\n\n\n fn get_crtscts(&self) -> bool;\n\n\n\n fn get_lcp_echo_failure(&self) -> u32;\n\n\n\n fn get_lcp_echo_interval(&self) -> u32;\n\n\n\n fn get_mppe_stateful(&self) -> bool;\n\n\n\n fn get_mru(&self) -> u32;\n\n\n\n fn get_mtu(&self) -> u32;\n\n\n\n fn get_no_vj_comp(&self) -> bool;\n\n\n\n fn get_noauth(&self) -> bool;\n\n\n\n fn get_nobsdcomp(&self) -> bool;\n", "file_path": "src/setting_ppp.rs", "rank": 15, "score": 252130.38571050842 }, { "content": "pub trait SettingInfinibandExt: 'static {\n\n fn get_mac_address(&self) -> Option<GString>;\n\n\n\n fn get_mtu(&self) -> u32;\n\n\n\n fn get_p_key(&self) -> i32;\n\n\n\n fn get_parent(&self) -> Option<GString>;\n\n\n\n fn get_transport_mode(&self) -> Option<GString>;\n\n\n\n fn get_virtual_interface_name(&self) -> Option<GString>;\n\n\n\n fn set_property_mac_address(&self, mac_address: Option<&str>);\n\n\n\n fn set_property_mtu(&self, mtu: u32);\n\n\n\n fn set_property_p_key(&self, p_key: i32);\n\n\n\n fn set_property_parent(&self, parent: Option<&str>);\n", "file_path": "src/setting_infiniband.rs", "rank": 16, "score": 252130.38571050842 }, { "content": "pub trait SettingAdslExt: 'static {\n\n fn get_encapsulation(&self) -> Option<GString>;\n\n\n\n fn get_password(&self) -> Option<GString>;\n\n\n\n fn get_password_flags(&self) 
-> SettingSecretFlags;\n\n\n\n fn get_protocol(&self) -> Option<GString>;\n\n\n\n fn get_username(&self) -> Option<GString>;\n\n\n\n fn get_vci(&self) -> u32;\n\n\n\n fn get_vpi(&self) -> u32;\n\n\n\n fn set_property_encapsulation(&self, encapsulation: Option<&str>);\n\n\n\n fn set_property_password(&self, password: Option<&str>);\n\n\n\n fn set_property_password_flags(&self, password_flags: SettingSecretFlags);\n", "file_path": "src/setting_adsl.rs", "rank": 17, "score": 252130.38571050842 }, { "content": "pub trait SettingTeamExt: 'static {\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn add_link_watcher(&self, link_watcher: &TeamLinkWatcher) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn add_runner_tx_hash(&self, txhash: &str) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn clear_link_watchers(&self);\n\n\n\n fn get_config(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_link_watcher(&self, idx: u32) -> Option<TeamLinkWatcher>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_mcast_rejoin_count(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_mcast_rejoin_interval(&self) -> i32;\n", "file_path": "src/setting_team.rs", "rank": 18, "score": 252130.38571050842 }, { "content": "pub trait SettingWiredExt: 'static {\n\n fn add_mac_blacklist_item(&self, mac: &str) -> bool;\n\n\n\n fn add_s390_option(&self, key: &str, value: &str) -> bool;\n\n\n\n fn clear_mac_blacklist_items(&self);\n\n\n\n fn get_auto_negotiate(&self) -> bool;\n\n\n\n fn get_cloned_mac_address(&self) -> Option<GString>;\n\n\n\n fn get_duplex(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn get_generate_mac_address_mask(&self) -> Option<GString>;\n\n\n\n fn get_mac_address(&self) -> Option<GString>;\n\n\n\n fn get_mac_address_blacklist(&self) -> Vec<GString>;\n\n\n", "file_path": 
"src/setting_wired.rs", "rank": 19, "score": 252130.38571050842 }, { "content": "pub trait SettingWimaxExt: 'static {\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn get_mac_address(&self) -> Option<GString>;\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn get_network_name(&self) -> Option<GString>;\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn set_property_mac_address(&self, mac_address: Option<&str>);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn set_property_network_name(&self, network_name: Option<&str>);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn connect_property_mac_address_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn connect_property_network_name_notify<F: Fn(&Self) + 'static>(&self, f: F)\n\n -> SignalHandlerId;\n\n}\n", "file_path": "src/setting_wimax.rs", "rank": 20, "score": 252130.38571050842 }, { "content": "pub trait SettingBondExt: 'static {\n\n fn add_option(&self, name: &str, value: &str) -> bool;\n\n\n\n fn get_num_options(&self) -> u32;\n\n\n\n fn get_option_by_name(&self, name: &str) -> Option<GString>;\n\n\n\n fn get_option_default(&self, name: &str) -> Option<GString>;\n\n\n\n fn get_valid_options(&self) -> Vec<GString>;\n\n\n\n fn remove_option(&self, name: &str) -> bool;\n\n\n\n //fn get_property_options(&self) -> /*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 0, id: 28 };\n\n\n\n //fn set_property_options(&self, options: /*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 0, id: 28 });\n\n\n\n fn connect_property_options_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n", "file_path": "src/setting_bond.rs", "rank": 21, "score": 252130.38571050842 }, { "content": "pub trait SettingProxyExt: 'static {\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_browser_only(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature 
= \"dox\"))]\n\n fn get_method(&self) -> SettingProxyMethod;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_pac_script(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_pac_url(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn set_property_browser_only(&self, browser_only: bool);\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn set_property_method(&self, method: i32);\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n", "file_path": "src/setting_proxy.rs", "rank": 22, "score": 252130.38571050842 }, { "content": "pub trait SettingMacsecExt: 'static {\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_encrypt(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_mka_cak(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_mka_cak_flags(&self) -> SettingSecretFlags;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_mka_ckn(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_mode(&self) -> SettingMacsecMode;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\n fn get_parent(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n", "file_path": "src/setting_macsec.rs", "rank": 23, "score": 252130.38571050842 }, { "content": "pub trait SettingIPTunnelExt: 'static {\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_encapsulation_limit(&self) -> u32;\n\n\n\n fn get_flags(&self) -> IPTunnelFlags;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_flow_label(&self) -> u32;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_input_key(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_local(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = 
\"v1_2\", feature = \"dox\"))]\n\n fn get_mode(&self) -> IPTunnelMode;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_mtu(&self) -> u32;\n", "file_path": "src/setting_ip_tunnel.rs", "rank": 24, "score": 242744.89678308347 }, { "content": "pub trait SettingWirelessSecurityExt: 'static {\n\n fn add_group(&self, group: &str) -> bool;\n\n\n\n fn add_pairwise(&self, pairwise: &str) -> bool;\n\n\n\n fn add_proto(&self, proto: &str) -> bool;\n\n\n\n fn clear_groups(&self);\n\n\n\n fn clear_pairwise(&self);\n\n\n\n fn clear_protos(&self);\n\n\n\n fn get_auth_alg(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_fils(&self) -> SettingWirelessSecurityFils;\n\n\n\n fn get_group(&self, i: u32) -> Option<GString>;\n\n\n", "file_path": "src/setting_wireless_security.rs", "rank": 25, "score": 242744.89678308347 }, { "content": "pub trait SettingIP6ConfigExt: 'static {\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_addr_gen_mode(&self) -> SettingIP6ConfigAddrGenMode;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_dhcp_duid(&self) -> Option<GString>;\n\n\n\n fn get_ip6_privacy(&self) -> SettingIP6ConfigPrivacy;\n\n\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn get_token(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn set_property_addr_gen_mode(&self, addr_gen_mode: i32);\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn set_property_dhcp_duid(&self, dhcp_duid: Option<&str>);\n\n\n\n fn set_property_ip6_privacy(&self, ip6_privacy: SettingIP6ConfigPrivacy);\n\n\n", "file_path": "src/setting_ip6_config.rs", "rank": 26, "score": 242744.89678308347 }, { "content": "pub trait SettingIP4ConfigExt: 'static {\n\n fn get_dhcp_client_id(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_dhcp_fqdn(&self) -> Option<GString>;\n\n\n\n fn 
set_property_dhcp_client_id(&self, dhcp_client_id: Option<&str>);\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn set_property_dhcp_fqdn(&self, dhcp_fqdn: Option<&str>);\n\n\n\n fn connect_property_dhcp_client_id_notify<F: Fn(&Self) + 'static>(\n\n &self,\n\n f: F,\n\n ) -> SignalHandlerId;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn connect_property_dhcp_fqdn_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n", "file_path": "src/setting_ip4_config.rs", "rank": 27, "score": 242744.89678308347 }, { "content": "pub trait SettingOlpcMeshExt: 'static {\n\n fn get_channel(&self) -> u32;\n\n\n\n fn get_dhcp_anycast_address(&self) -> Option<GString>;\n\n\n\n fn get_ssid(&self) -> Option<glib::Bytes>;\n\n\n\n fn set_property_channel(&self, channel: u32);\n\n\n\n fn set_property_dhcp_anycast_address(&self, dhcp_anycast_address: Option<&str>);\n\n\n\n fn set_property_ssid(&self, ssid: Option<&glib::Bytes>);\n\n\n\n fn connect_property_channel_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_dhcp_anycast_address_notify<F: Fn(&Self) + 'static>(\n\n &self,\n\n f: F,\n\n ) -> SignalHandlerId;\n\n\n", "file_path": "src/setting_olpc_mesh.rs", "rank": 28, "score": 242744.89678308347 }, { "content": "pub trait SettingIPConfigExt: 'static {\n\n fn add_address(&self, address: &IPAddress) -> bool;\n\n\n\n fn add_dns(&self, dns: &str) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn add_dns_option(&self, dns_option: &str) -> bool;\n\n\n\n fn add_dns_search(&self, dns_search: &str) -> bool;\n\n\n\n fn add_route(&self, route: &IPRoute) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn add_routing_rule(&self, routing_rule: &IPRoutingRule);\n\n\n\n fn clear_addresses(&self);\n\n\n\n fn clear_dns(&self);\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": "src/setting_ip_config.rs", "rank": 29, "score": 
242744.89678308347 }, { "content": "pub trait SettingTeamPortExt: 'static {\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn add_link_watcher(&self, link_watcher: &TeamLinkWatcher) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn clear_link_watchers(&self);\n\n\n\n fn get_config(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_lacp_key(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_lacp_prio(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_link_watcher(&self, idx: u32) -> Option<TeamLinkWatcher>;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_num_link_watchers(&self) -> u32;\n", "file_path": "src/setting_team_port.rs", "rank": 30, "score": 242744.89678308347 }, { "content": "pub trait SettingBridgePortExt: 'static {\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn add_vlan(&self, vlan: &BridgeVlan);\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn clear_vlans(&self);\n\n\n\n fn get_hairpin_mode(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn get_num_vlans(&self) -> u32;\n\n\n\n fn get_path_cost(&self) -> u16;\n\n\n\n fn get_priority(&self) -> u16;\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn get_vlan(&self, idx: u32) -> Option<BridgeVlan>;\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n", "file_path": "src/setting_bridge_port.rs", "rank": 31, "score": 242744.89678308347 }, { "content": "pub fn utils_ip4_get_default_prefix(ip: u32) -> u32 {\n\n unsafe { nm_sys::nm_utils_ip4_get_default_prefix(ip) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 32, "score": 241532.22001739463 }, { "content": "pub trait DeviceExt: 'static {\n\n fn connection_compatible<P: IsA<Connection>>(&self, connection: &P) -> Result<(), glib::Error>;\n\n\n\n fn connection_valid<P: 
IsA<Connection>>(&self, connection: &P) -> bool;\n\n\n\n #[cfg_attr(feature = \"v1_22\", deprecated)]\n\n fn delete<P: IsA<gio::Cancellable>>(&self, cancellable: Option<&P>) -> Result<(), glib::Error>;\n\n\n\n fn delete_async<P: IsA<gio::Cancellable>, Q: FnOnce(Result<(), glib::Error>) + Send + 'static>(\n\n &self,\n\n cancellable: Option<&P>,\n\n callback: Q,\n\n );\n\n\n\n fn delete_async_future(\n\n &self,\n\n ) -> Pin<Box_<dyn std::future::Future<Output = Result<(), glib::Error>> + 'static>>;\n\n\n\n #[cfg_attr(feature = \"v1_22\", deprecated)]\n\n fn disconnect<P: IsA<gio::Cancellable>>(\n", "file_path": "src/device.rs", "rank": 33, "score": 206593.29074949492 }, { "content": "pub trait ObjectExt: 'static {\n\n fn get_path(&self) -> Option<GString>;\n\n\n\n fn connect_property_path_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Object>> ObjectExt for O {\n\n fn get_path(&self) -> Option<GString> {\n\n unsafe { from_glib_none(nm_sys::nm_object_get_path(self.as_ref().to_glib_none().0)) }\n\n }\n\n\n\n fn connect_property_path_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_path_trampoline<P, F: Fn(&P) + 'static>(\n\n this: *mut nm_sys::NMObject,\n\n _param_spec: glib_sys::gpointer,\n\n f: glib_sys::gpointer,\n\n ) where\n\n P: IsA<Object>,\n\n {\n\n let f: &F = &*(f as *const F);\n", "file_path": "src/object.rs", "rank": 34, "score": 206593.29074949492 }, { "content": "pub trait ConnectionExt: 'static {\n\n fn add_setting<P: IsA<Setting>>(&self, setting: &P);\n\n\n\n fn clear_secrets(&self);\n\n\n\n fn clear_settings(&self);\n\n\n\n fn compare<P: IsA<Connection>>(&self, b: &P, flags: SettingCompareFlags) -> bool;\n\n\n\n //fn diff<P: IsA<Connection>>(&self, b: &P, flags: SettingCompareFlags, out_settings: /*Unknown conversion*//*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 0, id: 40 }) -> bool;\n\n\n\n fn dump(&self);\n\n\n\n //fn 
for_each_setting_value(&self, func: /*Unimplemented*/FnMut(&Setting, &str, /*Ignored*/glib::Value, /*Ignored*/glib::ParamFlags), user_data: /*Unimplemented*/Option<Fundamental: Pointer>);\n\n\n\n fn get_connection_type(&self) -> Option<GString>;\n\n\n\n fn get_id(&self) -> Option<GString>;\n\n\n\n fn get_interface_name(&self) -> Option<GString>;\n", "file_path": "src/connection.rs", "rank": 35, "score": 206593.29074949492 }, { "content": "pub trait Setting8021xExt: 'static {\n\n fn add_altsubject_match(&self, altsubject_match: &str) -> bool;\n\n\n\n fn add_eap_method(&self, eap: &str) -> bool;\n\n\n\n fn add_phase2_altsubject_match(&self, phase2_altsubject_match: &str) -> bool;\n\n\n\n fn clear_altsubject_matches(&self);\n\n\n\n fn clear_eap_methods(&self);\n\n\n\n fn clear_phase2_altsubject_matches(&self);\n\n\n\n fn get_altsubject_match(&self, i: u32) -> Option<GString>;\n\n\n\n fn get_anonymous_identity(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_8\", feature = \"dox\"))]\n\n fn get_auth_timeout(&self) -> i32;\n\n\n", "file_path": "src/setting8021x.rs", "rank": 36, "score": 206593.29074949492 }, { "content": "#[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\npub fn utils_version() -> u32 {\n\n unsafe { nm_sys::nm_utils_version() }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 37, "score": 206586.78757166842 }, { "content": "pub fn utils_ip4_prefix_to_netmask(prefix: u32) -> u32 {\n\n unsafe { nm_sys::nm_utils_ip4_prefix_to_netmask(prefix) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 38, "score": 204914.84783768177 }, { "content": "pub fn utils_ip4_netmask_to_prefix(netmask: u32) -> u32 {\n\n unsafe { nm_sys::nm_utils_ip4_netmask_to_prefix(netmask) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 39, "score": 204914.84783768177 }, { "content": "pub fn utils_wifi_freq_to_channel(freq: u32) -> u32 {\n\n unsafe { nm_sys::nm_utils_wifi_freq_to_channel(freq) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 40, 
"score": 204914.84783768177 }, { "content": "pub trait VpnEditorExt: 'static {\n\n fn get_widget(&self) -> Option<glib::Object>;\n\n\n\n fn update_connection<P: IsA<Connection>>(&self, connection: &P) -> Result<(), glib::Error>;\n\n\n\n fn connect_changed<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<VpnEditor>> VpnEditorExt for O {\n\n fn get_widget(&self) -> Option<glib::Object> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_vpn_editor_get_widget(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn update_connection<P: IsA<Connection>>(&self, connection: &P) -> Result<(), glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n", "file_path": "src/vpn_editor.rs", "rank": 41, "score": 198288.99461899628 }, { "content": "pub trait ActiveConnectionExt: 'static {\n\n fn get_connection(&self) -> Option<RemoteConnection>;\n\n\n\n fn get_connection_type(&self) -> Option<GString>;\n\n\n\n fn get_default(&self) -> bool;\n\n\n\n fn get_default6(&self) -> bool;\n\n\n\n fn get_devices(&self) -> Vec<Device>;\n\n\n\n fn get_dhcp4_config(&self) -> Option<DhcpConfig>;\n\n\n\n fn get_dhcp6_config(&self) -> Option<DhcpConfig>;\n\n\n\n fn get_id(&self) -> Option<GString>;\n\n\n\n fn get_ip4_config(&self) -> Option<IPConfig>;\n\n\n\n fn get_ip6_config(&self) -> Option<IPConfig>;\n", "file_path": "src/active_connection.rs", "rank": 42, "score": 198288.99461899628 }, { "content": "pub fn utils_wifi_channel_to_freq(channel: u32, band: &str) -> u32 {\n\n unsafe { nm_sys::nm_utils_wifi_channel_to_freq(channel, band.to_glib_none().0) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 43, "score": 193150.33359131188 }, { "content": "pub trait VpnPluginOldExt: 'static {\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn disconnect(&self) -> Result<(), glib::Error>;\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn failure(&self, reason: VpnPluginFailure);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn 
get_state(&self) -> VpnServiceState;\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn set_config(&self, config: &glib::Variant);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn set_ip4_config(&self, ip4_config: &glib::Variant);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n\n fn set_ip6_config(&self, ip6_config: &glib::Variant);\n\n\n\n #[cfg_attr(feature = \"v1_2\", deprecated)]\n", "file_path": "src/vpn_plugin_old.rs", "rank": 44, "score": 190925.00873797867 }, { "content": "pub trait VpnPluginInfoExt: 'static {\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn get_aliases(&self) -> Vec<GString>;\n\n\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn get_auth_dialog(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_editor_plugin(&self) -> Option<VpnEditorPlugin>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_filename(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_name(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_plugin(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": "src/vpn_plugin_info.rs", "rank": 45, "score": 190925.00873797867 }, { "content": "pub trait VpnEditorPluginExt: 'static {\n\n fn export<P: IsA<Connection>>(&self, path: &str, connection: &P) -> Result<(), glib::Error>;\n\n\n\n fn get_capabilities(&self) -> VpnEditorPluginCapability;\n\n\n\n fn get_editor<P: IsA<Connection>>(&self, connection: &P) -> Result<VpnEditor, glib::Error>;\n\n\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn get_plugin_info(&self) -> Option<VpnPluginInfo>;\n\n\n\n fn get_suggested_filename<P: IsA<Connection>>(&self, connection: &P) -> Option<GString>;\n\n\n\n //#[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n //fn get_vt(&self, vt: /*Ignored*/VpnEditorPluginVT, vt_size: usize) -> 
usize;\n\n\n\n fn import(&self, path: &str) -> Result<Connection, glib::Error>;\n\n\n\n #[cfg(any(feature = \"v1_4\", feature = \"dox\"))]\n\n fn set_plugin_info<P: IsA<VpnPluginInfo>>(&self, plugin_info: Option<&P>);\n\n\n", "file_path": "src/vpn_editor_plugin.rs", "rank": 46, "score": 190925.00873797867 }, { "content": "pub trait SecretAgentOldExt: 'static {\n\n fn delete_secrets<\n\n P: IsA<Connection>,\n\n Q: FnOnce(&SecretAgentOld, &Connection, &glib::Error) + 'static,\n\n >(\n\n &self,\n\n connection: &P,\n\n callback: Q,\n\n );\n\n\n\n fn get_registered(&self) -> bool;\n\n\n\n fn get_secrets<\n\n P: IsA<Connection>,\n\n Q: FnOnce(&SecretAgentOld, &Connection, &glib::Variant, &glib::Error) + 'static,\n\n >(\n\n &self,\n\n connection: &P,\n\n setting_name: &str,\n\n hints: &[&str],\n", "file_path": "src/secret_agent_old.rs", "rank": 47, "score": 190925.00873797867 }, { "content": "pub trait VpnServicePluginExt: 'static {\n\n fn disconnect(&self) -> Result<(), glib::Error>;\n\n\n\n fn failure(&self, reason: VpnPluginFailure);\n\n\n\n fn set_config(&self, config: &glib::Variant);\n\n\n\n fn set_ip4_config(&self, ip4_config: &glib::Variant);\n\n\n\n fn set_ip6_config(&self, ip6_config: &glib::Variant);\n\n\n\n fn set_login_banner(&self, banner: &str);\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn shutdown(&self);\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\n fn get_property_service_name(&self) -> Option<GString>;\n\n\n\n #[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n", "file_path": "src/vpn_service_plugin.rs", "rank": 48, "score": 190925.00873797867 }, { "content": "pub fn utils_wifi_find_next_channel(channel: u32, direction: i32, band: &str) -> u32 {\n\n unsafe { nm_sys::nm_utils_wifi_find_next_channel(channel, direction, band.to_glib_none().0) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 49, "score": 180096.12008681678 }, { "content": "pub fn utils_wifi_is_channel_valid(channel: u32, band: &str) -> bool 
{\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_wifi_is_channel_valid(\n\n channel,\n\n band.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 50, "score": 171644.84948770815 }, { "content": "pub fn utils_security_valid(\n\n type_: UtilsSecurityType,\n\n wifi_caps: DeviceWifiCapabilities,\n\n have_ap: bool,\n\n adhoc: bool,\n\n ap_flags: _80211ApFlags,\n\n ap_wpa: _80211ApSecurityFlags,\n\n ap_rsn: _80211ApSecurityFlags,\n\n) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_security_valid(\n\n type_.to_glib(),\n\n wifi_caps.to_glib(),\n\n have_ap.to_glib(),\n\n adhoc.to_glib(),\n\n ap_flags.to_glib(),\n\n ap_wpa.to_glib(),\n\n ap_rsn.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 51, "score": 134977.9015945805 }, { "content": "pub fn utils_ip6_addresses_to_variant(\n\n addresses: &[&IPAddress],\n\n gateway: Option<&str>,\n\n) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip6_addresses_to_variant(\n\n addresses.to_glib_none().0,\n\n gateway.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 52, "score": 132126.5781112739 }, { "content": "pub fn utils_ip4_addresses_to_variant(\n\n addresses: &[&IPAddress],\n\n gateway: Option<&str>,\n\n) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip4_addresses_to_variant(\n\n addresses.to_glib_none().0,\n\n gateway.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 53, "score": 132126.5781112739 }, { "content": "pub fn utils_ap_mode_security_valid(\n\n type_: UtilsSecurityType,\n\n wifi_caps: DeviceWifiCapabilities,\n\n) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_ap_mode_security_valid(\n\n type_.to_glib(),\n\n wifi_caps.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 54, "score": 129446.37285866856 }, { "content": "pub fn utils_check_virtual_device_compatibility(\n\n virtual_type: 
glib::types::Type,\n\n other_type: glib::types::Type,\n\n) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_check_virtual_device_compatibility(\n\n virtual_type.to_glib(),\n\n other_type.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 55, "score": 129446.37285866856 }, { "content": "pub fn utils_get_timestamp_msec() -> i64 {\n\n unsafe { nm_sys::nm_utils_get_timestamp_msec() }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 56, "score": 126998.26739487457 }, { "content": "pub fn utils_uuid_generate() -> Option<GString> {\n\n unsafe { from_glib_full(nm_sys::nm_utils_uuid_generate()) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 57, "score": 122290.98529179179 }, { "content": "pub fn utils_file_is_pkcs12(filename: &str) -> bool {\n\n unsafe { from_glib(nm_sys::nm_utils_file_is_pkcs12(filename.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 58, "score": 120331.8327144177 }, { "content": "pub fn utils_hwaddr_valid(asc: &str) -> bool {\n\n let length = asc.len() as isize;\n\n unsafe { from_glib(nm_sys::nm_utils_hwaddr_valid(asc.to_glib_none().0, length)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 59, "score": 120331.8327144177 }, { "content": "pub fn utils_hwaddr_len(type_: i32) -> usize {\n\n unsafe { nm_sys::nm_utils_hwaddr_len(type_) }\n\n}\n\n\n\n//pub fn utils_hwaddr_matches(hwaddr1: /*Unimplemented*/Option<Fundamental: Pointer>, hwaddr1_len: isize, hwaddr2: /*Unimplemented*/Option<Fundamental: Pointer>, hwaddr2_len: isize) -> bool {\n\n// unsafe { TODO: call nm_sys:nm_utils_hwaddr_matches() }\n\n//}\n\n\n", "file_path": "src/functions.rs", "rank": 60, "score": 120331.8327144177 }, { "content": "pub fn utils_file_is_certificate(filename: &str) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_file_is_certificate(\n\n filename.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 61, "score": 120331.8327144177 }, { "content": "pub fn 
utils_is_empty_ssid(ssid: &[u8]) -> bool {\n\n let len = ssid.len() as usize;\n\n unsafe { from_glib(nm_sys::nm_utils_is_empty_ssid(ssid.to_glib_none().0, len)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 62, "score": 120331.8327144177 }, { "content": "pub fn utils_is_uuid(str: Option<&str>) -> bool {\n\n unsafe { from_glib(nm_sys::nm_utils_is_uuid(str.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 63, "score": 118563.95102971472 }, { "content": "pub fn utils_wpa_psk_valid(psk: &str) -> bool {\n\n unsafe { from_glib(nm_sys::nm_utils_wpa_psk_valid(psk.to_glib_none().0)) }\n\n}\n", "file_path": "src/functions.rs", "rank": 64, "score": 117950.69122912952 }, { "content": "#[cfg(any(feature = \"v1_20\", feature = \"dox\"))]\n\npub fn ethtool_optname_is_feature(optname: Option<&str>) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_ethtool_optname_is_feature(\n\n optname.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 65, "score": 116182.80954442653 }, { "content": "#[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\npub fn utils_bond_mode_string_to_int(mode: &str) -> i32 {\n\n unsafe { nm_sys::nm_utils_bond_mode_string_to_int(mode.to_glib_none().0) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 66, "score": 115700.64903897766 }, { "content": "pub fn utils_hexstr2bin(hex: &str) -> Option<glib::Bytes> {\n\n unsafe { from_glib_full(nm_sys::nm_utils_hexstr2bin(hex.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 67, "score": 114579.4919432535 }, { "content": "pub fn utils_inet4_ntop(dst: &str) -> Option<GString> {\n\n unsafe { from_glib_none(nm_sys::nm_utils_inet4_ntop(dst.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 68, "score": 113932.76735427469 }, { "content": "pub fn utils_inet6_ntop(dst: &str) -> Option<GString> {\n\n unsafe { from_glib_none(nm_sys::nm_utils_inet6_ntop(dst.to_glib_none().0)) }\n\n}\n\n\n", "file_path": 
"src/functions.rs", "rank": 69, "score": 113932.76735427469 }, { "content": "#[cfg_attr(feature = \"v1_6\", deprecated)]\n\npub fn utils_iface_valid_name(name: Option<&str>) -> bool {\n\n unsafe { from_glib(nm_sys::nm_utils_iface_valid_name(name.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 70, "score": 113932.76735427469 }, { "content": "pub fn utils_ssid_to_utf8(ssid: &[u8]) -> Option<GString> {\n\n let len = ssid.len() as usize;\n\n unsafe { from_glib_full(nm_sys::nm_utils_ssid_to_utf8(ssid.to_glib_none().0, len)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 71, "score": 113932.76735427469 }, { "content": "pub fn utils_hwaddr_canonical(asc: &str) -> Option<GString> {\n\n let length = asc.len() as isize;\n\n unsafe {\n\n from_glib_full(nm_sys::nm_utils_hwaddr_canonical(\n\n asc.to_glib_none().0,\n\n length,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 72, "score": 113932.76735427469 }, { "content": "pub fn utils_escape_ssid(ssid: &[u8]) -> Option<GString> {\n\n let len = ssid.len() as usize;\n\n unsafe { from_glib_none(nm_sys::nm_utils_escape_ssid(ssid.to_glib_none().0, len)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 73, "score": 113932.76735427469 }, { "content": "pub fn utils_file_is_private_key(filename: &str) -> Option<bool> {\n\n unsafe {\n\n let mut out_encrypted = mem::MaybeUninit::uninit();\n\n let ret = from_glib(nm_sys::nm_utils_file_is_private_key(\n\n filename.to_glib_none().0,\n\n out_encrypted.as_mut_ptr(),\n\n ));\n\n let out_encrypted = out_encrypted.assume_init();\n\n if ret {\n\n Some(from_glib(out_encrypted))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n//pub fn utils_format_variant_attributes(attributes: /*Unknown conversion*//*Unimplemented*/HashTable TypeId { ns_id: 0, id: 25 }/TypeId { ns_id: 0, id: 25 }, attr_separator: glib::Char, key_value_separator: glib::Char) -> Option<GString> {\n\n// unsafe { TODO: call nm_sys:nm_utils_format_variant_attributes() 
}\n\n//}\n\n\n", "file_path": "src/functions.rs", "rank": 74, "score": 113932.76735427469 }, { "content": "pub fn utils_ipaddr_valid(family: i32, ip: &str) -> bool {\n\n unsafe { from_glib(nm_sys::nm_utils_ipaddr_valid(family, ip.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 75, "score": 112329.44975310164 }, { "content": "#[cfg(any(feature = \"v1_6\", feature = \"dox\"))]\n\npub fn utils_is_json_object(str: &str) -> Result<(), glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let _ = nm_sys::nm_utils_is_json_object(str.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(())\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 76, "score": 112329.44975310164 }, { "content": "pub fn utils_wifi_strength_bars(strength: u8) -> Option<GString> {\n\n unsafe { from_glib_none(nm_sys::nm_utils_wifi_strength_bars(strength)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 77, "score": 111803.28756008466 }, { "content": "pub fn utils_hwaddr_atoba(asc: &str) -> Option<glib::ByteArray> {\n\n let length = asc.len() as usize;\n\n unsafe { from_glib_full(nm_sys::nm_utils_hwaddr_atoba(asc.to_glib_none().0, length)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 78, "score": 110199.96995891162 }, { "content": "#[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\npub fn utils_bond_mode_int_to_string(mode: i32) -> Option<GString> {\n\n unsafe { from_glib_none(nm_sys::nm_utils_bond_mode_int_to_string(mode)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 79, "score": 109784.93298215602 }, { "content": "pub fn utils_ip4_routes_from_variant(value: &glib::Variant) -> Vec<IPRoute> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(nm_sys::nm_utils_ip4_routes_from_variant(\n\n value.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 80, "score": 108181.61538098298 }, { "content": "pub fn 
utils_ip_addresses_to_variant(addresses: &[&IPAddress]) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip_addresses_to_variant(\n\n addresses.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 81, "score": 108181.61538098298 }, { "content": "pub fn utils_ip_routes_to_variant(routes: &[&IPRoute]) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip_routes_to_variant(\n\n routes.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 82, "score": 108181.61538098298 }, { "content": "pub fn utils_ip6_routes_to_variant(routes: &[&IPRoute]) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip6_routes_to_variant(\n\n routes.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 83, "score": 108181.61538098298 }, { "content": "pub fn utils_ip6_routes_from_variant(value: &glib::Variant) -> Vec<IPRoute> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(nm_sys::nm_utils_ip6_routes_from_variant(\n\n value.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 84, "score": 108181.61538098298 }, { "content": "pub fn utils_ip4_routes_to_variant(routes: &[&IPRoute]) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_none(nm_sys::nm_utils_ip4_routes_to_variant(\n\n routes.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 85, "score": 108181.61538098298 }, { "content": "pub fn utils_is_valid_iface_name(name: Option<&str>) -> Result<(), glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let _ = nm_sys::nm_utils_is_valid_iface_name(name.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(())\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n\n//#[cfg(any(feature = \"v1_8\", feature = \"dox\"))]\n\n//pub fn utils_parse_variant_attributes(string: &str, attr_separator: glib::Char, 
key_value_separator: glib::Char, ignore_unknown: bool, spec: /*Ignored*/&VariantAttributeSpec) -> Result</*Unknown conversion*//*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 2, id: 185 }, glib::Error> {\n\n// unsafe { TODO: call nm_sys:nm_utils_parse_variant_attributes() }\n\n//}\n\n\n", "file_path": "src/functions.rs", "rank": 86, "score": 106720.90596516668 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "nm-sys/tests/abi.rs", "rank": 87, "score": 105285.91052656568 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn utils_sriov_vf_from_str(str: &str) -> Result<SriovVF, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = nm_sys::nm_utils_sriov_vf_from_str(str.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 88, "score": 104805.19932154183 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn utils_tc_qdisc_from_str(str: &str) -> Result<TCQdisc, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = nm_sys::nm_utils_tc_qdisc_from_str(str.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 89, "score": 104805.19932154183 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn utils_tc_tfilter_from_str(str: &str) -> Result<TCTfilter, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = 
nm_sys::nm_utils_tc_tfilter_from_str(str.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 90, "score": 104805.19932154183 }, { "content": "pub fn utils_wep_key_valid(key: &str, wep_type: WepKeyType) -> bool {\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_wep_key_valid(\n\n key.to_glib_none().0,\n\n wep_type.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 91, "score": 104445.21362591909 }, { "content": "pub fn utils_same_ssid(ssid1: &[u8], ssid2: &[u8], ignore_trailing_null: bool) -> bool {\n\n let len1 = ssid1.len() as usize;\n\n let len2 = ssid2.len() as usize;\n\n unsafe {\n\n from_glib(nm_sys::nm_utils_same_ssid(\n\n ssid1.to_glib_none().0,\n\n len1,\n\n ssid2.to_glib_none().0,\n\n len2,\n\n ignore_trailing_null.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 92, "score": 103468.88165335897 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn utils_tc_qdisc_to_str(qdisc: &TCQdisc) -> Result<GString, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = nm_sys::nm_utils_tc_qdisc_to_str(qdisc.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 93, "score": 102984.50421010278 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn utils_tc_tfilter_to_str(tfilter: &TCTfilter) -> Result<GString, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = nm_sys::nm_utils_tc_tfilter_to_str(tfilter.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 94, "score": 102984.50421010278 }, { "content": 
"pub fn utils_ip_routes_from_variant(value: &glib::Variant, family: i32) -> Vec<IPRoute> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(nm_sys::nm_utils_ip_routes_from_variant(\n\n value.to_glib_none().0,\n\n family,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 95, "score": 101648.18654191992 }, { "content": "pub fn utils_ip_addresses_from_variant(value: &glib::Variant, family: i32) -> Vec<IPAddress> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(nm_sys::nm_utils_ip_addresses_from_variant(\n\n value.to_glib_none().0,\n\n family,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 96, "score": 101648.18654191992 }, { "content": "#[cfg(any(feature = \"v1_2\", feature = \"dox\"))]\n\npub fn utils_enum_to_str(type_: glib::types::Type, value: i32) -> Option<GString> {\n\n unsafe { from_glib_full(nm_sys::nm_utils_enum_to_str(type_.to_glib(), value)) }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 97, "score": 100421.02004109477 }, { "content": " }\n\n }\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n pub fn connect_property_interface_name_notify<F: Fn(&SettingMatch) + 'static>(\n\n &self,\n\n f: F,\n\n ) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_interface_name_trampoline<F: Fn(&SettingMatch) + 'static>(\n\n this: *mut nm_sys::NMSettingMatch,\n\n _param_spec: glib_sys::gpointer,\n\n f: glib_sys::gpointer,\n\n ) {\n\n let f: &F = &*(f as *const F);\n\n f(&from_glib_borrow(this))\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(\n\n self.as_ptr() as *mut _,\n", "file_path": "src/setting_match.rs", "rank": 98, "score": 99892.37082200247 }, { "content": " b\"notify::interface-name\\0\".as_ptr() as *const _,\n\n Some(transmute(notify_interface_name_trampoline::<F> as usize)),\n\n Box_::into_raw(f),\n\n )\n\n }\n\n }\n\n}\n\n\n\n#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\nimpl Default for SettingMatch {\n\n fn default() -> Self {\n\n Self::new()\n\n 
}\n\n}\n\n\n\nimpl fmt::Display for SettingMatch {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"SettingMatch\")\n\n }\n\n}\n", "file_path": "src/setting_match.rs", "rank": 99, "score": 99887.45611858429 } ]
Rust
src/lower.rs
1tgr/simplejit-demo
750a1f628452d42836d0da5fc415fcef7750c045
use crate::ast::*; use crate::intern::Intern; use crate::{InternExt, Parse, Result}; use std::collections::HashMap; use std::convert::Infallible; use std::num::NonZeroU32; use std::rc::Rc; use std::result; #[salsa::query_group(LowerDatabase)] pub trait Lower: Parse { fn lower_function(&self, name: IdentId) -> Result<(Rc<HashMap<EnvId, Env>>, ExprId)>; } fn lower_function(db: &dyn Lower, name: IdentId) -> Result<(Rc<HashMap<EnvId, Env>>, ExprId)> { let mut envs = HashMap::new(); let global_env = db.global_env()?; envs.insert(EnvId::GLOBAL, global_env.clone()); let mut index = 2; let env = EnvId::from(NonZeroU32::new(index).unwrap()); index += 1; let Env { mut bindings, ty_bindings } = global_env; let Function { signature, param_names, body } = db.function(name)?; let Signature { param_tys, return_ty: _ } = signature; for (index, (name, ty)) in param_names.into_iter().zip(param_tys).enumerate() { bindings.insert(name, (env, Binding::Param(Param { index, ty }))); } envs.insert(env, Env { bindings, ty_bindings }); let body = LowerExprTransform { db, env, envs: &mut envs, index: &mut index, } .transform_expr(body)?; Ok((Rc::new(envs), body)) } pub trait LowerExt: Lower { fn binding_pair(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<(EnvId, Binding)> { let (envs, _) = self.lower_function(function_name)?; let Env { bindings, ty_bindings: _ } = &envs[&env]; let binding = bindings .get(&name) .ok_or_else(|| error!("reading from undeclared variable {}", self.lookup_intern_ident(name)))?; Ok(binding.clone()) } fn binding_decl_env(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<EnvId> { let (decl_env, _) = self.binding_pair(function_name, env, name)?; Ok(decl_env) } fn binding(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<Binding> { let (_, binding) = self.binding_pair(function_name, env, name)?; Ok(binding) } } impl<T: Lower + ?Sized> LowerExt for T {} struct LowerExprTransform<'a, DB: ?Sized> { db: &'a DB, env: 
EnvId, envs: &'a mut HashMap<EnvId, Env>, index: &'a mut u32, } impl<'a, DB: Intern + ?Sized> LowerExprTransform<'a, DB> { fn make_scope(&mut self, mut env: Env, decl_name: IdentId, decl_expr: ExprId, mut stmts: Vec<ExprId>) -> result::Result<Scope, Infallible> { let scope_env = EnvId::from(NonZeroU32::new(*self.index).unwrap()); *self.index += 1; let decl_expr = self.transform_expr(decl_expr)?; env.bindings.insert(decl_name, (scope_env, Binding::Variable(Variable { decl_expr }))); self.envs.insert(scope_env, env); LowerExprTransform { db: self.db, env: scope_env, envs: self.envs, index: self.index, } .transform_stmts(&mut stmts)?; let body = self.db.intern_block(stmts); Ok(Scope { scope_env, decl_name, decl_expr, body, }) } fn transform_stmts(&mut self, stmts: &mut Vec<ExprId>) -> result::Result<(), Infallible> { for (index, expr_mut) in stmts.iter_mut().enumerate() { if let Expr::Assign(assign) = self.db.lookup_intern_expr(*expr_mut) { let Assign { lvalue, expr: decl_expr } = assign; if let Expr::Identifier(lvalue) = self.db.lookup_intern_expr(lvalue) { let Identifier { env: _, name } = lvalue; let env = self.envs[&self.env].clone(); if !env.bindings.contains_key(&name) { let body = stmts.split_off(index + 1); let scope = self.make_scope(env, name, decl_expr, body)?; stmts[index] = self.intern_expr(Expr::Scope(scope)); return Ok(()); } } } *expr_mut = self.transform_expr(*expr_mut)?; } Ok(()) } } impl<'a, DB: Intern + InternExt + ?Sized> ExprTransform for LowerExprTransform<'a, DB> { type Error = Infallible; fn lookup_expr(&self, expr: ExprId) -> Expr { self.db.lookup_intern_expr(expr) } fn intern_expr(&self, expr: Expr) -> ExprId { if let Expr::Block(expr) = expr { let Block { stmts } = expr; self.db.intern_block(stmts) } else { self.db.intern_expr(expr) } } fn transform_block(&mut self, _expr_id: ExprId, mut expr: Block) -> result::Result<Expr, Infallible> { self.transform_stmts(&mut expr.stmts)?; Ok(Expr::Block(expr)) } fn transform_call(&mut self, _expr_id: 
ExprId, mut expr: Call) -> result::Result<Expr, Infallible> { expr.env = Some(self.env); expr.transform(self) } fn transform_identifier(&mut self, _expr_id: ExprId, mut expr: Identifier) -> result::Result<Expr, Infallible> { expr.env = Some(self.env); expr.transform(self) } }
use crate::ast::*; use crate::intern::Intern; use crate::{InternExt, Parse, Result}; use std::collections::HashMap; use std::convert::Infallible; use std::num::NonZeroU32; use std::rc::Rc; use std::result; #[salsa::query_group(LowerDatabase)] pub trait Lower: Parse { fn lower_function(&self, name: IdentId) -> Result<(Rc<HashMap<EnvId, Env>>, ExprId)>; } fn lower_function(db: &dyn Lower, name: IdentId) -> Result<(Rc<HashMap<EnvId, Env>>, ExprId)> { let mut envs = HashMap::new(); let global_env = db.global_env()?; envs.insert(EnvId::GLOBAL, global_env.clone()); let mut index = 2; let env = EnvId::from(NonZeroU32::new(index).unwrap()); index += 1; let Env { mut bindings, ty_bindings } = global_env; let Function { signature, param_names, body } = db.function(name)?; let Signature { param_tys, return_ty: _ } = signature; for (index, (name, ty)) in param_names.into_iter().zip(param_tys).enumerate() { bindings.insert(name, (env, Binding::Param(Param { index, ty }))); } envs.insert(env, Env { bindings, ty_bindings }); let body = LowerExprTransform { db, env, envs: &mut envs, index: &mut index, } .transform_expr(body)?; Ok((Rc::new(envs), body)) } pub trait LowerExt: Lower { fn binding_pair(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<(EnvId, Binding)> { let (envs, _) = self.lower_function(function_name)?; let Env { bindings, ty_bindings: _ } = &envs[&env]; let binding = bindings .get(&name) .ok_or_else(|| error!("reading from undeclared variable {}", self.lookup_intern_ident(name)))?; Ok(binding.clone()) } fn binding_decl_env(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<EnvId> { let (decl_env, _) = self.binding_pair(function_name, env, name)?; Ok(decl_env) } fn binding(&self, function_name: IdentId, env: EnvId, name: IdentId) -> Result<Binding> { let (_, binding) = self.binding_pair(function_name, env, name)?; Ok(binding) } } impl<T: Lower + ?Sized> LowerExt for T {} struct LowerExprTransform<'a, DB: ?Sized> { db: &'a DB, env: 
EnvId, envs: &'a mut HashMap<EnvId, Env>, index: &'a mut u32, } impl<'a, DB: Intern + ?Sized> LowerExprTransform<'a, DB> { fn make_scope(&mut self, mut env: Env, decl_name: IdentId, decl_expr: ExprId, mut stmts: Vec<ExprId>) -> result::Result<Scope, Infallible> { let scope_env = EnvId::from(NonZeroU32::new(*self.index).unwrap()); *self.index += 1; let decl_expr = self.transform_expr(decl_expr)?; env.bindings.insert(decl_name, (scope_env, Binding::Variable(Variable { decl_expr }))); self.envs.insert(scope_env, env); LowerExprTransform { db: self.db, env: scope_env, envs: self.envs, index: self.index, } .transform_stmts(&mut stmts)?; let body = self.db.intern_block(stmts); Ok(Scope { scope_env, decl_name, decl_expr, body, }) } fn transform_stmts(&mut self, stmts: &mut Vec<ExprId>) -> result::Result<(), Infallible> { for (index, expr_mut) in stmts.iter_mut().enumerate() { if let Expr::Assign(assign) = self.db.lookup_intern_expr(*expr_mut) { let Assign { lvalue, expr: decl_expr } = assign; if let Expr::Identifier(lvalue) = self.db.lookup_intern_expr(lvalue) { let Identifier { env: _, name } = lvalue; let env = self.envs[&self.env].clone(); if !env.bindings.contains_key(&name) { let body = stmts.split_off(index + 1); let scope = self.make_scope(env, name, decl_expr, body)?; stmts[index] = self.intern_expr(Expr::Scope(scope)); return Ok(()); } } } *expr_mut = self.transform_expr(*expr_mut)?; } Ok(()) } } impl<'a, DB: Intern + InternExt + ?Sized> ExprTransform for LowerExprTransform<'a, DB> { type Error = Infallible; fn lookup_expr(&self, expr: ExprId) -> Expr { self.db.lookup_intern_expr(expr) } fn intern_expr(&self, expr: Expr) -> ExprI
fn transform_block(&mut self, _expr_id: ExprId, mut expr: Block) -> result::Result<Expr, Infallible> { self.transform_stmts(&mut expr.stmts)?; Ok(Expr::Block(expr)) } fn transform_call(&mut self, _expr_id: ExprId, mut expr: Call) -> result::Result<Expr, Infallible> { expr.env = Some(self.env); expr.transform(self) } fn transform_identifier(&mut self, _expr_id: ExprId, mut expr: Identifier) -> result::Result<Expr, Infallible> { expr.env = Some(self.env); expr.transform(self) } }
d { if let Expr::Block(expr) = expr { let Block { stmts } = expr; self.db.intern_block(stmts) } else { self.db.intern_expr(expr) } }
function_block-function_prefixed
[ { "content": "fn function_body(db: &dyn Parse, name: IdentId) -> Result<ExprId> {\n\n let Function {\n\n signature: _,\n\n param_names: _,\n\n body,\n\n } = db.function(name)?;\n\n\n\n Ok(body)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 1, "score": 259582.8352002492 }, { "content": "fn function(db: &dyn Parse, name: IdentId) -> Result<Function> {\n\n let functions = db.functions()?;\n\n let function = functions.get(&name).ok_or_else(|| error!(\"undefined function {}\", db.lookup_intern_ident(name)))?;\n\n Ok(function.clone())\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 2, "score": 246799.11891356215 }, { "content": "fn function_signature(db: &dyn Parse, name: IdentId) -> Result<Signature> {\n\n let Function {\n\n signature,\n\n param_names: _,\n\n body: _,\n\n } = db.function(name)?;\n\n\n\n Ok(signature)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 3, "score": 237577.3699598667 }, { "content": "fn unify_function(db: &dyn TypeCk, name: IdentId) -> Result<Rc<HashMap<ExprId, TypeId>>> {\n\n let Signature { param_tys: _, return_ty } = db.function_signature(name)?;\n\n let (_, body) = db.lower_function(name)?;\n\n let mut context = UnifyExprContext::new(db, name);\n\n context.unify_expr(body, return_ty)?;\n\n Ok(Rc::new(context.into_expr_type_map()))\n\n}\n", "file_path": "src/type_ck.rs", "rank": 4, "score": 228543.764196464 }, { "content": "fn global_env(db: &dyn Parse) -> Result<Env> {\n\n let mut bindings = im_rc::HashMap::new();\n\n let mut ty_bindings = im_rc::HashMap::new();\n\n\n\n for (&name, item) in db.module()?.iter() {\n\n match item.clone() {\n\n Item::Extern(item) => {\n\n let Extern { signature } = item;\n\n bindings.insert(name, (EnvId::GLOBAL, Binding::Extern(signature)));\n\n }\n\n\n\n Item::Function(item) => {\n\n let Function {\n\n signature,\n\n param_names: _,\n\n body: _,\n\n } = item;\n\n\n\n bindings.insert(name, (EnvId::GLOBAL, Binding::Function(signature)));\n\n }\n\n\n\n Item::Struct(item) => {\n\n 
ty_bindings.insert(name, TyBinding::Struct(item));\n\n }\n\n }\n\n }\n\n\n\n Ok(Env { bindings, ty_bindings })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 5, "score": 222658.26219148168 }, { "content": "fn refine<DB: Intern + ?Sized>(db: &DB, ty_bindings: &HashMap<i32, TypeId>, ty: TypeId) -> Type {\n\n match db.lookup_intern_type(ty) {\n\n Type::Var(ty) => ty_bindings.get(&ty).map_or(Type::Var(ty), |&ty| refine(db, ty_bindings, ty)),\n\n ty => ty,\n\n }\n\n}\n\n\n", "file_path": "src/unify.rs", "rank": 6, "score": 216788.22936963136 }, { "content": "fn compile<DB: Jit + ?Sized>(db: &mut DB, name: IdentId) -> Result<i32> {\n\n db.reset_module();\n\n db.clif_ctx(name)?;\n\n\n\n let signature = db.function_signature(name)?;\n\n let cl_func_id = db.clif_func_id(false, name, signature).unwrap();\n\n\n\n db.with_module_mut(|module| {\n\n module.finalize_definitions();\n\n\n\n let code = module.get_finalized_function(cl_func_id);\n\n let code = unsafe { mem::transmute::<*const u8, fn() -> i32>(code) };\n\n Ok(code())\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 7, "score": 213417.43733336334 }, { "content": "fn function_names(db: &dyn Parse) -> Result<Vec<IdentId>> {\n\n let functions = db.functions()?;\n\n let mut names = functions.keys().copied().collect::<Vec<_>>();\n\n names.sort_by_key(|&name| db.lookup_intern_ident(name));\n\n Ok(names)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 8, "score": 200740.27732082712 }, { "content": "fn number_type(db: &dyn Intern) -> TypeId {\n\n db.intern_type(Type::Number)\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 9, "score": 198067.4118901543 }, { "content": "fn bool_type(db: &dyn Intern) -> TypeId {\n\n db.intern_type(Type::Bool)\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 10, "score": 198067.4118901543 }, { "content": "fn unit_type(db: &dyn Intern) -> TypeId {\n\n db.intern_type(Type::Unit)\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 11, "score": 198067.4118901543 }, { "content": 
"fn clif_type(db: &dyn Jit, ty: TypeId) -> Result<Vec<ClifType>> {\n\n match db.lookup_intern_type(ty) {\n\n Type::Bool => Ok(vec![types::B1]),\n\n Type::Integer(ty) => {\n\n let Integer { signed: _signed, bits } = ty;\n\n Ok(vec![ClifType::int(bits).unwrap()])\n\n }\n\n Type::Named(ty) => {\n\n let ty_binding = db.ty_binding(ty)?;\n\n let Struct { field_names: _, field_tys } = ty_binding.try_into().unwrap();\n\n field_tys.into_iter().map(|ty| db.clif_type(ty)).process_results(|i| i.flatten().collect())\n\n }\n\n Type::Number => panic!(\"didn't expect number type to survive unification\"),\n\n Type::Pointer(_) => Ok(vec![db.clif_pointer_type()]),\n\n Type::Var(_) => panic!(\"didn't expect type variable to survive unification\"),\n\n Type::Unit => Ok(vec![]),\n\n }\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 12, "score": 195103.59128807328 }, { "content": "fn functions(db: &dyn Parse) -> Result<Rc<HashMap<IdentId, Function>>> {\n\n let items = db.module()?;\n\n\n\n let functions = items\n\n .iter()\n\n .filter_map(|(&name, item)| {\n\n let item = item.try_into().ok()?;\n\n Some((name, Function::clone(item)))\n\n })\n\n .collect();\n\n\n\n Ok(Rc::new(functions))\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 13, "score": 190798.01433178457 }, { "content": "fn clif_ctx(db: &dyn Jit, name: IdentId) -> Result<Context> {\n\n let signature = db.function_signature(name)?;\n\n let (_, body) = db.lower_function(name)?;\n\n let mut ctx = ClifContext::new();\n\n ctx.func.signature = db.clif_signature(signature.clone())?;\n\n\n\n let func_ctx_pool = db.func_ctx_pool();\n\n let mut func_ctx = func_ctx_pool.pull(FunctionBuilderContext::new);\n\n func_ctx.clear();\n\n\n\n let mut builder = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);\n\n\n\n let param_values = {\n\n let entry_block = builder.create_block();\n\n builder.append_block_params_for_function_params(entry_block);\n\n builder.switch_to_block(entry_block);\n\n builder.seal_block(entry_block);\n\n 
builder.block_params(entry_block).to_vec()\n\n };\n\n\n", "file_path": "src/jit.rs", "rank": 15, "score": 189386.34909681263 }, { "content": "fn pointer_type(db: &dyn Intern, pointee: TypeId) -> TypeId {\n\n db.intern_type(Type::Pointer(pointee))\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 16, "score": 183267.9092847127 }, { "content": "fn compile<DB: Jit + ?Sized>(db: &mut DB) -> Result<i32> {\n\n let name = db.intern_ident(\"Main\".to_owned());\n\n assert_eq!(db.function_names().unwrap(), vec![name]);\n\n\n\n db.reset_module();\n\n db.clif_ctx(name)?;\n\n\n\n let signature = db.function_signature(name)?;\n\n let clif_func_id = db.clif_func_id(false, name, signature).unwrap();\n\n\n\n db.with_module_mut(|module| {\n\n module.finalize_definitions();\n\n\n\n let code = module.get_finalized_function(clif_func_id);\n\n let code = unsafe { mem::transmute::<*const u8, fn() -> i32>(code) };\n\n Ok(code())\n\n })\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 17, "score": 181725.62007409346 }, { "content": "fn clif_data_id(db: &dyn Jit, name: IdentId) -> Result<ClifDataId> {\n\n let name = db.lookup_intern_ident(name);\n\n db.with_module_mut(|module| Ok(module.declare_data(&name, Linkage::Export, true, false)?))\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 18, "score": 179882.18004285847 }, { "content": "fn clif_func_id(db: &dyn Jit, external: bool, name: IdentId, signature: Signature) -> Result<ClifFuncId> {\n\n let name = db.lookup_intern_ident(name);\n\n let signature = db.clif_signature(signature)?;\n\n let linkage = if external { Linkage::Import } else { Linkage::Export };\n\n db.with_module_mut(|module| Ok(module.declare_function(&name, linkage, &signature)?))\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 20, "score": 177920.00140803808 }, { "content": "pub trait InternExt: Intern {\n\n fn intern_frontend_expr(&self, expr: frontend::Expr) -> ExprId {\n\n use frontend::Expr as E;\n\n\n\n let expr = match expr {\n\n E::Arithmetic(lhs, op, rhs) => {\n\n 
let lhs = self.intern_frontend_expr(*lhs);\n\n let rhs = self.intern_frontend_expr(*rhs);\n\n Expr::Arithmetic(Arithmetic { lhs, op, rhs })\n\n }\n\n\n\n E::Assign(lvalue, expr) => {\n\n let lvalue = self.intern_frontend_expr(*lvalue);\n\n let expr = self.intern_frontend_expr(*expr);\n\n Expr::Assign(Assign { lvalue, expr })\n\n }\n\n\n\n E::Call(name, args) => {\n\n let name = self.intern_ident(name);\n\n let args = args.into_iter().map(|expr| self.intern_frontend_expr(expr)).collect();\n", "file_path": "src/intern.rs", "rank": 21, "score": 177918.0360124173 }, { "content": "fn clif_signature(db: &dyn Jit, signature: Signature) -> Result<ClifSignature> {\n\n let Signature { param_tys, return_ty } = signature;\n\n let call_conv = db.clif_default_call_conv();\n\n let params = param_tys\n\n .into_iter()\n\n .map(|ty| db.clif_type(ty))\n\n .process_results(|i| i.flatten().map(AbiParam::new).collect())?;\n\n\n\n let returns = db.clif_type(return_ty)?.map(AbiParam::new);\n\n Ok(ClifSignature { call_conv, params, returns })\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 22, "score": 173521.61681533777 }, { "content": "fn clif_struct_field_range(db: &dyn Jit, field_tys: Vec<TypeId>, index: usize) -> Result<Range<usize>> {\n\n let mut prev_acc = 0;\n\n let mut acc = 0;\n\n for &ty in &field_tys[..index + 1] {\n\n let ty = db.clif_type(ty)?;\n\n prev_acc = acc;\n\n acc += ty.len();\n\n }\n\n\n\n Ok(prev_acc..acc)\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 23, "score": 171781.5599392695 }, { "content": "#[salsa::query_group(TypeCkDatabase)]\n\npub trait TypeCk: Lower {\n\n fn unify_function(&self, name: IdentId) -> Result<Rc<HashMap<ExprId, TypeId>>>;\n\n}\n\n\n", "file_path": "src/type_ck.rs", "rank": 24, "score": 171125.86497834022 }, { "content": "fn integer_type(db: &dyn Intern, signed: bool, bits: u16) -> TypeId {\n\n db.intern_type(Type::Integer(Integer { signed, bits }))\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 25, "score": 168378.13359089685 }, { 
"content": "fn compile_error<DB: Jit + ?Sized>(db: &mut DB, text: &str) {\n\n assert_eq!(compile(db).expect_err(\"expected compilation error\").to_string(), text);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 26, "score": 163569.51168203013 }, { "content": "#[salsa::query_group(InternDatabase)]\n\npub trait Intern {\n\n #[salsa::interned]\n\n fn intern_ident(&self, ident: String) -> IdentId;\n\n\n\n #[salsa::interned]\n\n fn intern_expr(&self, expr: Expr) -> ExprId;\n\n\n\n #[salsa::interned]\n\n fn intern_type(&self, ty: Type) -> TypeId;\n\n\n\n fn bool_type(&self) -> TypeId;\n\n fn integer_type(&self, signed: bool, bits: u16) -> TypeId;\n\n fn number_type(&self) -> TypeId;\n\n fn pointer_type(&self, pointee: TypeId) -> TypeId;\n\n fn unit_type(&self) -> TypeId;\n\n}\n\n\n", "file_path": "src/intern.rs", "rank": 27, "score": 162460.06671044003 }, { "content": " pub trait ExprTransform {\n\n type Error;\n\n\n\n fn lookup_expr(&self, expr: ExprId) -> Expr;\n\n\n\n fn intern_expr(&self, expr: Expr) -> ExprId;\n\n\n\n $(\n\n #[allow(unused_variables)]\n\n fn $transform(&mut self, expr_id: ExprId, expr: $ty) -> Result<Expr, Self::Error> {\n\n expr.transform(self)\n\n }\n\n )*\n\n\n\n fn transform_expr(&mut self, expr: ExprId) -> Result<ExprId, Self::Error> {\n\n self.lookup_expr(expr).transform(expr, self).map(|expr| self.intern_expr(expr))\n\n }\n\n }\n\n\n", "file_path": "src/ast/expr.rs", "rank": 28, "score": 155299.7372800599 }, { "content": "fn module(db: &dyn Parse) -> Result<Rc<HashMap<IdentId, Item>>> {\n\n let input = db.source();\n\n let items = parser::module(&input)?;\n\n\n\n let items = items\n\n .into_iter()\n\n .map(|(name, i)| {\n\n let name = db.intern_ident(name);\n\n let i = db.intern_frontend_item(i);\n\n (name, i)\n\n })\n\n .collect();\n\n\n\n Ok(Rc::new(items))\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 30, "score": 146623.09199948108 }, { "content": "fn clif_pointer_type(db: &dyn Jit) -> ClifType {\n\n db.with_module(|module| 
module.target_config().pointer_type())\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 31, "score": 144022.01438171088 }, { "content": "#[salsa::query_group(JITDatabase)]\n\npub trait Jit: Parse + Lower + Target + TypeCk {\n\n fn clif_pointer_type(&self) -> ClifType;\n\n fn clif_default_call_conv(&self) -> CallConv;\n\n fn clif_type(&self, ty: TypeId) -> Result<Vec<ClifType>>;\n\n fn clif_struct_field_range(&self, field_tys: Vec<TypeId>, field_index: usize) -> Result<Range<usize>>;\n\n fn clif_signature(&self, signature: Signature) -> Result<ClifSignature>;\n\n fn clif_func_id(&self, external: bool, name: IdentId, signature: Signature) -> Result<ClifFuncId>;\n\n fn clif_data_id(&self, name: IdentId) -> Result<ClifDataId>;\n\n fn clif_ctx(&self, function_name: IdentId) -> Result<Context>;\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 32, "score": 142838.82329684895 }, { "content": "#[salsa::query_group(ParseDatabase)]\n\npub trait Parse: Source + Intern {\n\n fn module(&self) -> Result<Rc<HashMap<IdentId, Item>>>;\n\n fn functions(&self) -> Result<Rc<HashMap<IdentId, Function>>>;\n\n fn function_names(&self) -> Result<Vec<IdentId>>;\n\n fn function(&self, name: IdentId) -> Result<Function>;\n\n fn function_body(&self, name: IdentId) -> Result<ExprId>;\n\n fn function_signature(&self, name: IdentId) -> Result<Signature>;\n\n fn global_env(&self) -> Result<Env>;\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 33, "score": 136943.79839413892 }, { "content": "struct UnifyExprVisitor<'a, 'b, DB: ?Sized> {\n\n context: &'a mut UnifyExprContext<'b, DB>,\n\n ty: TypeId,\n\n}\n\n\n\nimpl<'a, 'b, DB: Lower + ?Sized> UnifyExprVisitor<'a, 'b, DB> {\n\n fn unify_type(&mut self, b: TypeId) -> Result<TypeId> {\n\n self.context.unify_type(self.ty, b)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, DB: Lower + ?Sized> ExprMap for UnifyExprVisitor<'a, 'b, DB> {\n\n type Value = Result<TypeId>;\n\n\n\n fn lookup_expr(&self, expr: ExprId) -> Expr {\n\n self.context.db.lookup_intern_expr(expr)\n\n 
}\n\n\n\n fn map_arithmetic(&mut self, _expr_id: ExprId, expr: Arithmetic) -> Result<TypeId> {\n\n let Arithmetic { lhs, op: _, rhs } = expr;\n", "file_path": "src/unify.rs", "rank": 34, "score": 133020.45267502242 }, { "content": " pub trait ExprVisitor {\n\n type Error;\n\n\n\n fn lookup_expr(&self, expr: ExprId) -> Expr;\n\n\n\n $(\n\n #[allow(unused_variables)]\n\n fn $visit(&mut self, expr_id: ExprId, expr: $ty) -> Result<(), Self::Error> {\n\n expr.walk(self)\n\n }\n\n )*\n\n\n\n fn visit_expr(&mut self, expr: ExprId) -> Result<(), Self::Error> {\n\n self.lookup_expr(expr).walk(expr, self)\n\n }\n\n }\n\n\n", "file_path": "src/ast/expr.rs", "rank": 35, "score": 130109.56126366422 }, { "content": " pub trait ExprMap {\n\n type Value;\n\n\n\n fn lookup_expr(&self, expr: ExprId) -> Expr;\n\n\n\n $(\n\n fn $map(&mut self, expr_id: ExprId, expr: $ty) -> Self::Value;\n\n )*\n\n\n\n fn map_expr(&mut self, expr: ExprId) -> Self::Value {\n\n self.lookup_expr(expr).map(self, expr)\n\n }\n\n }\n\n };\n\n}\n\n\n\nexpr_enum! 
{\n\n [ Arithmetic, visit_arithmetic, transform_arithmetic, map_arithmetic ],\n\n [ Assign, visit_assign, transform_assign, map_assign ],\n\n [ Block, visit_block, transform_block, map_block ],\n", "file_path": "src/ast/expr.rs", "rank": 36, "score": 130109.56126366422 }, { "content": "struct PrettyPrintExprVisitor<'a, 'b, DB: ?Sized> {\n\n p: &'a PrettyPrintExpr<'a, DB>,\n\n f: &'a mut fmt::Formatter<'b>,\n\n}\n\n\n\nimpl<'a, 'b, DB: Lower + ?Sized> ExprVisitor for PrettyPrintExprVisitor<'a, 'b, DB> {\n\n type Error = fmt::Error;\n\n\n\n fn lookup_expr(&self, expr: ExprId) -> Expr {\n\n self.p.db.lookup_intern_expr(expr)\n\n }\n\n\n\n fn visit_arithmetic(&mut self, _expr_id: ExprId, expr: Arithmetic) -> fmt::Result {\n\n let Arithmetic { lhs, op, rhs } = expr;\n\n\n\n let op = match op {\n\n ArithmeticKind::Add => \"+\",\n\n ArithmeticKind::Sub => \"-\",\n\n ArithmeticKind::Mul => \"*\",\n\n ArithmeticKind::Div => \"/\",\n", "file_path": "src/pretty.rs", "rank": 37, "score": 128950.17892639068 }, { "content": "fn clif_default_call_conv(db: &dyn Jit) -> CallConv {\n\n db.with_module(|module| module.isa().default_call_conv())\n\n}\n\n\n", "file_path": "src/jit.rs", "rank": 38, "score": 111742.75463906475 }, { "content": "fn bench_typeck_big_function(c: &mut Criterion) {\n\n let mut db = Database::default();\n\n let name = db.intern_ident(\"Main\".to_owned());\n\n let mut s = String::new();\n\n let mut counter = 0;\n\n\n\n c.bench_function(\"typeck_big_function\", |b| {\n\n b.iter(|| {\n\n s.clear();\n\n writeln!(s, \"fn Main() -> i32 {{\")?;\n\n writeln!(s, \" A = 0\")?;\n\n\n\n for index in counter..counter + 1000 {\n\n writeln!(s, \" A = A + {}\", index)?;\n\n }\n\n\n\n writeln!(s, \" A\")?;\n\n writeln!(s, \"}}\")?;\n\n\n\n db.set_source(s.clone());\n\n db.unify_function(name)?;\n\n counter += 1;\n\n Ok(()) as Result<()>\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_noop_change, bench_compile, 
bench_typeck_big_function);\n\ncriterion_main!(benches);\n", "file_path": "benches/bench.rs", "rank": 39, "score": 106088.03620221696 }, { "content": "trait ProcessResultsExt<T, E>: IntoIterator<Item = result::Result<T, E>> + Sized {\n\n fn process_results<F, R>(self, processor: F) -> result::Result<R, E>\n\n where\n\n F: FnOnce(ProcessResults<Self::IntoIter, E>) -> R,\n\n {\n\n itertools::process_results(self, processor)\n\n }\n\n}\n\n\n\nimpl<I, T, E> ProcessResultsExt<T, E> for I where I: IntoIterator<Item = result::Result<T, E>> {}\n\n\n\npub use database::Database;\n\npub use intern::{Intern, InternExt};\n\npub use jit::{Context, Jit};\n\npub use lower::Lower;\n\npub use parse::Parse;\n\npub use pretty::{PrettyExt, PrettyPrintExpr, PrettyPrintFunction, PrettyPrintType};\n\npub use source::Source;\n\npub use target::{Target, TargetExt};\n\npub use type_ck::TypeCk;\n", "file_path": "src/lib.rs", "rank": 40, "score": 103436.41473960863 }, { "content": "#[salsa::query_group(SourceDatabase)]\n\npub trait Source {\n\n #[salsa::input]\n\n fn source(&self) -> String;\n\n}\n", "file_path": "src/source.rs", "rank": 41, "score": 102136.23349664081 }, { "content": "#[salsa::query_group(TargetDatabase)]\n\npub trait Target {\n\n #[salsa::input]\n\n fn module(&self) -> Rc<RefCell<SimpleJITModule>>;\n\n\n\n #[salsa::input]\n\n fn func_ctx_pool(&self) -> Rc<Pool<FunctionBuilderContext>>;\n\n}\n\n\n", "file_path": "src/target.rs", "rank": 42, "score": 102136.23349664081 }, { "content": "pub trait PrettyExt {\n\n fn pretty_print_expr(&self, function_name: IdentId, expr: ExprId) -> PrettyPrintExpr<'_, Self> {\n\n PrettyPrintExpr {\n\n db: self,\n\n function_name,\n\n indent: Indent { count: 0 },\n\n expr,\n\n }\n\n }\n\n\n\n fn pretty_print_function<'a>(&'a self, name: IdentId, function: &'a Function) -> PrettyPrintFunction<'a, Self> {\n\n PrettyPrintFunction { db: self, name, function }\n\n }\n\n\n\n fn pretty_print_type(&self, ty: TypeId) -> PrettyPrintType<'_, Self> {\n\n 
PrettyPrintType { db: self, ty }\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> PrettyExt for T {}\n\n\n", "file_path": "src/pretty.rs", "rank": 43, "score": 99430.83879854553 }, { "content": "fn do_test_jit<F>(name: &str, source_text: &str, assert: F) -> Result<()>\n\nwhere\n\n F: FnOnce(*const u8),\n\n{\n\n let mut db = Database::default();\n\n db.set_source(source_text.to_owned());\n\n\n\n for name in db.function_names()? {\n\n db.clif_ctx(name)?;\n\n }\n\n\n\n let name = db.intern_ident(name.to_owned());\n\n let signature = db.function_signature(name)?;\n\n let cl_func_id = db.clif_func_id(false, name, signature)?;\n\n\n\n let cl_data_id = db.clif_data_id(db.intern_ident(\"hello_string\".to_owned()))?;\n\n let mut data_ctx = DataContext::new();\n\n data_ctx.define(b\"hello world!\\0\".to_vec().into_boxed_slice());\n\n\n\n db.with_module_mut(|module| {\n", "file_path": "tests/jit_tests.rs", "rank": 44, "score": 97143.35877431126 }, { "content": "pub trait TargetExt: Target {\n\n fn reset_module(&mut self) {\n\n let builder = SimpleJITBuilder::new(cranelift_module::default_libcall_names());\n\n let module = SimpleJITModule::new(builder);\n\n self.set_module(Rc::new(RefCell::new(module)));\n\n }\n\n\n\n fn with_module<T, F: FnOnce(&SimpleJITModule) -> T>(&self, f: F) -> T {\n\n f(&self.module().borrow())\n\n }\n\n\n\n fn with_module_mut<T, F: FnOnce(&mut SimpleJITModule) -> T>(&self, f: F) -> T {\n\n f(&mut self.module().borrow_mut())\n\n }\n\n}\n\n\n\nimpl<T: Target + ?Sized> TargetExt for T {}\n", "file_path": "src/target.rs", "rank": 45, "score": 93686.47448570709 }, { "content": "pub trait ParseExt: Parse {\n\n fn ty_binding(&self, name: IdentId) -> Result<TyBinding> {\n\n let Env { bindings: _, ty_bindings } = self.global_env()?;\n\n let ty_binding = ty_bindings.get(&name).ok_or_else(|| error!(\"using undeclared type {}\", self.lookup_intern_ident(name)))?;\n\n Ok(ty_binding.clone())\n\n }\n\n}\n\n\n\nimpl<T: Parse + ?Sized> ParseExt for T {}\n", "file_path": 
"src/parse.rs", "rank": 46, "score": 93686.47448570709 }, { "content": "#[test]\n\nfn test_source_update() -> Result<()> {\n\n let mut db = Database::default();\n\n\n\n {\n\n db.set_source(String::from(\n\n r\"\n", "file_path": "src/tests.rs", "rank": 47, "score": 90783.0101601461 }, { "content": "#[test]\n\nfn test_noop_update() -> Result<()> {\n\n let mut db = Database::default();\n\n let name = db.intern_ident(\"Main\".to_owned());\n\n\n\n let func1 = {\n\n db.set_source(String::from(\n\n r\"\n", "file_path": "src/tests.rs", "rank": 48, "score": 90783.0101601461 }, { "content": "fn bench_compile(c: &mut Criterion) {\n\n let mut db = Database::default();\n\n let name = db.intern_ident(\"Main\".to_owned());\n\n let mut counter = 0;\n\n\n\n c.bench_function(\"compile\", |b| {\n\n b.iter(|| {\n\n db.set_source(format!(\n\n r\"\n", "file_path": "benches/bench.rs", "rank": 49, "score": 87396.26561643009 }, { "content": "fn bench_noop_change(c: &mut Criterion) {\n\n let mut db = Database::default();\n\n let name = db.intern_ident(\"Main\".to_owned());\n\n let mut counter = 0;\n\n\n\n c.bench_function(\"noop_change\", |b| {\n\n b.iter(|| {\n\n db.set_source(format!(\n\n r\"\n", "file_path": "benches/bench.rs", "rank": 50, "score": 85188.75471402501 }, { "content": "struct FunctionTranslator<'a, 'b> {\n\n db: &'a dyn Jit,\n\n function_name: IdentId,\n\n builder: &'a mut FunctionBuilder<'b>,\n\n param_values: Vec<ClifValue>,\n\n expr_types: &'a HashMap<ExprId, TypeId>,\n\n clif_variables: HashMap<(EnvId, IdentId), Vec<ClifVariable>>,\n\n clif_functions: HashMap<(EnvId, IdentId), ClifFuncRef>,\n\n clif_data: HashMap<ClifDataId, ClifGlobalValue>,\n\n index: usize,\n\n}\n\n\n\nimpl<'a, 'b> FunctionTranslator<'a, 'b> {\n\n fn new(db: &'a dyn Jit, function_name: IdentId, builder: &'a mut FunctionBuilder<'b>, param_values: Vec<ClifValue>, expr_types: &'a HashMap<ExprId, TypeId>) -> Self {\n\n Self {\n\n db,\n\n function_name,\n\n builder,\n\n param_values,\n\n expr_types,\n", 
"file_path": "src/jit.rs", "rank": 51, "score": 78502.33406708634 }, { "content": "fn do_test_ir(source_text: &str, ir: &str) -> Result<()> {\n\n let mut db = Database::default();\n\n db.set_source(source_text.to_owned());\n\n\n\n let mut actual_ir = String::new();\n\n for name in db.function_names()? {\n\n let ctx = db.clif_ctx(name)?;\n\n codegen::write_function(&mut actual_ir, &ctx.func, &DisplayFunctionAnnotations::default())?;\n\n }\n\n\n\n print!(\"{}\", actual_ir);\n\n assert_eq!(actual_ir, ir);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/jit_tests.rs", "rank": 52, "score": 71818.68332012923 }, { "content": "fn do_test_pretty(source_text: &str, pretty_text: &str) -> Result<()> {\n\n let mut db = Database::default();\n\n db.set_source(source_text.to_owned());\n\n\n\n let items = db.module()?;\n\n let mut item_names = items.keys().copied().collect::<Vec<_>>();\n\n item_names.sort_by_key(|&name| db.lookup_intern_ident(name));\n\n\n\n let mut actual_pretty_text = String::new();\n\n for name in item_names {\n\n match &items[&name] {\n\n Item::Extern(_) => writeln!(actual_pretty_text, \"extern fn {}\", db.lookup_intern_ident(name))?,\n\n Item::Function(item) => {\n\n let (_, body) = db.lower_function(name)?;\n\n let mut item = item.clone();\n\n item.body = body;\n\n write!(&mut actual_pretty_text, \"{}\", db.pretty_print_function(name, &item))?;\n\n }\n\n Item::Struct(_) => writeln!(actual_pretty_text, \"struct {}\", db.lookup_intern_ident(name))?,\n\n }\n\n }\n\n\n\n print!(\"{}\", actual_pretty_text);\n\n assert_eq!(actual_pretty_text, pretty_text);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/jit_tests.rs", "rank": 53, "score": 70372.04193362544 }, { "content": "trait VecMapExt<T, U>: Sized {\n\n type Output;\n\n\n\n fn map<F: FnMut(T) -> U>(self, f: F) -> Self::Output;\n\n fn filter_map<F: FnMut(T) -> Option<U>>(self, f: F) -> Self::Output;\n\n fn flat_map<I: IntoIterator<Item = U>, F: FnMut(T) -> I>(self, f: F) -> Self::Output;\n\n}\n\n\n\nimpl<T> VecExt<T> 
for Vec<T> {\n\n fn into_single_item(mut self) -> result::Result<T, Self> {\n\n if let Some(item) = self.pop() {\n\n if self.is_empty() {\n\n return Ok(item);\n\n }\n\n\n\n self.push(item);\n\n }\n\n\n\n Err(self)\n\n }\n", "file_path": "src/lib.rs", "rank": 54, "score": 66007.45454314977 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Indent {\n\n count: u32,\n\n}\n\n\n\nimpl<'a> fmt::Display for Indent {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for _ in 0..self.count {\n\n f.write_str(\" \")?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct PrettyPrintExpr<'a, DB: ?Sized> {\n\n db: &'a DB,\n\n function_name: IdentId,\n\n indent: Indent,\n\n expr: ExprId,\n\n}\n", "file_path": "src/pretty.rs", "rank": 55, "score": 56089.741695076475 }, { "content": "use crate::ast::item::Struct;\n\nuse derive_more::TryInto;\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq, TryInto)]\n\n#[try_into(owned, ref, ref_mut)]\n\npub enum TyBinding {\n\n Struct(Struct),\n\n}\n", "file_path": "src/ast/ty_binding.rs", "rank": 56, "score": 55640.20931218721 }, { "content": "trait VecExt<T> {\n\n fn into_single_item(self) -> result::Result<T, Self>\n\n where\n\n Self: Sized;\n\n\n\n fn as_single_item(&mut self) -> Option<T>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 57, "score": 49502.15109990809 }, { "content": "fn Other() -> i32 {{\n\n {}\n\n}}\n\n\",\n\n counter\n\n ));\n\n\n\n assert_eq!(compile(&mut db, name)?, 123);\n\n counter += 1;\n\n Ok(()) as Result<()>\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 58, "score": 47205.03072933959 }, { "content": "fn Main() -> i32 {{\n\n {}\n\n}}\n\n\",\n\n counter\n\n ));\n\n\n\n assert_eq!(compile(&mut db, name)?, counter);\n\n counter += 1;\n\n Ok(()) as Result<()>\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 59, "score": 45753.221962271164 }, { "content": "fn Main() -> i32 {\n\n 123\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 60, "score": 45753.221962271164 
}, { "content": "fn Zzz() -> i32 {\n\nbroken\n\n}\n\n\",\n\n ));\n\n\n\n db.unify_function(name)?\n\n };\n\n\n\n assert_eq!(func1, func2);\n\n Ok(())\n\n}\n", "file_path": "src/tests.rs", "rank": 61, "score": 45753.221962271164 }, { "content": " fn intern_frontend_item(&self, item: frontend::Item) -> Item {\n\n use frontend::Item as I;\n\n\n\n match item {\n\n I::Extern(item) => {\n\n let frontend::Extern { params, return_ty } = item;\n\n let param_tys = params.into_iter().map(|(_, ty)| self.intern_frontend_type(ty)).collect();\n\n let return_ty = self.intern_frontend_type(return_ty);\n\n let signature = Signature { param_tys, return_ty };\n\n Item::Extern(Extern { signature })\n\n }\n\n\n\n I::Function(item) => {\n\n let frontend::Function { params, return_ty, stmts } = item;\n\n let (param_names, param_tys) = params.into_iter().map(|(name, ty)| (self.intern_ident(name), self.intern_frontend_type(ty))).unzip();\n\n let return_ty = self.intern_frontend_type(return_ty);\n\n let body = self.intern_frontend_block(stmts);\n\n let signature = Signature { param_tys, return_ty };\n\n Item::Function(Function { signature, param_names, body })\n\n }\n", "file_path": "src/intern.rs", "rank": 67, "score": 30662.708909832472 }, { "content": " E::GlobalDataAddr(name) => {\n\n let name = self.intern_ident(name);\n\n Expr::GlobalDataAddr(GlobalDataAddr { name })\n\n }\n\n\n\n E::Identifier(name) => {\n\n let name = self.intern_ident(name);\n\n Expr::Identifier(Identifier { env: None, name })\n\n }\n\n\n\n E::IfElse(condition, then_stmts, else_stmts) => {\n\n let condition = self.intern_frontend_expr(*condition);\n\n let then_body = self.intern_frontend_block(then_stmts);\n\n let else_body = self.intern_frontend_block(else_stmts);\n\n Expr::IfElse(IfElse { condition, then_body, else_body })\n\n }\n\n\n\n E::Index(base, offset) => {\n\n let base = self.intern_frontend_expr(*base);\n\n let offset = self.intern_frontend_expr(*offset);\n", "file_path": "src/intern.rs", "rank": 68, 
"score": 30659.563956138518 }, { "content": "\n\n I::Struct(item) => {\n\n let frontend::Struct { fields } = item;\n\n let (field_names, field_tys) = fields.into_iter().map(|(name, ty)| (self.intern_ident(name), self.intern_frontend_type(ty))).unzip();\n\n Item::Struct(Struct { field_names, field_tys })\n\n }\n\n }\n\n }\n\n\n\n fn intern_block(&self, stmts: Vec<ExprId>) -> ExprId {\n\n stmts.into_single_item().unwrap_or_else(|stmts| self.intern_expr(Expr::Block(Block { stmts })))\n\n }\n\n}\n\n\n\nimpl<T: Intern + ?Sized> InternExt for T {}\n", "file_path": "src/intern.rs", "rank": 69, "score": 30659.124861279703 }, { "content": " Expr::Index(Index { base, offset })\n\n }\n\n\n\n E::Literal(value) => Expr::Literal(Literal { value }),\n\n\n\n E::StructInit(name, fields) => {\n\n let name = self.intern_ident(name);\n\n let fields = fields.into_iter().map(|(name, expr)| (self.intern_ident(name), self.intern_frontend_expr(*expr))).collect();\n\n Expr::StructInit(StructInit { name, fields })\n\n }\n\n\n\n E::WhileLoop(condition, stmts) => {\n\n let condition = self.intern_frontend_expr(*condition);\n\n let body = self.intern_frontend_block(stmts);\n\n Expr::WhileLoop(WhileLoop { condition, body })\n\n }\n\n };\n\n\n\n self.intern_expr(expr)\n\n }\n", "file_path": "src/intern.rs", "rank": 70, "score": 30658.405615530686 }, { "content": "\n\n fn intern_frontend_type(&self, ty: frontend::Type) -> TypeId {\n\n use frontend::Type as T;\n\n\n\n let ty = match ty {\n\n T::I32 => Type::Integer(Integer { signed: true, bits: 32 }),\n\n T::Named(name) => Type::Named(self.intern_ident(name)),\n\n T::Pointer(ty) => Type::Pointer(self.intern_frontend_type(*ty)),\n\n T::U8 => Type::Integer(Integer { signed: false, bits: 8 }),\n\n T::Unit => Type::Unit,\n\n };\n\n\n\n self.intern_type(ty)\n\n }\n\n\n\n fn intern_frontend_block(&self, stmts: Vec<frontend::Expr>) -> ExprId {\n\n let stmts = stmts.map(|expr| self.intern_frontend_expr(expr));\n\n self.intern_block(stmts)\n\n }\n\n\n", 
"file_path": "src/intern.rs", "rank": 71, "score": 30658.258750492278 }, { "content": " Expr::Call(Call { env: None, name, args })\n\n }\n\n\n\n E::Comparison(lhs, op, rhs) => {\n\n let lhs = self.intern_frontend_expr(*lhs);\n\n let rhs = self.intern_frontend_expr(*rhs);\n\n Expr::Comparison(Comparison { lhs, op, rhs })\n\n }\n\n\n\n E::Deref(expr) => {\n\n let expr = self.intern_frontend_expr(*expr);\n\n Expr::Deref(Deref { expr })\n\n }\n\n\n\n E::Dot(expr, name) => {\n\n let expr = self.intern_frontend_expr(*expr);\n\n let name = self.intern_ident(name);\n\n Expr::Dot(Dot { expr, field_name: name })\n\n }\n\n\n", "file_path": "src/intern.rs", "rank": 72, "score": 30652.484672303064 }, { "content": "use crate::ast::*;\n\nuse crate::{frontend, VecExt, VecMapExt};\n\n\n\n#[salsa::query_group(InternDatabase)]\n", "file_path": "src/intern.rs", "rank": 73, "score": 30646.556609086852 }, { "content": "use crate::ast::expr::ExprId;\n\nuse crate::ast::ty::TypeId;\n\nuse crate::ast::Signature;\n\nuse derive_more::TryInto;\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Param {\n\n pub index: usize,\n\n pub ty: TypeId,\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Variable {\n\n pub decl_expr: ExprId,\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq, TryInto)]\n\n#[try_into(owned, ref, ref_mut)]\n\npub enum Binding {\n\n Extern(Signature),\n\n Function(Signature),\n\n Param(Param),\n\n Variable(Variable),\n\n}\n", "file_path": "src/ast/binding.rs", "rank": 74, "score": 29388.03098748632 }, { "content": " pub bits: u16,\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub enum Type {\n\n Bool,\n\n Integer(Integer),\n\n Named(IdentId),\n\n Number,\n\n Pointer(TypeId),\n\n Unit,\n\n Var(i32),\n\n}\n\n\n\nimpl Type {\n\n pub fn as_named(&self) -> Option<IdentId> {\n\n if let &Self::Named(ty) = self {\n\n Some(ty)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/ast/ty.rs", "rank": 75, "score": 
29316.824358434736 }, { "content": "use crate::ast::IdentId;\n\nuse derive_more::Display;\n\n\n\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Display)]\n\n#[display(fmt = \"{}\", \"_0\")]\n\npub struct TypeId(salsa::InternId);\n\n\n\nimpl salsa::InternKey for TypeId {\n\n fn from_intern_id(v: salsa::InternId) -> Self {\n\n Self(v)\n\n }\n\n\n\n fn as_intern_id(&self) -> salsa::InternId {\n\n self.0\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Integer {\n\n pub signed: bool,\n", "file_path": "src/ast/ty.rs", "rank": 76, "score": 29316.338359268957 }, { "content": "use crate::ast::*;\n\nuse crate::lower::Lower;\n\nuse crate::unify::UnifyExprContext;\n\nuse crate::Result;\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\n#[salsa::query_group(TypeCkDatabase)]\n", "file_path": "src/type_ck.rs", "rank": 77, "score": 29072.262505075607 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.decl_expr = transform.transform_expr(self.decl_expr)?;\n\n self.body = transform.transform_expr(self.body)?;\n\n Ok(Expr::Scope(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct StructInit {\n\n pub name: IdentId,\n\n pub fields: im_rc::HashMap<IdentId, ExprId>,\n\n}\n\n\n\nimpl StructInit {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { name: _, fields } = self;\n\n for &expr in fields.values() {\n", "file_path": "src/ast/expr.rs", "rank": 78, "score": 28568.494779099 }, { "content": "pub struct Assign {\n\n pub lvalue: ExprId,\n\n pub expr: ExprId,\n\n}\n\n\n\nimpl Assign {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { lvalue, expr } = self;\n\n visitor.visit_expr(lvalue)?;\n\n visitor.visit_expr(expr)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut 
T) -> Result<Expr, T::Error> {\n\n self.lvalue = transform.transform_expr(self.lvalue)?;\n\n self.expr = transform.transform_expr(self.expr)?;\n\n Ok(Expr::Assign(self))\n\n }\n\n}\n\n\n", "file_path": "src/ast/expr.rs", "rank": 79, "score": 28568.192852738743 }, { "content": " pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> {\n\n Ok(Expr::GlobalDataAddr(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Identifier {\n\n pub env: Option<EnvId>,\n\n pub name: IdentId,\n\n}\n\n\n\nimpl Identifier {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> {\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> {\n\n Ok(Expr::Identifier(self))\n\n }\n\n}\n", "file_path": "src/ast/expr.rs", "rank": 80, "score": 28568.002236263834 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Scope {\n\n pub scope_env: EnvId,\n\n pub decl_name: IdentId,\n\n pub decl_expr: ExprId,\n\n pub body: ExprId,\n\n}\n\n\n\nimpl Scope {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self {\n\n scope_env: _,\n\n decl_name: _,\n\n decl_expr,\n\n body,\n\n } = self;\n\n visitor.visit_expr(decl_expr)?;\n\n visitor.visit_expr(body)?;\n", "file_path": "src/ast/expr.rs", "rank": 81, "score": 28567.722453155566 }, { "content": " self.else_body = transform.transform_expr(self.else_body)?;\n\n Ok(Expr::IfElse(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Index {\n\n pub base: ExprId,\n\n pub offset: ExprId,\n\n}\n\n\n\nimpl Index {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { base, offset } = self;\n\n visitor.visit_expr(base)?;\n\n visitor.visit_expr(offset)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, 
transform: &mut T) -> Result<Expr, T::Error> {\n", "file_path": "src/ast/expr.rs", "rank": 82, "score": 28566.020425002986 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.expr = transform.transform_expr(self.expr)?;\n\n Ok(Expr::Dot(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct GlobalDataAddr {\n\n pub name: IdentId,\n\n}\n\n\n\nimpl GlobalDataAddr {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { name: _name } = self;\n\n Ok(())\n\n }\n\n\n", "file_path": "src/ast/expr.rs", "rank": 83, "score": 28564.840873793848 }, { "content": " Ok(Expr::Block(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Call {\n\n pub env: Option<EnvId>,\n\n pub name: IdentId,\n\n pub args: Vec<ExprId>,\n\n}\n\n\n\nimpl Call {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { env: _, name: _, args } = self;\n\n for expr in args {\n\n visitor.visit_expr(expr)?;\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/ast/expr.rs", "rank": 84, "score": 28564.668758777978 }, { "content": "#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Block {\n\n pub stmts: Vec<ExprId>,\n\n}\n\n\n\nimpl Block {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { stmts } = self;\n\n for expr in stmts {\n\n visitor.visit_expr(expr)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n for stmt in self.stmts.iter_mut() {\n\n *stmt = transform.transform_expr(*stmt)?;\n\n }\n\n\n", "file_path": "src/ast/expr.rs", "rank": 85, "score": 28564.052440741067 }, { "content": "\n\nimpl WhileLoop {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self 
{ condition, body } = self;\n\n visitor.visit_expr(condition)?;\n\n visitor.visit_expr(body)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.condition = transform.transform_expr(self.condition)?;\n\n self.body = transform.transform_expr(self.body)?;\n\n Ok(Expr::WhileLoop(self))\n\n }\n\n}\n\n\n\nmacro_rules! expr_enum {\n\n ( $( [ $ty:ident, $visit:ident, $transform:ident, $map:ident ] ),* ) => {\n\n #[derive(Clone, Debug, Hash, PartialEq, Eq, TryInto)]\n\n #[try_into(owned, ref, ref_mut)]\n", "file_path": "src/ast/expr.rs", "rank": 86, "score": 28563.77872197321 }, { "content": " visitor.visit_expr(expr)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.expr = transform.transform_expr(self.expr)?;\n\n Ok(Expr::Deref(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Dot {\n\n pub expr: ExprId,\n\n pub field_name: IdentId,\n\n}\n\n\n\nimpl Dot {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { expr, field_name: _ } = self;\n\n visitor.visit_expr(expr)?;\n", "file_path": "src/ast/expr.rs", "rank": 87, "score": 28563.437943898796 }, { "content": " pub enum Expr {\n\n $(\n\n $ty($ty),\n\n )*\n\n }\n\n\n\n impl Expr {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, expr_id: ExprId, visitor: &mut V) -> Result<(), V::Error> {\n\n match self {\n\n $(\n\n Self::$ty(expr) => visitor.$visit(expr_id, expr),\n\n )*\n\n }\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(self, expr_id: ExprId, transform: &mut T) -> Result<Self, T::Error> {\n\n match self {\n\n $(\n\n Self::$ty(expr) => transform.$transform(expr_id, expr),\n\n )*\n", "file_path": "src/ast/expr.rs", "rank": 88, "score": 28563.38252607835 }, { "content": "\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct IfElse {\n\n pub condition: 
ExprId,\n\n pub then_body: ExprId,\n\n pub else_body: ExprId,\n\n}\n\n\n\nimpl IfElse {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { condition, then_body, else_body } = self;\n\n visitor.visit_expr(condition)?;\n\n visitor.visit_expr(then_body)?;\n\n visitor.visit_expr(else_body)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.condition = transform.transform_expr(self.condition)?;\n\n self.then_body = transform.transform_expr(self.then_body)?;\n", "file_path": "src/ast/expr.rs", "rank": 89, "score": 28563.320673250386 }, { "content": " self.base = transform.transform_expr(self.base)?;\n\n self.offset = transform.transform_expr(self.offset)?;\n\n Ok(Expr::Index(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Literal {\n\n pub value: i32,\n\n}\n\n\n\nimpl Literal {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { value: _value } = self;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> {\n\n Ok(Expr::Literal(self))\n\n }\n", "file_path": "src/ast/expr.rs", "rank": 90, "score": 28563.233217537043 }, { "content": " visitor.visit_expr(expr)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n for (_, expr_mut) in self.fields.iter_mut() {\n\n *expr_mut = transform.transform_expr(*expr_mut)?;\n\n }\n\n\n\n Ok(Expr::StructInit(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct WhileLoop {\n\n pub condition: ExprId,\n\n pub body: ExprId,\n\n}\n", "file_path": "src/ast/expr.rs", "rank": 91, "score": 28562.652057708376 }, { "content": " visitor.visit_expr(lhs)?;\n\n visitor.visit_expr(rhs)?;\n\n Ok(())\n\n }\n\n\n\n pub fn transform<T: ExprTransform 
+ ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n self.lhs = transform.transform_expr(self.lhs)?;\n\n self.rhs = transform.transform_expr(self.rhs)?;\n\n Ok(Expr::Comparison(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Deref {\n\n pub expr: ExprId,\n\n}\n\n\n\nimpl Deref {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { expr } = self;\n", "file_path": "src/ast/expr.rs", "rank": 92, "score": 28561.07945202743 }, { "content": "\n\n pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> {\n\n for expr in self.args.iter_mut() {\n\n *expr = transform.transform_expr(*expr)?;\n\n }\n\n\n\n Ok(Expr::Call(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\npub struct Comparison {\n\n pub lhs: ExprId,\n\n pub op: ComparisonKind,\n\n pub rhs: ExprId,\n\n}\n\n\n\nimpl Comparison {\n\n pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> {\n\n let Self { lhs, op: _, rhs } = self;\n", "file_path": "src/ast/expr.rs", "rank": 93, "score": 28560.919699848986 }, { "content": " fn map_identifier(&mut self, _expr_id: ExprId, expr: Identifier) -> Result<TypeId> {\n\n let Identifier { env, name } = expr;\n\n match self.context.db.binding(self.context.function_name, env.unwrap(), name)? 
{\n\n Binding::Param(binding) => {\n\n let Param { index: _, ty: param_ty } = binding;\n\n self.unify_type(param_ty)\n\n }\n\n Binding::Variable(binding) => {\n\n let Variable { decl_expr } = binding;\n\n self.context.unify_expr(decl_expr, self.ty)?;\n\n Ok(self.ty)\n\n }\n\n Binding::Extern(_) | Binding::Function(_) => Err(error!(\"functions can only be called\")),\n\n }\n\n }\n\n\n\n fn map_if_else(&mut self, _expr_id: ExprId, expr: IfElse) -> Result<TypeId> {\n\n let IfElse { condition, then_body, else_body } = expr;\n\n self.context.unify_expr(condition, self.context.db.bool_type())?;\n\n self.context.unify_expr(then_body, self.ty)?;\n", "file_path": "src/unify.rs", "rank": 94, "score": 35.58859892082548 }, { "content": " self.builder.def_var(variable, value);\n\n }\n\n }\n\n\n\n self.clif_variables.insert((decl_env, name), variable.clone());\n\n Ok(variable)\n\n }\n\n\n\n fn struct_field_range(&self, struct_expr: ExprId, field_name: IdentId) -> Result<Range<usize>> {\n\n let ty = self.expr_types[&struct_expr];\n\n let ty_name = self.db.lookup_intern_type(ty).as_named().unwrap();\n\n let ty_binding = self.db.ty_binding(ty_name)?;\n\n let Struct { field_names, field_tys } = ty_binding.try_into().unwrap();\n\n let field_index = field_names.into_iter().position(|n| n == field_name).unwrap();\n\n self.db.clif_struct_field_range(field_tys, field_index)\n\n }\n\n\n\n fn translate_lvalue_dot(&mut self, expr: Dot) -> Result<Vec<ClifVariable>> {\n\n let Dot { expr, field_name } = expr;\n\n\n", "file_path": "src/jit.rs", "rank": 95, "score": 32.1556225651489 }, { "content": " self.context.unify_expr(expr, struct_ty)?;\n\n\n\n let struct_ty = refine(self.context.db, &self.context.ty_bindings, struct_ty);\n\n let struct_name = struct_ty.as_named().ok_or_else(|| error!(\"only structs can be used with ., not {:?}\", struct_ty))?;\n\n let ty_binding = self.context.db.ty_binding(struct_name)?;\n\n let Struct { field_names, field_tys } = ty_binding.try_into().map_err(|e| 
error!(\"only structs can be used with ., not {}\", e))?;\n\n\n\n let index = field_names\n\n .into_iter()\n\n .position(|n| n == field_name)\n\n .ok_or_else(|| error!(\"invalid struct field {}\", self.context.db.lookup_intern_ident(field_name)))?;\n\n\n\n self.unify_type(field_tys[index])\n\n }\n\n\n\n fn map_global_data_addr(&mut self, _expr_id: ExprId, _expr: GlobalDataAddr) -> Result<TypeId> {\n\n let pointee = self.context.new_var();\n\n self.unify_type(self.context.db.pointer_type(pointee))\n\n }\n\n\n", "file_path": "src/unify.rs", "rank": 96, "score": 31.605023466132167 }, { "content": "use crate::ast::*;\n\nuse crate::intern::Intern;\n\nuse crate::lower::{Lower, LowerExt};\n\nuse crate::parse::ParseExt;\n\nuse crate::pretty::PrettyExt;\n\nuse crate::Result;\n\nuse std::collections::HashMap;\n\nuse std::convert::TryInto;\n\n\n\npub struct UnifyExprContext<'a, DB: ?Sized> {\n\n db: &'a DB,\n\n function_name: IdentId,\n\n result: HashMap<ExprId, TypeId>,\n\n ty_bindings: HashMap<i32, TypeId>,\n\n index: i32,\n\n}\n\n\n\nimpl<'a, DB: Lower + ?Sized> UnifyExprContext<'a, DB> {\n\n pub fn new(db: &'a DB, function_name: IdentId) -> Self {\n\n Self {\n", "file_path": "src/unify.rs", "rank": 97, "score": 31.52210299395224 }, { "content": " clif_variables: HashMap::new(),\n\n clif_functions: HashMap::new(),\n\n clif_data: HashMap::new(),\n\n index: 0,\n\n }\n\n }\n\n\n\n fn translate_variable(&mut self, env: EnvId, name: IdentId) -> Result<Vec<ClifVariable>> {\n\n let (decl_env, binding) = self.db.binding_pair(self.function_name, env, name)?;\n\n\n\n if let Some(variable) = self.clif_variables.get(&(decl_env, name)) {\n\n return Ok(variable.clone());\n\n }\n\n\n\n let (value, ty) = match binding {\n\n Binding::Param(binding) => {\n\n let Param { index, ty } = binding;\n\n let Signature { param_tys, return_ty: _ } = self.db.function_signature(self.function_name)?;\n\n let range = self.db.clif_struct_field_range(param_tys, index)?;\n\n 
(Some(self.param_values[range].to_vec()), ty)\n", "file_path": "src/jit.rs", "rank": 98, "score": 29.845611538282288 }, { "content": " self.context.unify_expr(else_body, self.ty)?;\n\n Ok(self.ty)\n\n }\n\n\n\n fn map_index(&mut self, _expr_id: ExprId, expr: Index) -> Result<TypeId> {\n\n let Index { base, offset } = expr;\n\n self.context.unify_expr(base, self.context.db.pointer_type(self.ty))?;\n\n self.context.unify_expr(offset, self.context.db.integer_type(true, 64))?;\n\n Ok(self.ty)\n\n }\n\n\n\n fn map_literal(&mut self, _expr_id: ExprId, _expr: Literal) -> Result<TypeId> {\n\n self.unify_type(self.context.db.number_type())\n\n }\n\n\n\n fn map_scope(&mut self, _expr_id: ExprId, expr: Scope) -> Result<TypeId> {\n\n let Scope {\n\n scope_env: _,\n\n decl_name: _,\n\n decl_expr: _,\n", "file_path": "src/unify.rs", "rank": 99, "score": 29.474136527488525 } ]
Rust
tests/integrations/config/test_config_client.rs
amyangfei/tikv
5019e61d0c8e1966f6b649894ad17ba128068a1e
use std::cmp::Ordering; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use kvproto::configpb::*; use configuration::{ConfigChange, Configuration}; use pd_client::errors::Result; use pd_client::ConfigClient; use raftstore::store::Config as RaftstoreConfig; use tikv::config::*; use tikv_util::config::ReadableDuration; use tikv_util::worker::FutureWorker; struct MockPdClient { configs: Mutex<HashMap<String, Config>>, } #[derive(Clone)] struct Config { version: Version, content: String, update: Vec<ConfigEntry>, } impl Config { fn new(version: Version, content: String, update: Vec<ConfigEntry>) -> Self { Config { version, content, update, } } } impl MockPdClient { fn new() -> Self { MockPdClient { configs: Mutex::new(HashMap::new()), } } fn register(self: Arc<Self>, id: &str, cfg: TiKvConfig) -> ConfigHandler { let (version, cfg) = ConfigHandler::create(id.to_owned(), self, cfg).unwrap(); ConfigHandler::start( id.to_owned(), ConfigController::new(cfg, version), FutureWorker::new("test-pd-worker").scheduler(), ) .unwrap() } fn update_cfg<F>(&self, id: &str, f: F) where F: Fn(&mut TiKvConfig), { let mut configs = self.configs.lock().unwrap(); let cfg = configs.get_mut(id).unwrap(); let mut config: TiKvConfig = toml::from_str(&cfg.content).unwrap(); f(&mut config); cfg.content = toml::to_string(&config).unwrap(); cfg.version.local += 1; } fn update_raw<F>(&self, id: &str, f: F) where F: Fn(&mut String), { let mut configs = self.configs.lock().unwrap(); let cfg = configs.get_mut(id).unwrap(); f(&mut cfg.content); cfg.version.local += 1; } fn get(&self, id: &str) -> Config { self.configs.lock().unwrap().get(id).unwrap().clone() } } impl ConfigClient for MockPdClient { fn register_config(&self, id: String, v: Version, cfg: String) -> Result<CreateResponse> { let old = self .configs .lock() .unwrap() .insert(id.clone(), Config::new(v.clone(), cfg.clone(), Vec::new())); assert!(old.is_none(), format!("id {} already be registered", id)); let mut status = 
Status::default(); status.set_code(StatusCode::Ok); let mut resp = CreateResponse::default(); resp.set_status(status); resp.set_config(cfg); resp.set_version(v); Ok(resp) } fn get_config(&self, id: String, version: Version) -> Result<GetResponse> { let mut resp = GetResponse::default(); let mut status = Status::default(); let configs = self.configs.lock().unwrap(); if let Some(cfg) = configs.get(&id) { match cmp_version(&cfg.version, &version) { Ordering::Equal => status.set_code(StatusCode::Ok), _ => { resp.set_config(cfg.content.clone()); status.set_code(StatusCode::WrongVersion); } } resp.set_version(cfg.version.clone()); } else { status.set_code(StatusCode::Unknown); } resp.set_status(status); Ok(resp) } fn update_config( &self, id: String, version: Version, mut entries: Vec<ConfigEntry>, ) -> Result<UpdateResponse> { let mut resp = UpdateResponse::default(); let mut status = Status::default(); if let Some(cfg) = self.configs.lock().unwrap().get_mut(&id) { match cmp_version(&cfg.version, &version) { Ordering::Equal => { cfg.update.append(&mut entries); cfg.version.local += 1; status.set_code(StatusCode::Ok); } _ => status.set_code(StatusCode::WrongVersion), } resp.set_version(cfg.version.clone()); } else { status.set_code(StatusCode::Unknown); } resp.set_status(status); Ok(resp) } } #[test] fn test_update_config() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let mut cfg = cfg_handler.get_config().clone(); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!(cfg_handler.get_config(), &cfg); pd_client.update_cfg(id, |cfg| { cfg.refresh_config_interval = ReadableDuration::hours(12); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); cfg.refresh_config_interval = ReadableDuration::hours(12); assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_update_not_support_config() { let pd_client = Arc::new(MockPdClient::new()); let id 
= "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let cfg = cfg_handler.get_config().clone(); pd_client.update_cfg(id, |cfg| { cfg.server.addr = "localhost:3000".to_owned(); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_update_to_invalid() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg = TiKvConfig::default(); cfg.raft_store.raft_log_gc_threshold = 2000; let mut cfg_handler = pd_client.clone().register(id, cfg); pd_client.update_cfg(id, |cfg| { cfg.raft_store.raft_log_gc_threshold = 0; }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!( cfg_handler.get_config().raft_store.raft_log_gc_threshold, 2000 ); let cfg = pd_client.get(id); assert_eq!(cfg.update.len(), 1); assert_eq!(cfg.update[0].name, "raftstore.raft-log-gc-threshold"); assert_eq!(cfg.update[0].value, toml::to_string(&2000).unwrap()); } #[test] fn test_compatible_config() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let mut cfg = cfg_handler.get_config().clone(); pd_client.update_raw(id, |cfg| { *cfg = " [new.config] xyz = 1 [raftstore] raft-log-gc-threshold = 2048 " .to_owned(); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); cfg.raft_store.raft_log_gc_threshold = 2048; assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_dispatch_change() { use configuration::ConfigManager; use std::error::Error; use std::result::Result; #[derive(Clone)] struct CfgManager(Arc<Mutex<RaftstoreConfig>>); impl ConfigManager for CfgManager { fn dispatch(&mut self, c: ConfigChange) -> Result<(), Box<dyn Error>> { self.0.lock().unwrap().update(c); Ok(()) } } let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let cfg = TiKvConfig::default(); let mgr = 
CfgManager(Arc::new(Mutex::new(Default::default()))); let mut cfg_handler = { let (version, cfg) = ConfigHandler::create(id.to_owned(), pd_client.clone(), cfg).unwrap(); *mgr.0.lock().unwrap() = cfg.raft_store.clone(); let mut controller = ConfigController::new(cfg, version); controller.register(Module::Raftstore, Box::new(mgr.clone())); ConfigHandler::start( id.to_owned(), controller, FutureWorker::new("test-pd-worker").scheduler(), ) .unwrap() }; pd_client.update_cfg(id, |cfg| { cfg.raft_store.raft_log_gc_threshold = 2000; }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!( cfg_handler.get_config().raft_store.raft_log_gc_threshold, 2000 ); assert_eq!(mgr.0.lock().unwrap().raft_log_gc_threshold, 2000); }
use std::cmp::Ordering; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use kvproto::configpb::*; use configuration::{ConfigChange, Configuration}; use pd_client::errors::Result; use pd_client::ConfigClient; use raftstore::store::Config as RaftstoreConfig; use tikv::config::*; use tikv_util::config::ReadableDuration; use tikv_util::worker::FutureWorker; struct MockPdClient { configs: Mutex<HashMap<String, Config>>, } #[derive(Clone)] struct Config { version: Version, content: String, update: Vec<ConfigEntry>, } impl Config { fn new(version: Version, content: String, update: Vec<ConfigEntry>) -> Self { Config { version, content, update, } } } impl MockPdClient { fn new() -> Self { MockPdClient { configs: Mutex::new(HashMap::new()), } } fn register(self: Arc<Self>, id: &str, cfg: TiKvConfig) -> ConfigHandler { let (version, cfg) = ConfigHandler::create(id.to_owned(), self, cfg).unwrap(); ConfigHandler::start( id.to_owned(), ConfigController::new(cfg, version), FutureWorker::new("test-pd-worker").scheduler(), ) .unwrap() } fn update_cfg<F>(&self, id: &str, f: F) where F: Fn(&mut TiKvConfig), { let mut configs = self.configs.lock().unwrap(); let cfg = configs.get_mut(id).unwrap(); let mut config: TiKvConfig = toml::from_str(&cfg.content).unwrap(); f(&mut config); cfg.content = toml::to_string(&config).unwrap(); cfg.version.local += 1; } fn update_raw<F>(&self, id: &str, f: F) where F: Fn(&mut String), { let mut configs = self.configs.lock().unwrap(); let cfg = configs.get_mut(id).unwrap(); f(&mut cfg.content); cfg.version.local += 1; } fn get(&self, id: &str) -> Config { self.configs.lock().unwrap().get(id).unwrap().clone() } } impl ConfigClient for MockPdClient { fn register_config(&self, id: String, v: Version, cfg: String) -> Result<CreateResponse> { let old = self .configs .lock() .unwrap() .insert(id.clone(), Config::new(v.clone(), cfg.clone(), Vec::new())); assert!(old.is_none(), format!("id {} already be registered", id)); let mut status = 
Status::default(); status.set_code(StatusCode::Ok); let mut resp = CreateResponse::default(); resp.set_status(status); resp.set_config(cfg); resp.set_version(v); Ok(resp) } fn get_config(&self, id: String, version: Version) -> Result<GetResponse> { let mut resp = GetResponse::default(); let mut status = Status::default(); let configs = self.configs.lock().unwrap();
resp.set_status(status); Ok(resp) } fn update_config( &self, id: String, version: Version, mut entries: Vec<ConfigEntry>, ) -> Result<UpdateResponse> { let mut resp = UpdateResponse::default(); let mut status = Status::default(); if let Some(cfg) = self.configs.lock().unwrap().get_mut(&id) { match cmp_version(&cfg.version, &version) { Ordering::Equal => { cfg.update.append(&mut entries); cfg.version.local += 1; status.set_code(StatusCode::Ok); } _ => status.set_code(StatusCode::WrongVersion), } resp.set_version(cfg.version.clone()); } else { status.set_code(StatusCode::Unknown); } resp.set_status(status); Ok(resp) } } #[test] fn test_update_config() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let mut cfg = cfg_handler.get_config().clone(); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!(cfg_handler.get_config(), &cfg); pd_client.update_cfg(id, |cfg| { cfg.refresh_config_interval = ReadableDuration::hours(12); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); cfg.refresh_config_interval = ReadableDuration::hours(12); assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_update_not_support_config() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let cfg = cfg_handler.get_config().clone(); pd_client.update_cfg(id, |cfg| { cfg.server.addr = "localhost:3000".to_owned(); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_update_to_invalid() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg = TiKvConfig::default(); cfg.raft_store.raft_log_gc_threshold = 2000; let mut cfg_handler = pd_client.clone().register(id, cfg); pd_client.update_cfg(id, |cfg| { cfg.raft_store.raft_log_gc_threshold = 0; }); 
cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!( cfg_handler.get_config().raft_store.raft_log_gc_threshold, 2000 ); let cfg = pd_client.get(id); assert_eq!(cfg.update.len(), 1); assert_eq!(cfg.update[0].name, "raftstore.raft-log-gc-threshold"); assert_eq!(cfg.update[0].value, toml::to_string(&2000).unwrap()); } #[test] fn test_compatible_config() { let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let mut cfg_handler = pd_client.clone().register(id, TiKvConfig::default()); let mut cfg = cfg_handler.get_config().clone(); pd_client.update_raw(id, |cfg| { *cfg = " [new.config] xyz = 1 [raftstore] raft-log-gc-threshold = 2048 " .to_owned(); }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); cfg.raft_store.raft_log_gc_threshold = 2048; assert_eq!(cfg_handler.get_config(), &cfg); } #[test] fn test_dispatch_change() { use configuration::ConfigManager; use std::error::Error; use std::result::Result; #[derive(Clone)] struct CfgManager(Arc<Mutex<RaftstoreConfig>>); impl ConfigManager for CfgManager { fn dispatch(&mut self, c: ConfigChange) -> Result<(), Box<dyn Error>> { self.0.lock().unwrap().update(c); Ok(()) } } let pd_client = Arc::new(MockPdClient::new()); let id = "localhost:1080"; let cfg = TiKvConfig::default(); let mgr = CfgManager(Arc::new(Mutex::new(Default::default()))); let mut cfg_handler = { let (version, cfg) = ConfigHandler::create(id.to_owned(), pd_client.clone(), cfg).unwrap(); *mgr.0.lock().unwrap() = cfg.raft_store.clone(); let mut controller = ConfigController::new(cfg, version); controller.register(Module::Raftstore, Box::new(mgr.clone())); ConfigHandler::start( id.to_owned(), controller, FutureWorker::new("test-pd-worker").scheduler(), ) .unwrap() }; pd_client.update_cfg(id, |cfg| { cfg.raft_store.raft_log_gc_threshold = 2000; }); cfg_handler.refresh_config(pd_client.as_ref()).unwrap(); assert_eq!( cfg_handler.get_config().raft_store.raft_log_gc_threshold, 2000 ); 
assert_eq!(mgr.0.lock().unwrap().raft_log_gc_threshold, 2000); }
if let Some(cfg) = configs.get(&id) { match cmp_version(&cfg.version, &version) { Ordering::Equal => status.set_code(StatusCode::Ok), _ => { resp.set_config(cfg.content.clone()); status.set_code(StatusCode::WrongVersion); } } resp.set_version(cfg.version.clone()); } else { status.set_code(StatusCode::Unknown); }
if_condition
[ { "content": "fn config_to_slice(config_change: &[(String, String)]) -> Vec<(&str, &str)> {\n\n config_change\n\n .iter()\n\n .map(|(name, value)| (name.as_str(), value.as_str()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 0, "score": 391110.64508115995 }, { "content": "fn mvcc_reader_load_lock<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let test_keys: Vec<Key> = KvGenerator::with_seed(\n\n config.key_length,\n\n config.value_length,\n\n DEFAULT_KV_GENERATOR_SEED,\n\n )\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, _)| Key::from_raw(&k))\n\n .collect();\n\n\n\n b.iter_batched(\n\n || {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n (snapshot, &test_keys)\n\n },\n\n |(snapshot, test_kvs)| {\n\n for key in test_kvs {\n\n let mut reader =\n\n MvccReader::new(snapshot.clone(), None, true, ctx.get_isolation_level());\n\n black_box(reader.load_lock(&key).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 1, "score": 374769.33923676657 }, { "content": "fn bench_detect(b: &mut Bencher, cfg: &Config) {\n\n let mut detect_table = DetectTable::new(cfg.ttl);\n\n let mut generator = DetectGenerator::new(cfg.range);\n\n b.iter(|| {\n\n for entry in generator.generate(cfg.n) {\n\n detect_table.detect(\n\n entry.get_txn().into(),\n\n entry.get_wait_for_txn().into(),\n\n entry.get_key_hash(),\n\n );\n\n }\n\n });\n\n}\n\n\n", "file_path": "tests/benches/deadlock_detector/mod.rs", "rank": 2, "score": 356563.08779790066 }, { "content": "fn from_change_value(v: ConfigValue) -> CfgResult<String> {\n\n let s = match v {\n\n ConfigValue::Duration(_) => {\n\n let v: ReadableDuration = v.into();\n\n toml::to_string(&v)?\n\n }\n\n ConfigValue::Size(_) => {\n\n let v: ReadableSize = v.into();\n\n toml::to_string(&v)?\n\n }\n\n ConfigValue::U64(ref v) 
=> toml::to_string(v)?,\n\n ConfigValue::F64(ref v) => toml::to_string(v)?,\n\n ConfigValue::Usize(ref v) => toml::to_string(v)?,\n\n ConfigValue::Bool(ref v) => toml::to_string(v)?,\n\n ConfigValue::String(ref v) => toml::to_string(v)?,\n\n _ => unreachable!(),\n\n };\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 3, "score": 348257.49333287677 }, { "content": "fn bench_raft_cluster<T, F>(c: &mut Criterion, factory: F, label: &str)\n\nwhere\n\n T: Simulator + 'static,\n\n F: ClusterFactory<T>,\n\n{\n\n let nodes_coll = vec![1, 3, 5];\n\n let value_size_coll = vec![8, 128, 1024, 4096];\n\n\n\n let mut set_inputs = vec![];\n\n let mut get_inputs = vec![];\n\n let mut delete_inputs = vec![];\n\n for nodes in nodes_coll {\n\n for &value_size in &value_size_coll {\n\n set_inputs.push(SetConfig {\n\n factory: factory.clone(),\n\n nodes,\n\n value_size,\n\n });\n\n }\n\n get_inputs.push(GetConfig {\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 4, "score": 344171.6813251942 }, { "content": "fn mvcc_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n b.iter_batched(\n\n || {\n\n let mutations: Vec<(Mutation, Vec<u8>)> = KvGenerator::with_seed(\n\n config.key_length,\n\n config.value_length,\n\n DEFAULT_KV_GENERATOR_SEED,\n\n )\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, v)| (Mutation::Put((Key::from_raw(&k), v.clone())), k.clone()))\n\n .collect();\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n (mutations, snapshot)\n\n },\n\n |(mutations, snapshot)| {\n\n for (mutation, primary) in mutations {\n\n let mut txn = mvcc::new_txn!(snapshot.clone(), 1, true);\n\n txn.prewrite(mutation, &primary, false, 0, 0, TimeStamp::default())\n\n .unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 5, "score": 336925.9631670233 }, { "content": "fn 
storage_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n\n\n let data: Vec<(Context, Vec<Mutation>, Vec<u8>)> = kvs\n\n .iter()\n\n .map(|(k, v)| {\n\n (\n\n Context::default(),\n\n vec![Mutation::Put((Key::from_raw(&k), v.clone()))],\n\n k.clone(),\n\n )\n\n })\n\n .collect();\n\n (data, &store)\n\n },\n\n |(data, store)| {\n\n for (context, mutations, primary) in data {\n\n black_box(store.prewrite(context, mutations, primary, 1).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 6, "score": 336925.96316702326 }, { "content": "fn txn_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |keys| {\n\n for key in keys {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = mvcc::new_txn!(snapshot, 1, true);\n\n txn.commit(key, 2.into()).unwrap();\n\n let modifies = txn.into_modifies();\n\n black_box(engine.write(&ctx, modifies)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 7, "score": 336925.96316702326 }, { "content": "fn txn_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n b.iter_batched(\n\n || {\n\n let mutations: Vec<(Mutation, Vec<u8>)> =\n\n KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, v)| 
(Mutation::Put((Key::from_raw(&k), v.clone())), k.clone()))\n\n .collect();\n\n mutations\n\n },\n\n |mutations| {\n\n for (mutation, primary) in mutations {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = mvcc::new_txn!(snapshot, 1, true);\n\n txn.prewrite(mutation, &primary, false, 0, 0, TimeStamp::default())\n\n .unwrap();\n\n let modifies = txn.into_modifies();\n\n black_box(engine.write(&ctx, modifies)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 8, "score": 336925.96316702326 }, { "content": "fn storage_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n\n\n for (k, v) in &kvs {\n\n store\n\n .prewrite(\n\n Context::default(),\n\n vec![Mutation::Put((Key::from_raw(&k), v.clone()))],\n\n k.clone(),\n\n 1,\n\n )\n\n .unwrap();\n\n }\n\n\n\n (kvs, &store)\n\n },\n\n |(kvs, store)| {\n\n for (k, _) in &kvs {\n\n black_box(store.commit(Context::default(), vec![Key::from_raw(k)], 1, 2)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 9, "score": 336925.96316702326 }, { "content": "fn mvcc_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |(snapshot, keys)| {\n\n for key in keys {\n\n let mut txn = mvcc::new_txn!(snapshot.clone(), 1, true);\n\n black_box(txn.commit(key, 1.into())).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 10, "score": 336925.9631670233 }, { 
"content": "fn scan_impl<F>(mut it: DBIterator<&DB>, start_key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n F: FnMut(&[u8], &[u8]) -> Result<bool>,\n\n{\n\n let mut remained = it.seek(start_key.into())?;\n\n while remained {\n\n remained = f(it.key(), it.value())? && it.next()?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "components/engine/src/iterable.rs", "rank": 11, "score": 334458.56031547836 }, { "content": "fn scan_impl<Iter, F>(mut it: Iter, start_key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n Iter: Iterator,\n\n F: FnMut(&[u8], &[u8]) -> Result<bool>,\n\n{\n\n let mut remained = it.seek(SeekKey::Key(start_key))?;\n\n while remained {\n\n remained = f(it.key(), it.value())? && it.next()?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<'a> From<&'a [u8]> for SeekKey<'a> {\n\n fn from(bs: &'a [u8]) -> SeekKey {\n\n SeekKey::Key(bs)\n\n }\n\n}\n\n\n", "file_path": "components/engine_traits/src/iterable.rs", "rank": 12, "score": 334458.56031547836 }, { "content": "fn storage_raw_get<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n let data: Vec<(Context, Vec<u8>)> = kvs\n\n .iter()\n\n .map(|(k, _)| (Context::default(), k.clone()))\n\n .collect();\n\n (data, &store)\n\n },\n\n |(data, store)| {\n\n for (context, key) in data {\n\n black_box(store.raw_get(context, CF_DEFAULT.to_owned(), key).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 13, "score": 333910.6700938188 }, { "content": "fn txn_rollback_conflict<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n b.iter_batched(\n\n || 
setup_prewrite(&engine, &config, 2),\n\n |keys| {\n\n for key in keys {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = mvcc::new_txn!(snapshot, 1, true);\n\n txn.rollback(key).unwrap();\n\n let modifies = txn.into_modifies();\n\n black_box(engine.write(&ctx, modifies)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 14, "score": 333910.6700938187 }, { "content": "fn txn_rollback_prewrote<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |keys| {\n\n for key in keys {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = mvcc::new_txn!(snapshot, 1, true);\n\n txn.rollback(key).unwrap();\n\n let modifies = txn.into_modifies();\n\n black_box(engine.write(&ctx, modifies)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 15, "score": 333910.6700938187 }, { "content": "// Read a file in project directory. 
It is similar to `include_str!`,\n\n// but `include_str!` a large string literal increases compile time.\n\n// See more: https://github.com/rust-lang/rust/issues/39352\n\nfn read_file_in_project_dir(path: &str) -> String {\n\n let mut p = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n p.push(path);\n\n let mut f = File::open(p).unwrap();\n\n let mut buffer = String::new();\n\n f.read_to_string(&mut buffer).unwrap();\n\n buffer\n\n}\n\n\n", "file_path": "tests/integrations/config/mod.rs", "rank": 16, "score": 331159.7935920226 }, { "content": "/// Prevents launching with an incompatible configuration\n\n///\n\n/// Loads the previously-loaded configuration from `last_tikv.toml`,\n\n/// compares key configuration items and fails if they are not\n\n/// identical.\n\npub fn check_critical_config(config: &TiKvConfig) -> Result<(), String> {\n\n // Check current critical configurations with last time, if there are some\n\n // changes, user must guarantee relevant works have been done.\n\n let store_path = Path::new(&config.storage.data_dir);\n\n let last_cfg_path = store_path.join(LAST_CONFIG_FILE);\n\n\n\n if last_cfg_path.exists() {\n\n let last_cfg = TiKvConfig::from_file(&last_cfg_path);\n\n config.check_critical_cfg_with(&last_cfg)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 17, "score": 327309.024643487 }, { "content": "/// Persists critical config to `last_tikv.toml`\n\npub fn persist_critical_config(config: &TiKvConfig) -> Result<(), String> {\n\n let store_path = Path::new(&config.storage.data_dir);\n\n let last_cfg_path = store_path.join(LAST_CONFIG_FILE);\n\n\n\n // Create parent directory if missing.\n\n if let Err(e) = fs::create_dir_all(&store_path) {\n\n return Err(format!(\n\n \"create parent directory '{}' failed: {}\",\n\n store_path.to_str().unwrap(),\n\n e\n\n ));\n\n }\n\n\n\n // Persist current critical configurations to file.\n\n if let Err(e) = config.write_to_file(&last_cfg_path) {\n\n return Err(format!(\n\n 
\"persist critical config to '{}' failed: {}\",\n\n last_cfg_path.to_str().unwrap(),\n\n e\n\n ));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 18, "score": 327307.7959810932 }, { "content": "fn normalize_column_name(name: impl std::borrow::Borrow<str>) -> String {\n\n name.borrow().to_lowercase()\n\n}\n\n\n\nimpl Table {\n\n /// Get a column reference in the table by column id.\n\n pub fn column_by_id(&self, id: i64) -> Option<&Column> {\n\n let idx = self.column_index_by_id.get(&id);\n\n idx.map(|idx| &self.columns[*idx].1)\n\n }\n\n\n\n /// Get a column reference in the table by column name (case insensitive).\n\n pub fn column_by_name(&self, name: impl std::borrow::Borrow<str>) -> Option<&Column> {\n\n let normalized_name = normalize_column_name(name);\n\n let idx = self.column_index_by_name.get(&normalized_name);\n\n idx.map(|idx| &self.columns[*idx].1)\n\n }\n\n\n\n /// Create `tipb::TableInfo` from current table.\n\n pub fn table_info(&self) -> tipb::TableInfo {\n", "file_path": "components/test_coprocessor/src/table.rs", "rank": 19, "score": 327020.85812054126 }, { "content": "fn new_config(eps: Vec<(String, u16)>) -> Config {\n\n let mut cfg = Config::default();\n\n cfg.endpoints = eps\n\n .into_iter()\n\n .map(|addr| format!(\"{}:{}\", addr.0, addr.1))\n\n .collect();\n\n cfg\n\n}\n\n\n", "file_path": "tests/integrations/pd/test_rpc_client.rs", "rank": 20, "score": 325641.57557210105 }, { "content": "fn bench_set<T, F>(b: &mut Bencher, input: &SetConfig<F>)\n\nwhere\n\n T: Simulator,\n\n F: ClusterFactory<T>,\n\n{\n\n let mut cluster = input.factory.build(input.nodes);\n\n prepare_cluster(&mut cluster, &[]);\n\n\n\n let mut kvs = KvGenerator::new(100, input.value_size);\n\n\n\n b.iter(|| {\n\n let (k, v) = kvs.next().unwrap();\n\n cluster.must_put(&k, &v)\n\n });\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 21, "score": 320060.7389703205 }, { "content": "fn bench_delete<T, F>(b: &mut Bencher, input: 
&DeleteConfig<F>)\n\nwhere\n\n T: Simulator,\n\n F: ClusterFactory<T>,\n\n{\n\n let mut cluster = input.factory.build(input.nodes);\n\n let mut kvs = KvGenerator::new(100, 128).generate(DEFAULT_DATA_SIZE);\n\n prepare_cluster(&mut cluster, &kvs);\n\n\n\n let mut keys = kvs\n\n .drain(..)\n\n .take(DEFAULT_DATA_SIZE / 10)\n\n .map(|i| i.0)\n\n .chain(KvGenerator::new(100, 0).map(|i| i.0));\n\n\n\n b.iter(|| {\n\n let k = keys.next().unwrap();\n\n cluster.must_delete(&k)\n\n });\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 22, "score": 320060.7389703205 }, { "content": "fn bench_get<T, F>(b: &mut Bencher, input: &GetConfig<F>)\n\nwhere\n\n T: Simulator,\n\n F: ClusterFactory<T>,\n\n{\n\n let mut cluster = input.factory.build(input.nodes);\n\n let mut kvs = KvGenerator::new(100, 128).generate(DEFAULT_DATA_SIZE);\n\n prepare_cluster(&mut cluster, &kvs);\n\n\n\n let mut keys = kvs\n\n .drain(..)\n\n .take(DEFAULT_DATA_SIZE / 10)\n\n .map(|i| i.0)\n\n .chain(KvGenerator::new(100, 0).map(|i| i.0));\n\n\n\n b.iter(|| {\n\n let k = keys.next().unwrap();\n\n cluster.get(&k)\n\n });\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 23, "score": 320060.7389703205 }, { "content": "#[allow(dead_code)]\n\npub fn validate_and_persist_config(config: &mut TiKvConfig, persist: bool) {\n\n if let Err(e) = check_critical_config(config) {\n\n fatal!(\"critical config check failed: {}\", e);\n\n }\n\n\n\n if persist {\n\n if let Err(e) = persist_critical_config(&config) {\n\n fatal!(\"persist critical config failed: {}\", e);\n\n }\n\n }\n\n\n\n config.compatible_adjust();\n\n if let Err(e) = config.validate() {\n\n fatal!(\"invalid configuration: {}\", e.description());\n\n }\n\n}\n", "file_path": "cmd/src/setup.rs", "rank": 24, "score": 309901.78136002313 }, { "content": "#[test]\n\nfn test_lock_manager_cfg_update() {\n\n const DEFAULT_TIMEOUT: u64 = 3000;\n\n const DEFAULT_DELAY: u64 = 100;\n\n let mut cfg = TiKvConfig::default();\n\n 
cfg.pessimistic_txn.wait_for_lock_timeout = DEFAULT_TIMEOUT;\n\n cfg.pessimistic_txn.wake_up_delay_duration = DEFAULT_DELAY;\n\n cfg.validate().unwrap();\n\n let (mut cfg_controller, waiter, deadlock, mut lock_mgr) = setup(cfg.clone());\n\n\n\n // update of other module's config should not effect lock manager config\n\n let mut incoming = cfg.clone();\n\n incoming.raft_store.raft_log_gc_threshold = 2000;\n\n let rollback = cfg_controller.update_or_rollback(incoming).unwrap();\n\n assert_eq!(rollback.right(), Some(true));\n\n validate_waiter(&waiter, move |timeout: u64, delay: u64| {\n\n assert_eq!(timeout, DEFAULT_TIMEOUT);\n\n assert_eq!(delay, DEFAULT_DELAY);\n\n });\n\n validate_dead_lock(&deadlock, move |ttl: u64| {\n\n assert_eq!(ttl, DEFAULT_TIMEOUT);\n", "file_path": "tests/integrations/config/dynamic/pessimistic_txn.rs", "rank": 25, "score": 306391.05722698936 }, { "content": "fn validate_dead_lock<F>(router: &DetectorScheduler, f: F)\n\nwhere\n\n F: FnOnce(u64) + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n router.validate(Box::new(move |v| {\n\n f(v);\n\n tx.send(()).unwrap();\n\n }));\n\n rx.recv_timeout(Duration::from_secs(3)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/pessimistic_txn.rs", "rank": 26, "score": 303187.6828893592 }, { "content": "pub fn bench_mvcc<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\"mvcc_prewrite\", mvcc_prewrite, configs.to_owned());\n\n c.bench_function_over_inputs(\"mvcc_commit\", mvcc_commit, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"mvcc_rollback_prewrote\",\n\n mvcc_rollback_prewrote,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"mvcc_rollback_conflict\",\n\n mvcc_rollback_conflict,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"mvcc_rollback_non_prewrote\",\n\n mvcc_rollback_non_prewrote,\n\n configs.to_owned(),\n\n );\n\n 
c.bench_function_over_inputs(\"mvcc_load_lock\", mvcc_reader_load_lock, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"mvcc_seek_write\",\n\n mvcc_reader_seek_write,\n\n configs.to_owned(),\n\n );\n\n}\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 27, "score": 301488.2733806677 }, { "content": "pub fn bench_engine<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\n\n \"engine_get(exclude snapshot)\",\n\n bench_engine_get,\n\n configs.to_vec(),\n\n );\n\n c.bench_function_over_inputs(\"engine_put\", bench_engine_put, configs.to_owned());\n\n c.bench_function_over_inputs(\"engine_snapshot\", bench_engine_snapshot, configs.to_owned());\n\n}\n", "file_path": "tests/benches/hierarchy/engine/mod.rs", "rank": 28, "score": 301488.2733806677 }, { "content": "pub fn bench_txn<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\"txn_prewrite\", txn_prewrite, configs.to_owned());\n\n c.bench_function_over_inputs(\"txn_commit\", txn_commit, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_prewrote\",\n\n txn_rollback_prewrote,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_conflict\",\n\n txn_rollback_conflict,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_non_prewrote\",\n\n txn_rollback_non_prewrote,\n\n configs.to_owned(),\n\n );\n\n}\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 29, "score": 301488.2733806678 }, { "content": "#[allow(dead_code)]\n\npub fn overwrite_config_with_cmd_args(config: &mut TiKvConfig, matches: &ArgMatches<'_>) {\n\n if matches.is_present(\"enable-dynamic-config\") {\n\n config.dynamic_config = true;\n\n }\n\n\n\n if let Some(level) = matches.value_of(\"log-level\") {\n\n config.log_level = logger::get_level_by_string(level).unwrap();\n\n }\n\n\n\n if let Some(file) = 
matches.value_of(\"log-file\") {\n\n config.log_file = file.to_owned();\n\n }\n\n\n\n if let Some(addr) = matches.value_of(\"addr\") {\n\n config.server.addr = addr.to_owned();\n\n }\n\n\n\n if let Some(advertise_addr) = matches.value_of(\"advertise-addr\") {\n\n config.server.advertise_addr = advertise_addr.to_owned();\n\n }\n", "file_path": "cmd/src/setup.rs", "rank": 30, "score": 298360.346437466 }, { "content": "pub fn new_store_cfg() -> Config {\n\n Config {\n\n sync_log: false,\n\n raft_base_tick_interval: ReadableDuration::millis(10),\n\n raft_heartbeat_ticks: 2,\n\n raft_election_timeout_ticks: 25,\n\n raft_log_gc_tick_interval: ReadableDuration::millis(100),\n\n raft_log_gc_threshold: 1,\n\n // Use a value of 3 seconds as max_leader_missing_duration just for test.\n\n // In production environment, the value of max_leader_missing_duration\n\n // should be configured far beyond the election timeout.\n\n max_leader_missing_duration: ReadableDuration::secs(2),\n\n // To make a valid config, use a value of 2 seconds as\n\n // abnormal_leader_missing_duration and set\n\n // peer_stale_state_check_interval to 1 second.\n\n abnormal_leader_missing_duration: ReadableDuration::millis(1500),\n\n peer_stale_state_check_interval: ReadableDuration::secs(1),\n\n pd_heartbeat_tick_interval: ReadableDuration::millis(20),\n\n region_split_check_diff: ReadableSize(10000),\n\n report_region_flow_interval: ReadableDuration::millis(100),\n\n raft_store_max_leader_lease: ReadableDuration::millis(250),\n\n raft_reject_transfer_leader_duration: ReadableDuration::secs(0),\n\n clean_stale_peer_delay: ReadableDuration::secs(0),\n\n allow_remove_leader: true,\n\n merge_check_tick_interval: ReadableDuration::millis(100),\n\n ..Config::default()\n\n }\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 31, "score": 298049.07809725247 }, { "content": "pub fn canonicalize_sub_path(path: &str, sub_path: &str) -> Result<String, Box<dyn Error>> {\n\n let parent = 
Path::new(path);\n\n let p = parent.join(Path::new(sub_path));\n\n if p.exists() && p.is_file() {\n\n return Err(format!(\"{}/{} is not a directory!\", path, sub_path).into());\n\n }\n\n if !p.exists() {\n\n fs::create_dir_all(p.as_path())?;\n\n }\n\n Ok(format!(\"{}\", p.canonicalize()?.display()))\n\n}\n\n\n", "file_path": "components/tikv_util/src/config.rs", "rank": 32, "score": 295739.7106916761 }, { "content": "// BatchCommandsNotify is used to make business pool notifiy completion queues directly.\n\nstruct BatchCommandsNotify<F>(Arc<Mutex<Option<Spawn<F>>>>);\n\nimpl<F> Clone for BatchCommandsNotify<F> {\n\n fn clone(&self) -> BatchCommandsNotify<F> {\n\n BatchCommandsNotify(Arc::clone(&self.0))\n\n }\n\n}\n\nimpl<F> Notify for BatchCommandsNotify<F>\n\nwhere\n\n F: Future<Item = (), Error = ()> + Send + 'static,\n\n{\n\n fn notify(&self, id: usize) {\n\n let n = Arc::new(self.clone());\n\n let mut s = self.0.lock().unwrap();\n\n match s.as_mut().map(|spawn| spawn.poll_future_notify(&n, id)) {\n\n Some(Ok(Async::NotReady)) | None => {}\n\n _ => *s = None,\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/server/service/kv.rs", "rank": 33, "score": 289844.0830911214 }, { "content": "pub fn canonicalize_path(path: &str) -> Result<String, Box<dyn Error>> {\n\n canonicalize_sub_path(path, \"\")\n\n}\n\n\n", "file_path": "components/tikv_util/src/config.rs", "rank": 34, "score": 288094.8490891623 }, { "content": "// Convert `ConfigValue` to formatted String that can pass to `DB::set_db_options`\n\nfn config_value_to_string(config_change: Vec<(String, ConfigValue)>) -> Vec<(String, String)> {\n\n config_change\n\n .into_iter()\n\n .map(|(name, value)| {\n\n let v = match value {\n\n d @ ConfigValue::Duration(_) => {\n\n let d: ReadableDuration = d.into();\n\n d.as_secs().to_string()\n\n }\n\n s @ ConfigValue::Size(_) => {\n\n let s: ReadableSize = s.into();\n\n s.0.to_string()\n\n }\n\n ConfigValue::Module(_) => unreachable!(),\n\n v => format!(\"{}\", v),\n\n };\n\n 
(name, v)\n\n })\n\n .collect()\n\n}\n", "file_path": "src/config.rs", "rank": 35, "score": 286449.75405912823 }, { "content": "#[inline]\n\nfn substring_index_positive(s: &str, delim: &str, count: usize) -> String {\n\n let mut bg = 0;\n\n let mut cnt = 0;\n\n let mut last = 0;\n\n while cnt < count {\n\n if let Some(idx) = s[bg..].find(delim) {\n\n last = bg + idx;\n\n bg = last + delim.len();\n\n cnt += 1;\n\n } else {\n\n last = s.len();\n\n break;\n\n }\n\n }\n\n s[..last].to_string()\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr/builtin_string.rs", "rank": 36, "score": 285409.83519751305 }, { "content": "#[inline]\n\nfn substring_index_negative(s: &str, delim: &str, count: usize) -> String {\n\n let mut bg = 0;\n\n let mut positions = VecDeque::with_capacity(count.min(128));\n\n positions.push_back(0);\n\n while let Some(idx) = s[bg..].find(delim) {\n\n bg = bg + idx + delim.len();\n\n if positions.len() == count {\n\n positions.pop_front();\n\n }\n\n positions.push_back(bg);\n\n }\n\n s[positions[0]..].to_string()\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr/builtin_string.rs", "rank": 37, "score": 285409.83519751305 }, { "content": "pub fn new_tikv_config(cluster_id: u64) -> TiKvConfig {\n\n TiKvConfig {\n\n storage: StorageConfig {\n\n scheduler_worker_pool_size: 1,\n\n scheduler_concurrency: 10,\n\n ..StorageConfig::default()\n\n },\n\n server: new_server_config(cluster_id),\n\n raft_store: new_store_cfg(),\n\n readpool: new_readpool_cfg(),\n\n pessimistic_txn: new_pessimistic_txn_cfg(),\n\n ..TiKvConfig::default()\n\n }\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 38, "score": 283898.6226314959 }, { "content": "fn validate_apply<F>(router: &ApplyRouter, region_id: u64, validate: F)\n\nwhere\n\n F: FnOnce(bool) + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n router.schedule_task(\n\n region_id,\n\n ApplyTask::Validate(\n\n region_id,\n\n Box::new(move |(_, sync_log): (_, bool)| {\n\n 
validate(sync_log);\n\n tx.send(()).unwrap();\n\n }),\n\n ),\n\n );\n\n rx.recv_timeout(Duration::from_secs(3)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/raftstore.rs", "rank": 39, "score": 283234.2931349269 }, { "content": "pub fn tls_processing_read_observe_duration<F, R>(cmd: &str, f: F) -> R\n\nwhere\n\n F: FnOnce() -> R,\n\n{\n\n TLS_STORAGE_METRICS.with(|m| {\n\n let now = tikv_util::time::Instant::now_coarse();\n\n let ret = f();\n\n m.borrow_mut()\n\n .local_sched_processing_read_histogram_vec\n\n .with_label_values(&[cmd])\n\n .observe(now.elapsed_secs());\n\n ret\n\n })\n\n}\n\n\n", "file_path": "src/storage/metrics.rs", "rank": 40, "score": 275436.07755074615 }, { "content": "#[inline]\n\nfn get_name(command: &str) -> String {\n\n if command != \"\" {\n\n return command.to_owned();\n\n }\n\n String::from(\"anony\")\n\n}\n\n\n", "file_path": "components/tikv_util/src/metrics/threads_linux.rs", "rank": 41, "score": 275190.8523352124 }, { "content": "fn if_null<F, T>(mut f: F) -> Result<Option<T>>\n\nwhere\n\n F: FnMut(usize) -> Result<Option<T>>,\n\n{\n\n let arg0 = f(0)?;\n\n if arg0.is_some() {\n\n return Ok(arg0);\n\n }\n\n f(1)\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr/builtin_control.rs", "rank": 42, "score": 274887.04130177014 }, { "content": "pub fn unquote_string(s: &str) -> Result<String> {\n\n let mut ret = String::with_capacity(s.len());\n\n let mut chars = s.chars();\n\n while let Some(ch) = chars.next() {\n\n if ch == '\\\\' {\n\n let c = match chars.next() {\n\n Some(c) => c,\n\n None => return Err(box_err!(\"Incomplete escaped sequence\")),\n\n };\n\n match c {\n\n '\"' => ret.push('\"'),\n\n 'b' => ret.push(CHAR_BACKSPACE),\n\n 'f' => ret.push(CHAR_FORMFEED),\n\n 'n' => ret.push(CHAR_LINEFEED),\n\n 'r' => ret.push(CHAR_CARRIAGE_RETURN),\n\n 't' => ret.push(CHAR_HORIZONTAL_TAB),\n\n '\\\\' => ret.push('\\\\'),\n\n 'u' => {\n\n let b = chars.as_str().as_bytes();\n\n if b.len() < 
ESCAPED_UNICODE_BYTES_SIZE {\n", "file_path": "components/tidb_query/src/codec/mysql/json/json_unquote.rs", "rank": 43, "score": 269307.2534127765 }, { "content": "#[allow(dead_code)]\n\npub fn initial_metric(cfg: &MetricConfig, node_id: Option<u64>) {\n\n tikv_util::metrics::monitor_threads(\"tikv\")\n\n .unwrap_or_else(|e| fatal!(\"failed to start monitor thread: {}\", e));\n\n tikv_util::metrics::monitor_allocator_stats(\"tikv\")\n\n .unwrap_or_else(|e| fatal!(\"failed to monitor allocator stats: {}\", e));\n\n\n\n if cfg.interval.as_secs() == 0 || cfg.address.is_empty() {\n\n return;\n\n }\n\n\n\n let mut push_job = cfg.job.clone();\n\n if let Some(id) = node_id {\n\n push_job.push_str(&format!(\"_{}\", id));\n\n }\n\n\n\n info!(\"start prometheus client\");\n\n tikv_util::metrics::run_prometheus(cfg.interval.0, &cfg.address, &push_job);\n\n}\n\n\n", "file_path": "cmd/src/setup.rs", "rank": 44, "score": 268993.8879970934 }, { "content": "#[derive(Debug)]\n\nstruct SetConfig<F> {\n\n factory: F,\n\n nodes: usize,\n\n value_size: usize,\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 45, "score": 268980.24714757316 }, { "content": "#[derive(Debug)]\n\nstruct GetConfig<F> {\n\n factory: F,\n\n nodes: usize,\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 46, "score": 268980.24714757316 }, { "content": "#[derive(Debug)]\n\nstruct DeleteConfig<F> {\n\n factory: F,\n\n nodes: usize,\n\n}\n\n\n", "file_path": "tests/benches/raftstore/mod.rs", "rank": 47, "score": 268980.24714757316 }, { "content": "fn no_exp_float_str_to_int_str(valid_float: &str, mut dot_idx: usize) -> Result<Cow<'_, str>> {\n\n // According to TiDB's impl\n\n // 1. If there is digit after dot, round.\n\n // 2. Only when the final result <0, add '-' in the front of it.\n\n // 3. 
The result has no '+'.\n\n\n\n let digits = if valid_float.starts_with('+') || valid_float.starts_with('-') {\n\n dot_idx -= 1;\n\n &valid_float[1..]\n\n } else {\n\n valid_float\n\n };\n\n // TODO: may here we can use Cow to avoid some copy below\n\n let int_str = if valid_float.starts_with('-') {\n\n if dot_idx == 0 {\n\n \"-0\"\n\n } else {\n\n // the valid_float[0] is '-', so there is `dot_idx-=1` above,\n\n // so we need valid_float[..(dot_idx+1)] here.\n\n &valid_float[..=dot_idx]\n", "file_path": "components/tidb_query/src/codec/convert.rs", "rank": 48, "score": 262424.11162477743 }, { "content": "fn validate_label(s: &str, tp: &str) -> Result<()> {\n\n let report_err = || {\n\n box_err!(\n\n \"store label {}: {:?} not match ^[a-zA-Z0-9]([a-zA-Z0-9-._]*[a-zA-Z0-9])?\",\n\n tp,\n\n s\n\n )\n\n };\n\n if s.is_empty() {\n\n return Err(report_err());\n\n }\n\n let mut chrs = s.chars();\n\n let first_char = chrs.next().unwrap();\n\n if !first_char.is_ascii_alphanumeric() {\n\n return Err(report_err());\n\n }\n\n let last_char = match chrs.next_back() {\n\n None => return Ok(()),\n\n Some(c) => c,\n\n };\n", "file_path": "src/server/config.rs", "rank": 49, "score": 261787.61444396476 }, { "content": "pub fn bind_term(resp: &mut RaftCmdResponse, term: u64) {\n\n if term == 0 {\n\n return;\n\n }\n\n\n\n resp.mut_header().set_current_term(term);\n\n}\n\n\n", "file_path": "components/raftstore/src/store/cmd_resp.rs", "rank": 50, "score": 261026.08343356234 }, { "content": "pub fn bind_error(resp: &mut RaftCmdResponse, err: Error) {\n\n resp.mut_header().set_error(err.into());\n\n}\n\n\n", "file_path": "components/raftstore/src/store/cmd_resp.rs", "rank": 51, "score": 261026.08343356234 }, { "content": "fn validate_store<F>(router: &RaftRouter, f: F)\n\nwhere\n\n F: FnOnce(&Config) + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n router\n\n .send_control(StoreMsg::Validate(Box::new(move |cfg: &Config| {\n\n f(cfg);\n\n tx.send(()).unwrap();\n\n })))\n\n 
.unwrap();\n\n rx.recv_timeout(Duration::from_secs(3)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/raftstore.rs", "rank": 52, "score": 259133.82559963525 }, { "content": "pub fn new_store(store_id: u64, addr: String) -> metapb::Store {\n\n let mut store = metapb::Store::default();\n\n store.set_id(store_id);\n\n store.set_address(addr);\n\n\n\n store\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 53, "score": 258119.918648228 }, { "content": "#[inline]\n\npub fn start(_name: impl AsRef<str>) -> bool {\n\n // Do nothing\n\n false\n\n}\n\n\n\n/// Stop profiling. Always returns false if `profiling` feature is not enabled.\n", "file_path": "components/profiler/src/profiler_dummy.rs", "rank": 54, "score": 257203.32001964087 }, { "content": "#[inline]\n\npub fn start(name: impl AsRef<str>) -> bool {\n\n let mut profiler = ACTIVE_PROFILER.lock().unwrap();\n\n\n\n // Profiling in progress.\n\n if *profiler != Profiler::None {\n\n return false;\n\n }\n\n\n\n if valgrind_request::running_on_valgrind() != 0 {\n\n *profiler = Profiler::CallGrind;\n\n CallgrindClientRequest::start();\n\n } else {\n\n *profiler = Profiler::GPerfTools;\n\n gperftools::PROFILER\n\n .lock()\n\n .unwrap()\n\n .start(name.as_ref())\n\n .unwrap();\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Stop profiling. Returns false if failed, i.e. there is no profiling in progress.\n\n///\n\n/// When `profiling` feature is not enabled, this function will do nothing and there is totally\n\n/// zero cost.\n", "file_path": "components/profiler/src/profiler_unix.rs", "rank": 55, "score": 257203.32001964087 }, { "content": "fn check_system_config(config: &TiKvConfig) {\n\n info!(\"beginning system configuration check\");\n\n let mut rocksdb_max_open_files = config.rocksdb.max_open_files;\n\n if config.rocksdb.titan.enabled {\n\n // Titan engine maintains yet another pool of blob files and uses the same max\n\n // number of open files setup as rocksdb does. 
So we double the max required\n\n // open files here\n\n rocksdb_max_open_files *= 2;\n\n }\n\n if let Err(e) = tikv_util::config::check_max_open_fds(\n\n RESERVED_OPEN_FDS + (rocksdb_max_open_files + config.raftdb.max_open_files) as u64,\n\n ) {\n\n fatal!(\"{}\", e);\n\n }\n\n\n\n // Check RocksDB data dir\n\n if let Err(e) = tikv_util::config::check_data_dir(&config.storage.data_dir) {\n\n warn!(\n\n \"check: rocksdb-data-dir\";\n\n \"path\" => &config.storage.data_dir,\n", "file_path": "cmd/src/server.rs", "rank": 56, "score": 256590.9462702198 }, { "content": "fn validate<F>(scheduler: &Scheduler<Task>, f: F)\n\nwhere\n\n F: FnOnce(&Config) + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n scheduler\n\n .schedule(Task::Validate(Box::new(move |cfg: &Config| {\n\n f(cfg);\n\n tx.send(()).unwrap();\n\n })))\n\n .unwrap();\n\n rx.recv_timeout(Duration::from_secs(1)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/split_check.rs", "rank": 57, "score": 256105.28732319747 }, { "content": "fn setup(cfg: TiKvConfig, engine: Arc<DB>) -> (ConfigController, Worker<Task>) {\n\n let (router, _) = sync_channel(1);\n\n let runner = Runner::new(\n\n engine,\n\n router.clone(),\n\n CoprocessorHost::new(router),\n\n cfg.coprocessor.clone(),\n\n );\n\n let mut worker: Worker<Task> = Worker::new(\"split-check-config\");\n\n worker.start(runner).unwrap();\n\n\n\n let mut cfg_controller = ConfigController::new(cfg, Default::default());\n\n cfg_controller.register(\n\n Module::Coprocessor,\n\n Box::new(SplitCheckConfigManager(worker.scheduler())),\n\n );\n\n\n\n (cfg_controller, worker)\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/split_check.rs", "rank": 58, "score": 255333.52279928065 }, { "content": "fn pre_check(command: &mut Command, hint: &str) -> Result<(), Error> {\n\n let check = command\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .status()\n\n .unwrap();\n\n if !check.success() {\n\n Err(format_err!(\n\n 
\"Pre-checking for fuzzing failed. Consider run `{}` before fuzzing.\",\n\n hint\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "fuzz/cli.rs", "rank": 59, "score": 255190.71395331973 }, { "content": "/// Returns the tikv version information.\n\npub fn tikv_version_info() -> String {\n\n let fallback = \"Unknown (env var does not exist when building)\";\n\n format!(\n\n \"\\nRelease Version: {}\\\n\n \\nGit Commit Hash: {}\\\n\n \\nGit Commit Branch: {}\\\n\n \\nUTC Build Time: {}\\\n\n \\nRust Version: {}\\\n\n \\nEnable Features: {}\\\n\n \\nProfile: {}\",\n\n env!(\"CARGO_PKG_VERSION\"),\n\n option_env!(\"TIKV_BUILD_GIT_HASH\").unwrap_or(fallback),\n\n option_env!(\"TIKV_BUILD_GIT_BRANCH\").unwrap_or(fallback),\n\n option_env!(\"TIKV_BUILD_TIME\").unwrap_or(fallback),\n\n option_env!(\"TIKV_BUILD_RUSTC_VERSION\").unwrap_or(fallback),\n\n option_env!(\"TIKV_ENABLE_FEATURES\")\n\n .unwrap_or(fallback)\n\n .trim(),\n\n option_env!(\"TIKV_PROFILE\").unwrap_or(fallback),\n\n )\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 254937.81384634756 }, { "content": "#[inline]\n\nfn read_num_bytes<T, F>(size: usize, data: &mut &[u8], f: F) -> Result<T>\n\nwhere\n\n F: Fn(&[u8]) -> T,\n\n{\n\n if data.len() >= size {\n\n let buf = &data[..size];\n\n *data = &data[size..];\n\n return Ok(f(buf));\n\n }\n\n Err(Error::unexpected_eof())\n\n}\n\n\n\n/// Decodes value encoded by `encode_i64` before.\n", "file_path": "components/tikv_util/src/codec/number.rs", "rank": 61, "score": 254203.51688579685 }, { "content": "fn extract_num_str(s: &str, from_base: IntWithSign) -> Option<(String, bool)> {\n\n let mut iter = s.chars().peekable();\n\n let head = *iter.peek().unwrap();\n\n let mut is_neg = false;\n\n if head == '+' || head == '-' {\n\n is_neg = head == '-';\n\n iter.next();\n\n }\n\n let IntWithSign(base, _) = from_base;\n\n let s = iter\n\n .take_while(|x| x.is_digit(base as u32))\n\n .collect::<String>();\n\n if s.is_empty() {\n\n None\n\n } else 
{\n\n Some((s, is_neg))\n\n }\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr_util/conv.rs", "rank": 62, "score": 254067.876966702 }, { "content": "/// Sanitizes the thread name. Keeps `a-zA-Z0-9_:`, replaces `-` and ` ` with `_`, and drops the others.\n\n///\n\n/// Examples:\n\n///\n\n/// ```ignore\n\n/// assert_eq!(sanitize_thread_name(0, \"ok123\"), \"ok123\");\n\n/// assert_eq!(sanitize_thread_name(0, \"Az_1\"), \"Az_1\");\n\n/// assert_eq!(sanitize_thread_name(0, \"a-b\"), \"a_b\");\n\n/// assert_eq!(sanitize_thread_name(0, \"a b\"), \"a_b\");\n\n/// assert_eq!(sanitize_thread_name(1, \"@123\"), \"123\");\n\n/// assert_eq!(sanitize_thread_name(1, \"@@@@\"), \"1\");\n\n/// ```\n\nfn sanitize_thread_name(tid: pid_t, raw: &str) -> String {\n\n let mut name = String::with_capacity(raw.len());\n\n // sanitize thread name.\n\n for c in raw.chars() {\n\n match c {\n\n // Prometheus label characters `[a-zA-Z0-9_:]`\n\n 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | ':' => {\n\n name.push(c);\n\n }\n\n '-' | ' ' => {\n\n name.push('_');\n\n }\n\n _ => (),\n\n }\n\n }\n\n if name.is_empty() {\n\n name = format!(\"{}\", tid)\n\n }\n\n name\n\n}\n\n\n", "file_path": "components/tikv_util/src/metrics/threads_linux.rs", "rank": 63, "score": 253875.37988921884 }, { "content": "fn do_coalesce<'a, F, T>(expr: &'a ScalarFunc, mut f: F) -> Result<Option<T>>\n\nwhere\n\n F: FnMut(&'a Expression) -> Result<Option<T>>,\n\n{\n\n for exp in &expr.children {\n\n let v = f(exp)?;\n\n if v.is_some() {\n\n return Ok(v);\n\n }\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr/builtin_compare.rs", "rank": 64, "score": 253460.02040724183 }, { "content": "fn test_apply_new_version_snapshot<T: Simulator>(cluster: &mut Cluster<T>) {\n\n // truncate the log quickly so that we can force sending snapshot.\n\n cluster.cfg.raft_store.raft_log_gc_tick_interval = ReadableDuration::millis(20);\n\n cluster.cfg.raft_store.raft_log_gc_count_limit = 5;\n\n 
cluster.cfg.raft_store.merge_max_log_gap = 1;\n\n cluster.cfg.raft_store.raft_log_gc_threshold = 5;\n\n\n\n // We use three nodes([1, 2, 3]) for this test.\n\n cluster.run();\n\n\n\n // guarantee node 1 is leader\n\n cluster.must_transfer_leader(1, new_peer(1, 1));\n\n cluster.must_put(b\"k0\", b\"v0\");\n\n assert_eq!(cluster.leader_of_region(1), Some(new_peer(1, 1)));\n\n\n\n let pd_client = Arc::clone(&cluster.pd_client);\n\n\n\n // isolate node 3 for region 1.\n\n cluster.add_send_filter(CloneFilterFactory(RegionPacketFilter::new(1, 3)));\n\n cluster.must_put(b\"k1\", b\"v1\");\n", "file_path": "tests/integrations/raftstore/test_split_region.rs", "rank": 65, "score": 253458.75212532323 }, { "content": "fn validate_waiter<F>(router: &WaiterMgrScheduler, f: F)\n\nwhere\n\n F: FnOnce(u64, u64) + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n router.validate(Box::new(move |v1, v2| {\n\n f(v1, v2);\n\n tx.send(()).unwrap();\n\n }));\n\n rx.recv_timeout(Duration::from_secs(3)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/pessimistic_txn.rs", "rank": 66, "score": 253433.3513480396 }, { "content": "fn deadlock_detector_leader_must_be(cluster: &mut Cluster<ServerCluster>, store_id: u64) {\n\n let leader_region = cluster.get_region(b\"\");\n\n assert_eq!(\n\n cluster\n\n .leader_of_region(leader_region.get_id())\n\n .unwrap()\n\n .get_store_id(),\n\n store_id\n\n );\n\n let leader_peer = find_peer_of_store(&leader_region, store_id);\n\n cluster\n\n .pd_client\n\n .region_leader_must_be(leader_region.get_id(), leader_peer);\n\n}\n\n\n", "file_path": "tests/integrations/server/lock_manager.rs", "rank": 67, "score": 253343.5974092665 }, { "content": "/// Various sanity-checks and logging before running a server.\n\n///\n\n/// Warnings are logged.\n\n///\n\n/// # Logs\n\n///\n\n/// The presence of these environment variables that affect the database\n\n/// behavior is logged.\n\n///\n\n/// - `GRPC_POLL_STRATEGY`\n\n/// - `http_proxy` 
and `https_proxy`\n\n///\n\n/// # Warnings\n\n///\n\n/// - if `net.core.somaxconn` < 32768\n\n/// - if `net.ipv4.tcp_syncookies` is not 0\n\n/// - if `vm.swappiness` is not 0\n\n/// - if data directories are not on SSDs\n\n/// - if the \"TZ\" environment variable is not set on unix\n\nfn pre_start(config: &TiKvConfig) {\n\n // Sets the global logger ASAP.\n\n // It is okay to use the config w/o `validate()`,\n\n // because `initial_logger()` handles various conditions.\n\n // TODO: currently the logger config has to be provided\n\n // through command line. Consider remove this constraint.\n\n initial_logger(&config);\n\n\n\n check_environment_variables();\n\n\n\n for e in tikv_util::config::check_kernel() {\n\n warn!(\n\n \"check: kernel\";\n\n \"err\" => %e\n\n );\n\n }\n\n}\n\n\n", "file_path": "cmd/src/server.rs", "rank": 68, "score": 251887.50156731144 }, { "content": "fn check_cluster(cluster: &mut Cluster<impl Simulator>, k: &[u8], v: &[u8], all_committed: bool) {\n\n let region = cluster.pd_client.get_region(k).unwrap();\n\n let mut tried_cnt = 0;\n\n let leader = loop {\n\n match cluster.leader_of_region(region.get_id()) {\n\n None => {\n\n tried_cnt += 1;\n\n if tried_cnt >= 3 {\n\n panic!(\"leader should be elected\");\n\n }\n\n continue;\n\n }\n\n Some(l) => break l,\n\n }\n\n };\n\n for i in 1..=region.get_peers().len() as u64 {\n\n let engine = cluster.get_engine(i);\n\n if all_committed || i == leader.get_store_id() {\n\n must_get_equal(&engine, k, v);\n\n } else {\n\n must_get_none(&engine, k);\n\n }\n\n }\n\n}\n\n\n\n/// TiKV enables lazy broadcast commit optimization, which can delay split\n\n/// on follower node. So election of new region will delay. 
We need to make\n\n/// sure broadcast commit is disabled when split.\n", "file_path": "tests/integrations/raftstore/test_split_region.rs", "rank": 69, "score": 251486.5585984678 }, { "content": "fn download_and_extract_file(url: &str) -> io::Result<String> {\n\n let mut dl_child = Command::new(\"curl\")\n\n .arg(url)\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::null())\n\n .spawn()?;\n\n let mut tar_child = Command::new(\"tar\")\n\n .args(&[\"xzf\", \"-\", \"--to-stdout\"])\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::null())\n\n .spawn()?;\n\n\n\n let mut dl_output = dl_child.stdout.take().unwrap();\n\n let mut tar_input = tar_child.stdin.take().unwrap();\n\n let th = thread::spawn(move || -> io::Result<()> {\n\n let mut buf = vec![0; 4096];\n\n loop {\n\n let nbytes = dl_output.read(&mut buf)?;\n\n if nbytes > 0 {\n", "file_path": "tests/benches/misc/coprocessor/codec/mysql/json/mod.rs", "rank": 70, "score": 251144.38071469252 }, { "content": "/// Comparing two `Version` with the assumption of `global` and `local`\n\n/// should be monotonically increased, if `global` or `local` of _current config_\n\n/// less than _incoming config_ means there are update in _incoming config_\n\npub fn cmp_version(current: &configpb::Version, incoming: &configpb::Version) -> Ordering {\n\n match (\n\n Ord::cmp(&current.local, &incoming.local),\n\n Ord::cmp(&current.global, &incoming.global),\n\n ) {\n\n (Ordering::Equal, Ordering::Equal) => Ordering::Equal,\n\n (Ordering::Less, _) | (_, Ordering::Less) => Ordering::Less,\n\n _ => Ordering::Greater,\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, Debug, Clone)]\n\npub enum Module {\n\n Readpool,\n\n Server,\n\n Metric,\n\n Raftstore,\n\n Coprocessor,\n\n Pd,\n\n Rocksdb,\n", "file_path": "src/config.rs", "rank": 71, "score": 250841.57715357887 }, { "content": "/// Takes the peer address (for sending raft messages) from a store.\n\npub fn take_peer_address(store: &mut metapb::Store) -> String {\n\n if 
!store.get_peer_address().is_empty() {\n\n store.take_peer_address()\n\n } else {\n\n store.take_address()\n\n }\n\n}\n", "file_path": "components/pd_client/src/lib.rs", "rank": 72, "score": 250806.05714293552 }, { "content": "fn validate<F>(scheduler: &FutureScheduler<GcTask>, f: F)\n\nwhere\n\n F: FnOnce(&GcConfig, &Limiter) + Send + 'static,\n\n{\n\n let (tx, rx) = channel();\n\n scheduler\n\n .schedule(GcTask::Validate(Box::new(\n\n move |cfg: &GcConfig, limiter: &Limiter| {\n\n f(cfg, limiter);\n\n tx.send(()).unwrap();\n\n },\n\n )))\n\n .unwrap();\n\n rx.recv_timeout(Duration::from_secs(3)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/gc_worker.rs", "rank": 73, "score": 250404.81307160182 }, { "content": "pub fn get_valid_float_prefix<'a>(ctx: &mut EvalContext, s: &'a str) -> Result<&'a str> {\n\n let mut saw_dot = false;\n\n let mut saw_digit = false;\n\n let mut valid_len = 0;\n\n let mut e_idx = 0;\n\n for (i, c) in s.chars().enumerate() {\n\n if c == '+' || c == '-' {\n\n if i != 0 && (e_idx == 0 || i != e_idx + 1) {\n\n // \"1e+1\" is valid.\n\n break;\n\n }\n\n } else if c == '.' 
{\n\n if saw_dot || e_idx > 0 {\n\n // \"1.1.\" or \"1e1.1\"\n\n break;\n\n }\n\n saw_dot = true;\n\n if saw_digit {\n\n // \"123.\" is valid.\n\n valid_len = i + 1;\n", "file_path": "components/tidb_query/src/codec/convert.rs", "rank": 74, "score": 248875.92223551834 }, { "content": "pub fn new_security_cfg() -> SecurityConfig {\n\n let p = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n SecurityConfig {\n\n ca_path: format!(\"{}\", p.join(\"data/ca.crt\").display()),\n\n cert_path: format!(\"{}\", p.join(\"data/server.crt\").display()),\n\n key_path: format!(\"{}\", p.join(\"data/server.pem\").display()),\n\n override_ssl_target: \"example.com\".to_owned(),\n\n cipher_file: \"\".to_owned(),\n\n }\n\n}\n", "file_path": "components/test_util/src/security.rs", "rank": 75, "score": 248291.05904467322 }, { "content": "fn get_file_count(data_path: &str, extension: &str) -> Result<usize, ConfigError> {\n\n let op = \"data-dir.file-count.get\";\n\n let dir = fs::read_dir(data_path).map_err(|e| {\n\n ConfigError::FileSystem(format!(\n\n \"{}: read file dir {:?} failed: {:?}\",\n\n op, data_path, e\n\n ))\n\n })?;\n\n let mut file_count = 0;\n\n for entry in dir {\n\n let entry = entry.map_err(|e| {\n\n ConfigError::FileSystem(format!(\n\n \"{}: read file in file dir {:?} failed: {:?}\",\n\n op, data_path, e\n\n ))\n\n })?;\n\n let path = entry.path();\n\n if path.is_file() {\n\n if let Some(ext) = path.extension() {\n\n if extension.is_empty() || extension == ext {\n\n file_count += 1;\n\n }\n\n } else if extension.is_empty() {\n\n file_count += 1;\n\n }\n\n }\n\n }\n\n Ok(file_count)\n\n}\n\n\n", "file_path": "components/tikv_util/src/config.rs", "rank": 76, "score": 248219.56409911334 }, { "content": "pub fn load_configs<E: Engine, F: EngineFactory<E>>(engine_factory: F) -> Vec<BenchConfig<F>> {\n\n let key_lengths = DEFAULT_KEY_LENGTHS;\n\n let value_lengths = DEFAULT_VALUE_LENGTHS;\n\n let mut configs = vec![];\n\n\n\n for &kl in &key_lengths {\n\n for &vl in 
&value_lengths {\n\n configs.push(BenchConfig {\n\n key_length: kl,\n\n value_length: vl,\n\n engine_factory,\n\n })\n\n }\n\n }\n\n configs\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mod.rs", "rank": 77, "score": 247192.20107245474 }, { "content": "pub fn new_server_config(cluster_id: u64) -> ServerConfig {\n\n ServerConfig {\n\n cluster_id,\n\n addr: \"127.0.0.1:0\".to_owned(),\n\n grpc_concurrency: 1,\n\n // Considering connection selection algo is involved, maybe\n\n // use 2 or larger value here?\n\n grpc_raft_conn_num: 1,\n\n ..ServerConfig::default()\n\n }\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 78, "score": 247144.63891276 }, { "content": "pub fn create_apply_batch_system(cfg: &Config) -> (ApplyRouter, ApplyBatchSystem) {\n\n let (tx, _) = loose_bounded(usize::MAX);\n\n let (router, system) = batch_system::create_system(\n\n cfg.apply_pool_size,\n\n cfg.apply_max_batch_size,\n\n tx,\n\n Box::new(ControlFsm),\n\n );\n\n (ApplyRouter { router }, ApplyBatchSystem { system })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::cell::RefCell;\n\n use std::rc::Rc;\n\n use std::sync::atomic::*;\n\n use std::sync::*;\n\n use std::time::*;\n\n\n\n use crate::coprocessor::*;\n", "file_path": "components/raftstore/src/store/fsm/apply.rs", "rank": 79, "score": 246196.3345362415 }, { "content": "pub fn create_raft_batch_system(cfg: &Config) -> (RaftRouter, RaftBatchSystem) {\n\n let (store_tx, store_fsm) = StoreFsm::new(cfg);\n\n let (apply_router, apply_system) = create_apply_batch_system(&cfg);\n\n let (router, system) = batch_system::create_system(\n\n cfg.store_pool_size,\n\n cfg.store_max_batch_size,\n\n store_tx,\n\n store_fsm,\n\n );\n\n let raft_router = RaftRouter { router };\n\n let system = RaftBatchSystem {\n\n system,\n\n workers: None,\n\n apply_router,\n\n apply_system,\n\n router: raft_router.clone(),\n\n };\n\n (raft_router, system)\n\n}\n\n\n", "file_path": "components/raftstore/src/store/fsm/store.rs", 
"rank": 80, "score": 246196.3345362415 }, { "content": "// check dir is empty of file with certain extension, empty string for any extension.\n\npub fn check_data_dir_empty(data_path: &str, extension: &str) -> Result<(), ConfigError> {\n\n let op = \"data-dir.empty.check\";\n\n let dir = Path::new(data_path);\n\n if dir.exists() && !dir.is_file() {\n\n let count = get_file_count(data_path, extension)?;\n\n if count > 0 {\n\n return Err(ConfigError::Limit(format!(\n\n \"{}: the number of file with extension {} in directory {} is non-zero, \\\n\n got {}, expect 0.\",\n\n op, extension, data_path, count,\n\n )));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "components/tikv_util/src/config.rs", "rank": 81, "score": 245872.1113118103 }, { "content": "fn test_base_split_region<T, F>(cluster: &mut Cluster<T>, split: F, right_derive: bool)\n\nwhere\n\n T: Simulator,\n\n F: Fn(&mut Cluster<T>, &metapb::Region, &[u8]),\n\n{\n\n cluster.cfg.raft_store.right_derive_when_split = right_derive;\n\n cluster.run();\n\n\n\n let pd_client = Arc::clone(&cluster.pd_client);\n\n\n\n let tbls = vec![\n\n (b\"k22\", b\"k11\", b\"k33\"),\n\n (b\"k11\", b\"k00\", b\"k11\"),\n\n (b\"k33\", b\"k22\", b\"k33\"),\n\n ];\n\n\n\n for (split_key, left_key, right_key) in tbls {\n\n cluster.must_put(left_key, b\"v1\");\n\n cluster.must_put(right_key, b\"v3\");\n\n\n", "file_path": "tests/integrations/raftstore/test_split_region.rs", "rank": 82, "score": 245537.51922738028 }, { "content": "fn check_raft_cmd_response(resp: &mut RaftCmdResponse, req_cnt: usize) -> Result<()> {\n\n if resp.get_header().has_error() {\n\n return Err(Error::RequestFailed(resp.take_header().take_error()));\n\n }\n\n if req_cnt != resp.get_responses().len() {\n\n return Err(Error::InvalidResponse(format!(\n\n \"responses count {} is not equal to requests count {}\",\n\n resp.get_responses().len(),\n\n req_cnt\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/raftkv.rs", "rank": 83, "score": 
245435.75843286212 }, { "content": "pub fn new_readpool_cfg() -> ReadPoolConfig {\n\n ReadPoolConfig {\n\n unify_read_pool: false,\n\n unified: UnifiedReadPoolConfig {\n\n min_thread_count: 1,\n\n max_thread_count: 1,\n\n ..UnifiedReadPoolConfig::default()\n\n },\n\n storage: StorageReadPoolConfig {\n\n high_concurrency: 1,\n\n normal_concurrency: 1,\n\n low_concurrency: 1,\n\n ..StorageReadPoolConfig::default()\n\n },\n\n coprocessor: CoprReadPoolConfig {\n\n high_concurrency: 1,\n\n normal_concurrency: 1,\n\n low_concurrency: 1,\n\n ..CoprReadPoolConfig::default()\n\n },\n\n }\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 84, "score": 244022.43363447883 }, { "content": "#[inline]\n\npub fn eval_arith<F>(ctx: &mut EvalContext, left: Datum, right: Datum, f: F) -> Result<Datum>\n\nwhere\n\n F: FnOnce(Datum, &mut EvalContext, Datum) -> Result<Datum>,\n\n{\n\n let left = left.into_arith(ctx)?;\n\n let right = right.into_arith(ctx)?;\n\n\n\n let (left, right) = Datum::coerce(ctx, left, right)?;\n\n if left == Datum::Null || right == Datum::Null {\n\n return Ok(Datum::Null);\n\n }\n\n\n\n f(left, ctx, right).map_err(From::from)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n use std::{i64, u64};\n\n\n", "file_path": "components/tidb_query/src/expr/mod.rs", "rank": 85, "score": 243660.82160809316 }, { "content": "pub fn get_valid_int_prefix<'a>(ctx: &mut EvalContext, s: &'a str) -> Result<Cow<'a, str>> {\n\n if !ctx.cfg.flag.contains(Flag::IN_SELECT_STMT) {\n\n let vs = get_valid_float_prefix(ctx, s)?;\n\n float_str_to_int_string(ctx, vs)\n\n } else {\n\n let mut valid_len = 0;\n\n for (i, c) in s.chars().enumerate() {\n\n if (c == '+' || c == '-') && i == 0 {\n\n continue;\n\n }\n\n if c >= '0' && c <= '9' {\n\n valid_len = i + 1;\n\n continue;\n\n }\n\n break;\n\n }\n\n let mut valid = &s[..valid_len];\n\n if valid == \"\" {\n\n valid = \"0\";\n\n }\n\n if valid_len == 0 || valid_len < s.len() {\n\n 
ctx.handle_truncate_err(Error::truncated_wrong_val(\"INTEGER\", s))?;\n\n }\n\n Ok(Cow::Borrowed(valid))\n\n }\n\n}\n\n\n", "file_path": "components/tidb_query/src/codec/convert.rs", "rank": 86, "score": 243319.85181273706 }, { "content": "/// According to [RFC: Unified Log Format], escapes the given data and writes it into a writer.\n\n/// If there is no character [`need json encode`], it writes the data into the writer directly.\n\n/// Else, it serializes the given data structure as JSON into a writer.\n\n///\n\n/// [RFC: Unified Log Format]: (https://github.com/tikv/rfcs/blob/master/text/2018-12-19-unified-log-format.md)\n\n/// [`need json encode`]: #method.need_json_encode\n\n///\n\npub fn write_escaped_str<W>(writer: &mut W, value: &str) -> io::Result<()>\n\nwhere\n\n W: io::Write + ?Sized,\n\n{\n\n if !need_json_encode(value.as_bytes()) {\n\n writer.write_all(value.as_bytes())?;\n\n } else {\n\n serde_json::to_writer(writer, value)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_need_escape() {\n\n let cases = [\n\n (\"abc\", false),\n", "file_path": "components/tikv_util/src/logger/formatter.rs", "rank": 87, "score": 242671.2379229768 }, { "content": "#[allow(dead_code)]\n\npub fn initial_logger(config: &TiKvConfig) {\n\n if config.log_file.is_empty() {\n\n let drainer = logger::term_drainer();\n\n // use async drainer and init std log.\n\n logger::init_log(drainer, config.log_level, true, true, vec![]).unwrap_or_else(|e| {\n\n fatal!(\"failed to initialize log: {}\", e);\n\n });\n\n } else {\n\n let drainer = logger::file_drainer(\n\n &config.log_file,\n\n config.log_rotation_timespan,\n\n config.log_rotation_size,\n\n rename_by_timestamp,\n\n )\n\n .unwrap_or_else(|e| {\n\n fatal!(\n\n \"failed to initialize log with file {}: {}\",\n\n config.log_file,\n\n e\n\n );\n\n });\n\n\n\n // use async drainer and init std log.\n\n logger::init_log(drainer, config.log_level, true, true, 
vec![]).unwrap_or_else(|e| {\n\n fatal!(\"failed to initialize log: {}\", e);\n\n });\n\n };\n\n LOG_INITIALIZED.store(true, Ordering::SeqCst);\n\n}\n\n\n", "file_path": "cmd/src/setup.rs", "rank": 88, "score": 242424.1882782431 }, { "content": "/// Run a TiKV server. Returns when the server is shutdown by the user, in which\n\n/// case the server will be properly stopped.\n\npub fn run_tikv(config: TiKvConfig) {\n\n // Do some prepare works before start.\n\n pre_start(&config);\n\n\n\n let mut tikv = TiKVServer::init(config);\n\n\n\n let _m = Monitor::default();\n\n\n\n tikv.init_fs();\n\n tikv.init_yatp();\n\n tikv.init_engines();\n\n let gc_worker = tikv.init_gc_worker();\n\n let server_config = tikv.init_servers(&gc_worker);\n\n tikv.register_services(gc_worker);\n\n tikv.init_metrics_flusher();\n\n\n\n tikv.run_server(server_config);\n\n\n\n signal_handler::wait_for_signal(Some(tikv.engines.take().unwrap().engines));\n\n\n\n tikv.stop();\n\n}\n\n\n\nconst RESERVED_OPEN_FDS: u64 = 1000;\n\n\n", "file_path": "cmd/src/server.rs", "rank": 89, "score": 242424.1882782431 }, { "content": "fn must_transfer_leader(cluster: &mut Cluster<ServerCluster>, region_key: &[u8], store_id: u64) {\n\n let region = cluster.get_region(region_key);\n\n let target_peer = find_peer_of_store(&region, store_id);\n\n cluster.must_transfer_leader(region.get_id(), target_peer.clone());\n\n cluster\n\n .pd_client\n\n .region_leader_must_be(region.get_id(), target_peer);\n\n // Make sure the new leader can get snapshot locally.\n\n cluster.must_get(region_key);\n\n}\n\n\n", "file_path": "tests/integrations/server/lock_manager.rs", "rank": 90, "score": 240794.33054981288 }, { "content": "pub fn new_pessimistic_txn_cfg() -> PessimisticTxnConfig {\n\n PessimisticTxnConfig {\n\n // Use a large value here since tests run slowly in CI.\n\n wait_for_lock_timeout: 3000,\n\n wake_up_delay_duration: 100,\n\n ..PessimisticTxnConfig::default()\n\n }\n\n}\n\n\n", "file_path": 
"components/test_raftstore/src/util.rs", "rank": 91, "score": 239923.33758797782 }, { "content": "fn is_attr(name: &str, attr: &Attribute) -> bool {\n\n for s in &attr.path.segments {\n\n if s.ident == name {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "components/configuration/configuration_derive/src/lib.rs", "rank": 92, "score": 239600.54602363866 }, { "content": "fn do_in<'a, T, E, F>(expr: &'a ScalarFunc, mut f: F, get_order: E) -> Result<Option<i64>>\n\nwhere\n\n F: FnMut(&'a Expression) -> Result<Option<T>>,\n\n E: Fn(&T, &T) -> Result<Ordering>,\n\n{\n\n let (first, others) = expr.children.split_first().unwrap();\n\n let arg = try_opt!(f(first));\n\n let mut ret_when_not_matched = Ok(Some(0));\n\n for exp in others {\n\n let arg1 = f(exp)?;\n\n if arg1.is_none() {\n\n ret_when_not_matched = Ok(None);\n\n continue;\n\n }\n\n let cmp_result = get_order(&arg, &arg1.unwrap())?;\n\n if cmp_result == Ordering::Equal {\n\n return Ok(Some(1));\n\n }\n\n }\n\n ret_when_not_matched\n\n}\n\n\n", "file_path": "components/tidb_query/src/expr/builtin_compare.rs", "rank": 93, "score": 238339.21633311675 }, { "content": "pub fn new_test_engine(path: &str, cfs: &[&str]) -> RocksEngine {\n\n let cf_opts = cfs\n\n .iter()\n\n .map(|cf| {\n\n let mut opt = ColumnFamilyOptions::new();\n\n opt.add_table_properties_collector_factory(\n\n \"tikv.test_properties\",\n\n Box::new(TestPropertiesCollectorFactory::new(*cf)),\n\n );\n\n CFOptions::new(*cf, opt)\n\n })\n\n .collect();\n\n let db = new_engine(path, None, cfs, Some(cf_opts)).expect(\"rocks test engine\");\n\n RocksEngine::from_db(Arc::new(db))\n\n}\n\n\n", "file_path": "components/test_sst_importer/src/lib.rs", "rank": 94, "score": 238297.8328626558 }, { "content": "type RocksDBMetricsFn = fn(cf: &str, name: &str, v: f64);\n\n\n\npub struct ImportModeSwitcher {\n\n mode: SwitchMode,\n\n backup_db_options: ImportModeDBOptions,\n\n backup_cf_options: Vec<(String, ImportModeCFOptions)>,\n\n}\n\n\n\nimpl 
ImportModeSwitcher {\n\n pub fn new() -> ImportModeSwitcher {\n\n ImportModeSwitcher {\n\n mode: SwitchMode::Normal,\n\n backup_db_options: ImportModeDBOptions::new(),\n\n backup_cf_options: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn enter_normal_mode(&mut self, db: &impl KvEngine, mf: RocksDBMetricsFn) -> Result<()> {\n\n if self.mode == SwitchMode::Normal {\n\n return Ok(());\n", "file_path": "components/sst_importer/src/import_mode.rs", "rank": 96, "score": 237341.54490481716 }, { "content": "/// `check_addr` validates an address. Addresses are formed like \"Host:Port\".\n\n/// More details about **Host** and **Port** can be found in WHATWG URL Standard.\n\npub fn check_addr(addr: &str) -> Result<(), ConfigError> {\n\n // Try to validate \"IPv4:Port\" and \"[IPv6]:Port\".\n\n if SocketAddrV4::from_str(addr).is_ok() {\n\n return Ok(());\n\n }\n\n if SocketAddrV6::from_str(addr).is_ok() {\n\n return Ok(());\n\n }\n\n\n\n let parts: Vec<&str> = addr\n\n .split(':')\n\n .filter(|s| !s.is_empty()) // \"Host:\" or \":Port\" are invalid.\n\n .collect();\n\n\n\n // [\"Host\", \"Port\"]\n\n if parts.len() != 2 {\n\n return Err(ConfigError::Address(format!(\"invalid addr: {:?}\", addr)));\n\n }\n\n\n\n // Check Port.\n", "file_path": "components/tikv_util/src/config.rs", "rank": 97, "score": 236460.17879254857 }, { "content": "fn new_ctx(resp: &RaftCmdResponse) -> CbContext {\n\n let mut cb_ctx = CbContext::new();\n\n cb_ctx.term = Some(resp.get_header().get_current_term());\n\n cb_ctx\n\n}\n\n\n", "file_path": "src/server/raftkv.rs", "rank": 98, "score": 236112.82961246202 }, { "content": "/// Create corpus dir for fuzz target\n\nfn create_corpus_dir(base: impl AsRef<Path>, target: &str) -> Result<PathBuf, Error> {\n\n let base = base.as_ref();\n\n let corpus_dir = base.join(&format!(\"corpus-{}\", target));\n\n fs::create_dir_all(&corpus_dir).context(format!(\n\n \"unable to create corpus dir for {}{}\",\n\n base.display(),\n\n target\n\n ))?;\n\n 
Ok(corpus_dir)\n\n}\n\n\n", "file_path": "fuzz/cli.rs", "rank": 99, "score": 235147.97225832066 } ]
Rust
editor/src/settings/mod.rs
Libertus-Lab/Fyrox
c925304f42744659fd3a6be5c4a1a8609556033a
use crate::{ inspector::editors::make_property_editors_container, settings::{ debugging::DebuggingSettings, graphics::GraphicsSettings, move_mode::MoveInteractionModeSettings, rotate_mode::RotateInteractionModeSettings, selection::SelectionSettings, }, GameEngine, Message, MSG_SYNC_FLAG, }; use fyrox::{ core::{ inspect::{Inspect, PropertyInfo}, pool::Handle, scope_profile, }, gui::{ button::{ButtonBuilder, ButtonMessage}, grid::{Column, GridBuilder, Row}, inspector::{ editors::{ enumeration::EnumPropertyEditorDefinition, inspectable::InspectablePropertyEditorDefinition, PropertyEditorDefinitionContainer, }, FieldKind, InspectorBuilder, InspectorContext, InspectorMessage, PropertyChanged, }, message::{MessageDirection, UiMessage}, scroll_viewer::ScrollViewerBuilder, stack_panel::StackPanelBuilder, widget::WidgetBuilder, window::{WindowBuilder, WindowMessage, WindowTitle}, HorizontalAlignment, Orientation, Thickness, UiNode, UserInterface, }, renderer::{CsmSettings, QualitySettings, ShadowMapPrecision}, utils::log::Log, }; use ron::ser::PrettyConfig; use serde::{Deserialize, Serialize}; use std::{fs::File, path::PathBuf, rc::Rc, sync::mpsc::Sender}; pub mod debugging; pub mod graphics; pub mod move_mode; pub mod rotate_mode; pub mod selection; pub struct SettingsWindow { window: Handle<UiNode>, ok: Handle<UiNode>, default: Handle<UiNode>, inspector: Handle<UiNode>, } #[derive(Deserialize, Serialize, PartialEq, Clone, Default, Debug, Inspect)] pub struct Settings { pub selection: SelectionSettings, pub graphics: GraphicsSettings, pub debugging: DebuggingSettings, pub move_mode_settings: MoveInteractionModeSettings, pub rotate_mode_settings: RotateInteractionModeSettings, } #[derive(Debug)] pub enum SettingsError { Io(std::io::Error), Ron(ron::Error), } impl From<std::io::Error> for SettingsError { fn from(e: std::io::Error) -> Self { Self::Io(e) } } impl From<ron::Error> for SettingsError { fn from(e: ron::Error) -> Self { Self::Ron(e) } } impl Settings { const 
FILE_NAME: &'static str = "settings.ron"; fn full_path() -> PathBuf { Self::FILE_NAME.into() } pub fn load() -> Result<Self, SettingsError> { let file = File::open(Self::full_path())?; Ok(ron::de::from_reader(file)?) } pub fn save(&self) -> Result<(), SettingsError> { let file = File::create(Self::full_path())?; ron::ser::to_writer_pretty(file, self, PrettyConfig::default())?; Ok(()) } fn make_property_editors_container( sender: Sender<Message>, ) -> Rc<PropertyEditorDefinitionContainer> { let container = make_property_editors_container(sender); container.insert(InspectablePropertyEditorDefinition::<GraphicsSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<SelectionSettings>::new()); container.insert(EnumPropertyEditorDefinition::<ShadowMapPrecision>::new()); container.insert(InspectablePropertyEditorDefinition::<DebuggingSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<CsmSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<QualitySettings>::new()); container.insert(InspectablePropertyEditorDefinition::< MoveInteractionModeSettings, >::new()); container.insert(InspectablePropertyEditorDefinition::< RotateInteractionModeSettings, >::new()); Rc::new(container) } fn handle_property_changed(&mut self, property_changed: &PropertyChanged) -> bool { if let FieldKind::Inspectable(ref inner) = property_changed.value { return match property_changed.name.as_ref() { Self::SELECTION => self.selection.handle_property_changed(&**inner), Self::GRAPHICS => self.graphics.handle_property_changed(&**inner), Self::DEBUGGING => self.debugging.handle_property_changed(&**inner), Self::MOVE_MODE_SETTINGS => { self.move_mode_settings.handle_property_changed(&**inner) } Self::ROTATE_MODE_SETTINGS => { self.rotate_mode_settings.handle_property_changed(&**inner) } _ => false, }; } false } } impl SettingsWindow { pub fn new(engine: &mut GameEngine) -> Self { let ok; let default; let ctx = &mut 
engine.user_interface.build_ctx(); let inspector = InspectorBuilder::new(WidgetBuilder::new()).build(ctx); let window = WindowBuilder::new(WidgetBuilder::new().with_width(500.0).with_height(600.0)) .open(false) .with_title(WindowTitle::Text("Settings".to_owned())) .with_content( GridBuilder::new( WidgetBuilder::new() .with_child( ScrollViewerBuilder::new( WidgetBuilder::new() .with_margin(Thickness::uniform(2.0)) .on_row(0), ) .with_content(inspector) .build(ctx), ) .with_child( StackPanelBuilder::new( WidgetBuilder::new() .on_row(1) .with_horizontal_alignment(HorizontalAlignment::Right) .with_child({ default = ButtonBuilder::new( WidgetBuilder::new() .with_width(80.0) .with_margin(Thickness::uniform(1.0)), ) .with_text("Default") .build(ctx); default }) .with_child({ ok = ButtonBuilder::new( WidgetBuilder::new() .with_width(80.0) .with_margin(Thickness::uniform(1.0)), ) .with_text("OK") .build(ctx); ok }), ) .with_orientation(Orientation::Horizontal) .build(ctx), ), ) .add_row(Row::stretch()) .add_row(Row::strict(25.0)) .add_column(Column::stretch()) .build(ctx), ) .build(ctx); Self { window, ok, default, inspector, } } pub fn open(&self, ui: &mut UserInterface, settings: &Settings, sender: &Sender<Message>) { ui.send_message(WindowMessage::open( self.window, MessageDirection::ToWidget, true, )); self.sync_to_model(ui, settings, sender); } fn sync_to_model(&self, ui: &mut UserInterface, settings: &Settings, sender: &Sender<Message>) { let context = InspectorContext::from_object( settings, &mut ui.build_ctx(), Settings::make_property_editors_container(sender.clone()), None, MSG_SYNC_FLAG, 0, ); ui.send_message(InspectorMessage::context( self.inspector, MessageDirection::ToWidget, context, )); } pub fn handle_message( &mut self, message: &UiMessage, engine: &mut GameEngine, settings: &mut Settings, sender: &Sender<Message>, ) { scope_profile!(); let old_settings = settings.clone(); if let Some(ButtonMessage::Click) = message.data::<ButtonMessage>() { if 
message.destination() == self.ok { engine.user_interface.send_message(WindowMessage::close( self.window, MessageDirection::ToWidget, )); } else if message.destination() == self.default { *settings = Default::default(); self.sync_to_model(&mut engine.user_interface, settings, sender); } } else if let Some(InspectorMessage::PropertyChanged(property_changed)) = message.data() { if message.destination() == self.inspector && !settings.handle_property_changed(property_changed) { Log::err(format!( "Unhandled property change: {}", property_changed.path() )) } } if settings != &old_settings { if settings.graphics.quality != engine.renderer.get_quality_settings() { if let Err(e) = engine .renderer .set_quality_settings(&settings.graphics.quality) { Log::err(format!( "An error occurred at attempt to set new graphics settings: {:?}", e )); } else { Log::info("New graphics quality settings were successfully set!".to_owned()); } } match settings.save() { Ok(_) => { Log::info("Settings were successfully saved!".to_owned()); } Err(e) => { Log::err(format!("Unable to save settings! Reason: {:?}!", e)); } }; } } }
use crate::{ inspector::editors::make_property_editors_container, settings::{ debugging::DebuggingSettings, graphics::GraphicsSettings, move_mode::MoveInteractionModeSettings, rotate_mode::RotateInteractionModeSettings, selection::SelectionSettings, }, GameEngine, Message, MSG_SYNC_FLAG, }; use fyrox::{ core::{ inspect::{Inspect, PropertyInfo}, pool::Handle, scope_profile, }, gui::{ button::{ButtonBuilder, ButtonMessage}, grid::{Column, GridBuilder, Row}, inspector::{ editors::{ enumeration::EnumPropertyEditorDefinition, inspectable::InspectablePropertyEditorDefinition, PropertyEditorDefinitionContainer, }, FieldKind, InspectorBuilder, InspectorContext, InspectorMessage, PropertyChanged, }, message::{MessageDirection, UiMessage}, scroll_viewer::ScrollViewerBuilder, stack_panel::StackPanelBuilder, widget::WidgetBuilder, window::{WindowBuilder, WindowMessage, WindowTitle}, HorizontalAlignment, Orientation, Thickness, UiNode, UserInterface, }, renderer::{CsmSettings, QualitySettings, ShadowMapPrecision}, utils::log::Log, }; use ron::ser::PrettyConfig; use serde::{Deserialize, Serialize}; use std::{fs::File, path::PathBuf, rc::Rc, sync::mpsc::Sender}; pub mod debugging; pub mod graphics; pub mod move_mode; pub mod rotate_mode; pub mod selection; pub struct SettingsWindow { window: Handle<UiNode>, ok: Handle<UiNode>, default: Handle<UiNode>, inspector: Handle<UiNode>, } #[derive(Deserialize, Serialize, PartialEq, Clone, Default, Debug, Inspect)] pub struct Settings { pub selection: SelectionSettings, pub graphics: GraphicsSettings, pub debugging: DebuggingSettings, pub move_mode_settings: MoveInteractionModeSettings, pub rotate_mode_settings: RotateInteractionModeSettings, } #[derive(Debug)] pub enum SettingsError { Io(std::io::Error), Ron(ron::Error), } impl From<std::io::Error> for SettingsError { fn from(e: std::io::Error) -> Self { Self::Io(e) } } impl From<ron::Error> for SettingsError { fn from(e: ron::Error) -> Self { Self::Ron(e) } } impl Settings { const 
FILE_NAME: &'static str = "settings.ron"; fn full_path() -> PathBuf { Self::FILE_NAME.into() } pub fn load() -> Result<Self, SettingsError> { let file = File::open(Self::full_path())?; Ok(ron::de::from_reader(file)?) } pub fn save(&self) -> Result<(), SettingsError> { let file = File::create(Self::full_path())?; ron::ser::to_writer_pretty(file, self, PrettyConfig::default())?; Ok(()) } fn make_property_editors_container( sender: Sender<Message>, ) -> Rc<PropertyEditorDefinitionContainer> { let container = make_property_editors_container(sender); container.insert(InspectablePropertyEditorDefinition::<GraphicsSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<SelectionSettings>::new()); container.insert(EnumPropertyEditorDefinition::<ShadowMapPrecision>::new()); container.insert(InspectablePropertyEditorDefinition::<DebuggingSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<CsmSettings>::new()); container.insert(InspectablePropertyEditorDefinition::<QualitySettings>::new()); container.insert(InspectablePropertyEditorDefinition::< MoveInteractionModeSettings, >::new()); container.insert(InspectablePropertyEditorDefinition::< RotateInteractionModeSettings, >::new()); Rc::new(container) } fn handle_property_changed(&mut self, property_changed: &PropertyChanged) -> bool { if let FieldKind::Inspectable(ref inner) = property_changed.value { return match property_changed.name.as_ref() { Self::SELECTION => self.selection.handle_property_changed(&**inner), Self::GRAPHICS => self.graphics.handle_property_changed(&**inner), Self::DEBUGGING => self.debugging.handle_property_changed(&**inner), Self::MOVE_MODE_SETTINGS => { self.move_mode_settings.handle_property_changed(&**inner) } Self::ROTATE_MODE_SETTINGS => { self.rotate_mode_settings.handle_property_changed(&**inner) } _ => false, }; } false } } impl SettingsWindow { pub fn new(engine: &mut GameEngine) -> Self { let ok; let default; let ctx = &mut 
engine.user_interface.build_ctx(); let inspector = InspectorBuilder::new(WidgetBuilder::new()).build(ctx); let window = WindowBuilder::new(WidgetBuilder::new().with_width(500.0).with_height(600.0)) .open(false) .with_title(WindowTitle::Text("Settings".to_owned())) .with_content( GridBuilder::new( WidgetBuilder::new() .with_child( ScrollViewerBuilder::new( WidgetBuilder::new() .with_margin(Thickness::uniform(2.0)) .on_row(0), ) .with_content(inspector) .build(ctx), ) .with_child( StackPanelBuilder::new( WidgetBuilder::new() .on_row(1) .with_horizontal_alignment(HorizontalAlignment::Right) .with_child({ default = ButtonBuilder::new( WidgetBuilder::new() .with_width(80.0) .with_margin(Thickness::uniform(1.0)), ) .with_text("Default") .build(ctx); default }) .with_child({ ok = ButtonBuilder::new( WidgetBuilder::new() .with_width(80.0) .with_margin(Thickness::uniform(1.0)), ) .with_text("OK") .build(ctx); ok }), ) .with_orientation(Orientation::Horizontal) .build(ctx), ), ) .add_row(Row::stretch()) .add_row(Row::strict(25.0)) .add_column(Column::stretch()) .build(ctx), ) .build(ctx); Self { window, ok, default, inspector, } } pub fn open(&self, ui: &mut UserInterface, settings: &Settings, sender: &Sender<Message>) { ui.send_message(WindowMessage::open( self.window, MessageDirection::ToWidget, true, )); self.sync_to_model(ui, settings, sender); } fn sync_to_model(&self, ui: &mut UserInterface, settings: &Settings, sender: &Sender<Message>) { let context = InspectorContext::from_object( settings, &mut ui.build_ctx(), Settings::make_property_editors_container(sender.clone()), None, MSG_SYNC_FLAG, 0, ); ui.send_message(InspectorMessage::context( self.inspector, MessageDirection::ToWidget, context, )); } pub fn handle_message( &mut self, message: &UiMessage, engine: &mut GameEngine, settings: &mut Settings, sender: &Sender<Message>, ) { scope_profile!(); let old_settings = settings.clone(); if let Some(ButtonMessage::Click) = message.data::<ButtonMessage>() { if 
message.destination() == self.ok { engine.user_interface.send_message(WindowMessage::close( self.window, MessageDirection::ToWidget, )); } else if message.destination() == self.default { *settings = Default::default(); self.sync_to_model(&mut engine.user_interface, settings, sender); } } else if let Some(InspectorMessage::PropertyChanged(property_changed)) = message.data() { if message.destination() == self.inspector && !settings.handle_property_changed(property_changed) { Log::err(format!( "Unhandled property change: {}", property_changed.path() )) } } if settings != &old_settings {
match settings.save() { Ok(_) => { Log::info("Settings were successfully saved!".to_owned()); } Err(e) => { Log::err(format!("Unable to save settings! Reason: {:?}!", e)); } }; } } }
if settings.graphics.quality != engine.renderer.get_quality_settings() { if let Err(e) = engine .renderer .set_quality_settings(&settings.graphics.quality) { Log::err(format!( "An error occurred at attempt to set new graphics settings: {:?}", e )); } else { Log::info("New graphics quality settings were successfully set!".to_owned()); } }
if_condition
[ { "content": "pub trait InspectableEnum: Debug + Inspect + 'static {}\n\n\n\nimpl<T: Debug + Inspect + 'static> InspectableEnum for T {}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum EnumPropertyEditorMessage {\n\n Variant(usize),\n\n PropertyChanged(PropertyChanged),\n\n}\n\n\n\nimpl EnumPropertyEditorMessage {\n\n define_constructor!(EnumPropertyEditorMessage:Variant => fn variant(usize), layout: false);\n\n define_constructor!(EnumPropertyEditorMessage:PropertyChanged => fn property_changed(PropertyChanged), layout: false);\n\n}\n\n\n\npub struct EnumPropertyEditor<T: InspectableEnum> {\n\n widget: Widget,\n\n variant_selector: Handle<UiNode>,\n\n inspector: Handle<UiNode>,\n\n definition: EnumPropertyEditorDefinition<T>,\n", "file_path": "fyrox-ui/src/inspector/editors/enumeration.rs", "rank": 0, "score": 457328.59013341216 }, { "content": "fn make_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n if text.is_empty() {\n\n Handle::NONE\n\n } else {\n\n make_simple_tooltip(ctx, text)\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 1, "score": 396464.4023412583 }, { "content": "pub fn make_dropdown_list_option(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_height(26.0).with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_horizontal_text_alignment(HorizontalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n ),\n\n ))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/gui.rs", "rank": 2, "score": 389710.231145275 }, { "content": "pub trait PropertyEditorDefinition: Debug {\n\n fn value_type_id(&self) -> TypeId;\n\n\n\n fn create_instance(\n\n &self,\n\n ctx: PropertyEditorBuildContext,\n\n ) -> Result<PropertyEditorInstance, InspectorError>;\n\n\n\n fn create_message(\n\n &self,\n\n ctx: PropertyEditorMessageContext,\n\n ) -> 
Result<Option<UiMessage>, InspectorError>;\n\n\n\n fn translate_message(&self, ctx: PropertyEditorTranslationContext) -> Option<PropertyChanged>;\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct PropertyEditorDefinitionContainer {\n\n definitions: RefCell<FxHashMap<TypeId, Rc<dyn PropertyEditorDefinition>>>,\n\n}\n", "file_path": "fyrox-ui/src/inspector/editors/mod.rs", "rank": 3, "score": 383828.94288913114 }, { "content": "pub fn make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "fyrox-ui/src/dock.rs", "rank": 4, "score": 378120.70876144 }, { "content": "fn default_prop() -> PropertyInfo<'static> {\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<()>(),\n\n name: \"\",\n\n display_name: \"\",\n\n value: &(),\n\n read_only: false,\n\n min_value: None,\n\n max_value: None,\n\n step: None,\n\n precision: None,\n\n description: \"\".to_string(),\n\n is_modified: false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 5, "score": 372132.65520455426 }, { "content": "pub fn make_simple_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(false)\n\n .with_foreground(Brush::Solid(Color::opaque(160, 160, 160)))\n\n .with_max_size(Vector2::new(250.0, f32::INFINITY))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_wrap(WrapMode::Word)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n 
.build(ctx)\n\n}\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 6, "score": 371328.08962334227 }, { "content": "pub trait MessageData: 'static + Debug + Any {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool;\n\n}\n\n\n\nimpl<T> MessageData for T\n\nwhere\n\n T: 'static + Debug + PartialEq + Any,\n\n{\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool {\n\n other\n\n .as_any()\n\n .downcast_ref::<T>()\n\n .map(|other| other == self)\n\n .unwrap_or_default()\n", "file_path": "fyrox-ui/src/message.rs", "rank": 7, "score": 368892.15006092045 }, { "content": "pub fn make_property_editors_container(\n\n sender: Sender<Message>,\n\n) -> PropertyEditorDefinitionContainer {\n\n let container = PropertyEditorDefinitionContainer::new();\n\n\n\n container.insert(TexturePropertyEditorDefinition);\n\n container.insert(MaterialPropertyEditorDefinition {\n\n sender: Mutex::new(sender.clone()),\n\n });\n\n container.insert(VecCollectionPropertyEditorDefinition::<Surface>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<Layer>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<Emitter>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<LevelOfDetail>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<ErasedHandle>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<Handle<Node>>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<Property>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<LodControlledObject>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<GeometrySource>::new());\n\n container.insert(VecCollectionPropertyEditorDefinition::<EffectInput>::new());\n\n container.insert(make_status_enum_editor_definition());\n", "file_path": "editor/src/inspector/editors/mod.rs", "rank": 8, "score": 366346.66240428743 }, { 
"content": "fn make_text_title(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::left(5.0))\n\n .on_row(0)\n\n .on_column(0),\n\n )\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_horizontal_text_alignment(HorizontalAlignment::Left)\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/window.rs", "rank": 9, "score": 364144.7196815458 }, { "content": "fn create_header(ctx: &mut BuildContext, text: &str, layer_index: usize) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new().with_margin(make_property_margin(layer_index)))\n\n .with_text(text)\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 10, "score": 363553.4364358106 }, { "content": "pub fn make_save_file_selector(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0))\n\n .with_title(WindowTitle::Text(\"Save Scene As\".into()))\n\n .open(false),\n\n )\n\n .with_mode(FileBrowserMode::Save {\n\n default_file_name: PathBuf::from(\"unnamed.rgs\"),\n\n })\n\n .with_path(\"./\")\n\n .with_filter(make_scene_file_filter())\n\n .build(ctx)\n\n}\n\n\n\npub enum Mode {\n\n Edit,\n\n Play {\n\n // Play mode scene.\n\n scene: Handle<Scene>,\n\n // List of scenes that existed before entering play mode.\n", "file_path": "editor/src/lib.rs", "rank": 11, "score": 362401.57158474985 }, { "content": "fn make_folder(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n TreeBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::left(5.0))\n\n .with_foreground(Brush::Solid(Color::opaque(153, 217, 234))),\n\n )\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n )\n\n 
.build(ctx)\n\n}\n\n\n\nimpl WorldViewer {\n\n pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {\n\n let track_selection_state = true;\n\n let tree_root;\n\n let node_path;\n", "file_path": "editor/src/world/mod.rs", "rank": 12, "score": 356606.7522336306 }, { "content": "pub fn make_expander_container(\n\n layer_index: usize,\n\n property_name: &str,\n\n header: Handle<UiNode>,\n\n content: Handle<UiNode>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_checkbox(make_expander_check_box(layer_index, property_name, ctx))\n\n .with_expander_column(Column::strict(NAME_COLUMN_WIDTH))\n\n .with_expanded(true)\n\n .with_header(header)\n\n .with_content(content)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 13, "score": 352537.87945554673 }, { "content": "#[inline]\n\npub fn get_polygon_normal(polygon: &[Vector3<f32>]) -> Result<Vector3<f32>, &'static str> {\n\n let mut normal = Vector3::default();\n\n\n\n for (i, current) in polygon.iter().enumerate() {\n\n let next = polygon[(i + 1) % polygon.len()];\n\n normal.x += (current.y - next.y) * (current.z + next.z);\n\n normal.y += (current.z - next.z) * (current.x + next.x);\n\n normal.z += (current.x - next.x) * (current.y + next.y);\n\n }\n\n\n\n normal\n\n .try_normalize(f32::EPSILON)\n\n .ok_or(\"Unable to get normal of degenerated polygon!\")\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 14, "score": 350988.059745657 }, { "content": "pub fn make_button(ctx: &mut BuildContext, arrow: ArrowDirection, row: usize) -> Handle<UiNode> {\n\n ButtonBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::right(1.0))\n\n .on_row(row),\n\n )\n\n .with_back(\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_foreground(Brush::Solid(Color::opaque(90, 90, 90))),\n\n ))\n\n .with_normal_brush(Brush::Solid(Color::opaque(60, 60, 60)))\n\n 
.with_hover_brush(Brush::Solid(Color::opaque(80, 80, 80)))\n\n .with_pressed_brush(Brush::Solid(Color::opaque(80, 118, 178)))\n\n .build(ctx),\n\n )\n\n .with_content(make_arrow(ctx, arrow, 6.0))\n\n .build(ctx)\n\n}\n\n\n\nimpl<T: NumericType> NumericUpDownBuilder<T> {\n", "file_path": "fyrox-ui/src/numeric.rs", "rank": 15, "score": 347214.2440094425 }, { "content": "/// Translates cursor icon from fyrox-ui library to glutin format.\n\npub fn translate_cursor_icon(icon: crate::gui::message::CursorIcon) -> crate::window::CursorIcon {\n\n match icon {\n\n crate::gui::message::CursorIcon::Default => crate::window::CursorIcon::Default,\n\n crate::gui::message::CursorIcon::Crosshair => crate::window::CursorIcon::Crosshair,\n\n crate::gui::message::CursorIcon::Hand => crate::window::CursorIcon::Hand,\n\n crate::gui::message::CursorIcon::Arrow => crate::window::CursorIcon::Arrow,\n\n crate::gui::message::CursorIcon::Move => crate::window::CursorIcon::Move,\n\n crate::gui::message::CursorIcon::Text => crate::window::CursorIcon::Text,\n\n crate::gui::message::CursorIcon::Wait => crate::window::CursorIcon::Wait,\n\n crate::gui::message::CursorIcon::Help => crate::window::CursorIcon::Help,\n\n crate::gui::message::CursorIcon::Progress => crate::window::CursorIcon::Progress,\n\n crate::gui::message::CursorIcon::NotAllowed => crate::window::CursorIcon::NotAllowed,\n\n crate::gui::message::CursorIcon::ContextMenu => crate::window::CursorIcon::ContextMenu,\n\n crate::gui::message::CursorIcon::Cell => crate::window::CursorIcon::Cell,\n\n crate::gui::message::CursorIcon::VerticalText => crate::window::CursorIcon::VerticalText,\n\n crate::gui::message::CursorIcon::Alias => crate::window::CursorIcon::Alias,\n\n crate::gui::message::CursorIcon::Copy => crate::window::CursorIcon::Copy,\n\n crate::gui::message::CursorIcon::NoDrop => crate::window::CursorIcon::NoDrop,\n\n crate::gui::message::CursorIcon::Grab => crate::window::CursorIcon::Grab,\n\n 
crate::gui::message::CursorIcon::Grabbing => crate::window::CursorIcon::Grabbing,\n", "file_path": "src/utils/mod.rs", "rank": 16, "score": 345953.6266486702 }, { "content": "#[inline]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n#[cfg(feature = \"enable_profiler\")]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {\n\n let function_name = {\n\n fn scope() {}\n\n $crate::profiler::type_name_of(scope)\n\n };\n\n let _scope_guard = $crate::profiler::ScopeDefinition::new(function_name, line!());\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"enable_profiler\"))]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {};\n", "file_path": "fyrox-core/src/profiler.rs", "rank": 17, "score": 343954.53333142423 }, { "content": "pub fn make_property_margin(layer_index: usize) -> Thickness {\n\n let mut margin = HEADER_MARGIN;\n\n margin.left += 10.0 + layer_index as f32 * 10.0;\n\n margin\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 18, "score": 338187.8377753086 }, { "content": "fn make_text_mark(ctx: &mut BuildContext, text: &str, row: usize, column: usize) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_vertical_alignment(VerticalAlignment::Center)\n\n .on_row(row)\n\n .on_column(column),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/color.rs", "rank": 19, "score": 337166.68304427195 }, { "content": "/// Saves given `data` and overwrites `data_default` with the saved data.\n\n///\n\n/// Test the equality after running this method!\n\npub fn save_load<T: Visit>(test_name: &str, data: &mut T, data_default: &mut T) {\n\n // Locate output path\n\n let (bin, txt) = {\n\n let manifest_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let root = PathBuf::from(manifest_dir).join(\"test_output\");\n\n let _ = std::fs::create_dir(&root);\n\n (\n\n root.join(format!(\"{}.bin\", test_name)),\n\n root.join(format!(\"{}.txt\", 
test_name)),\n\n )\n\n };\n\n\n\n // Save `data`\n\n {\n\n let mut visitor = Visitor::new();\n\n data.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n visitor.save_binary(&bin).unwrap();\n\n let mut file = File::create(&txt).unwrap();\n\n file.write_all(visitor.save_text().as_bytes()).unwrap();\n", "file_path": "fyrox-core-derive/tests/it/visit.rs", "rank": 20, "score": 330212.2103929583 }, { "content": "pub fn send_sync_message(ui: &UserInterface, mut msg: UiMessage) {\n\n msg.flags = MSG_SYNC_FLAG;\n\n ui.send_message(msg);\n\n}\n\n\n", "file_path": "editor/src/lib.rs", "rank": 21, "score": 329505.12656641385 }, { "content": "pub fn make_status_enum_editor_definition() -> EnumPropertyEditorDefinition<Status> {\n\n EnumPropertyEditorDefinition {\n\n variant_generator: |i| match i {\n\n 0 => Status::Stopped,\n\n 1 => Status::Playing,\n\n 2 => Status::Paused,\n\n _ => unreachable!(),\n\n },\n\n index_generator: |v| *v as usize,\n\n names_generator: || {\n\n vec![\n\n \"Stopped\".to_string(),\n\n \"Playing\".to_string(),\n\n \"Paused\".to_string(),\n\n ]\n\n },\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/editors/mod.rs", "rank": 22, "score": 329090.7069118914 }, { "content": "fn make_simple_property_container(\n\n title: Handle<UiNode>,\n\n editor: Handle<UiNode>,\n\n description: &str,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n ctx[editor].set_row(0).set_column(1);\n\n\n\n let tooltip = make_tooltip(ctx, description);\n\n ctx[title].set_tooltip(tooltip);\n\n\n\n GridBuilder::new(WidgetBuilder::new().with_child(title).with_child(editor))\n\n .add_rows(vec![Row::strict(26.0)])\n\n .add_columns(vec![Column::strict(NAME_COLUMN_WIDTH), Column::stretch()])\n\n .build(ctx)\n\n}\n\n\n\nimpl InspectorContext {\n\n pub fn from_object(\n\n object: &dyn Inspect,\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 23, "score": 322135.135467199 }, { "content": "fn create_item_views(items: &[Item], ctx: &mut BuildContext) -> Vec<Handle<UiNode>> {\n\n 
items\n\n .iter()\n\n .enumerate()\n\n .map(|(n, item)| {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_child(\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_header(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(format!(\"Item {}\", n))\n\n .build(ctx),\n\n )\n\n .with_content(item.inspector)\n\n .build(ctx),\n\n )\n\n .with_foreground(Brush::Solid(Color::opaque(130, 130, 130))),\n\n )\n\n .build(ctx)\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/editors/array.rs", "rank": 24, "score": 316659.4303250149 }, { "content": "/// A value of a property.\n\npub trait PropertyValue: Any + Debug {\n\n /// Casts `self` to a `&dyn Any`\n\n fn as_any(&self) -> &dyn Any;\n\n}\n\n\n\nimpl<T: Debug + 'static> PropertyValue for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n}\n\n\n\n/// An error that can occur during \"type casting\"\n\n#[derive(Debug)]\n\npub enum CastError {\n\n /// Given type does not match expected.\n\n TypeMismatch {\n\n /// A name of the property.\n\n property_name: String,\n\n\n\n /// Expected type identifier.\n", "file_path": "fyrox-core/src/inspect.rs", "rank": 25, "score": 314145.50012005935 }, { "content": "/// Creates `Inspect` trait impl and field prop keys\n\npub fn create_inspect_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let prop_keys_impl = prop_keys::prop_keys_impl(ty_args);\n\n let trait_impl = self::inspect_trait_impl(ty_args, field_args, impl_body);\n\n\n\n quote! 
{\n\n #prop_keys_impl\n\n #trait_impl\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 26, "score": 313327.7824042345 }, { "content": "pub fn handle_sound_context_property_changed(args: &PropertyChanged) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n SoundContext::PAUSED => Some(SceneCommand::new(SetPausedCommand::new(\n\n value.cast_clone()?,\n\n ))),\n\n SoundContext::MASTER_GAIN => Some(SceneCommand::new(SetMasterGainCommand::new(\n\n value.cast_clone()?,\n\n ))),\n\n SoundContext::DISTANCE_MODEL => Some(SceneCommand::new(SetDistanceModelCommand::new(\n\n value.cast_clone()?,\n\n ))),\n\n SoundContext::RENDERER => Some(SceneCommand::new(SetRendererCommand::new(\n\n value.cast_clone()?,\n\n ))),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/sound_context.rs", "rank": 27, "score": 311071.1896133912 }, { "content": "fn make_mark(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n WidgetBuilder::new()\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(match button {\n\n HeaderButton::Close => VerticalAlignment::Center,\n\n HeaderButton::Minimize => VerticalAlignment::Bottom,\n\n })\n\n .with_margin(match button {\n\n HeaderButton::Close => Thickness::uniform(0.0),\n\n HeaderButton::Minimize => Thickness::bottom(3.0),\n\n })\n\n .with_foreground(BRUSH_BRIGHT),\n\n )\n\n .with_primitives(match button {\n\n HeaderButton::Close => {\n\n vec![\n\n Primitive::Line {\n\n begin: Vector2::new(0.0, 0.0),\n\n end: Vector2::new(12.0, 12.0),\n", "file_path": "fyrox-ui/src/window.rs", "rank": 28, "score": 308918.9619400312 }, { "content": "pub fn window_content(window: Handle<UiNode>, ui: &UserInterface) -> Handle<UiNode> {\n\n ui.node(window)\n\n .cast::<Window>()\n\n .map(|w| w.content())\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": 
"editor/src/utils/mod.rs", "rank": 29, "score": 308901.4326987845 }, { "content": "fn make_header_button(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n ButtonBuilder::new(WidgetBuilder::new().with_margin(Thickness::uniform(2.0)))\n\n .with_back(\n\n DecoratorBuilder::new(\n\n BorderBuilder::new(WidgetBuilder::new())\n\n .with_stroke_thickness(Thickness::uniform(0.0)),\n\n )\n\n .with_normal_brush(Brush::Solid(Color::TRANSPARENT))\n\n .with_hover_brush(BRUSH_LIGHT)\n\n .with_pressed_brush(BRUSH_LIGHTEST)\n\n .build(ctx),\n\n )\n\n .with_content(make_mark(ctx, button))\n\n .build(ctx)\n\n}\n\n\n\nimpl<'a> WindowBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n", "file_path": "fyrox-ui/src/window.rs", "rank": 30, "score": 302547.4769405942 }, { "content": "/// Translates window mouse button into fyrox-ui mouse button.\n\npub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 31, "score": 299601.08265248215 }, { "content": "fn handle_csm_settings_property_changed(\n\n settings: &mut CsmSettings,\n\n property_changed: &PropertyChanged,\n\n) -> bool {\n\n if let FieldKind::Object(ref args) = property_changed.value {\n\n return match property_changed.name.as_ref() {\n\n CsmSettings::ENABLED => args.try_override(&mut settings.enabled),\n\n CsmSettings::SIZE => args.try_override(&mut settings.size),\n\n CsmSettings::PRECISION => args.try_override(&mut settings.precision),\n\n CsmSettings::PCF => args.try_override(&mut settings.pcf),\n\n _ => false,\n\n };\n\n 
}\n\n false\n\n}\n\n\n", "file_path": "editor/src/settings/graphics.rs", "rank": 32, "score": 298315.01867105847 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(WrapMode::Word)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 33, "score": 297554.7492596401 }, { "content": "pub fn handle_base_effect_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Effect>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n BaseEffect::NAME => SetNameCommand,\n\n BaseEffect::GAIN => SetGainCommand\n\n )\n\n }\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n\n BaseEffect::INPUTS => match **collection_changed {\n\n CollectionChanged::Add => Some(SceneCommand::new(AddInputCommand {\n\n handle,\n\n value: Default::default(),\n\n })),\n\n CollectionChanged::Remove(i) => Some(SceneCommand::new(RemoveInputCommand {\n\n handle,\n\n index: i,\n", "file_path": "editor/src/inspector/handlers/effect.rs", "rank": 34, "score": 297337.82827623974 }, { "content": "pub fn handle_listener_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_listener() {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Listener::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => 
None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/listener.rs", "rank": 35, "score": 297337.82827623974 }, { "content": "pub fn handle_decal_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_decal() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Decal::DIFFUSE_TEXTURE => SetDecalDiffuseTextureCommand,\n\n Decal::NORMAL_TEXTURE => SetDecalNormalTextureCommand,\n\n Decal::COLOR => SetDecalColorCommand,\n\n Decal::LAYER => SetDecalLayerIndexCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Decal::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/decal.rs", "rank": 36, "score": 297337.82827623974 }, { "content": "pub fn handle_pivot_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Sprite::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/pivot.rs", "rank": 37, "score": 297337.82827623974 }, { "content": "pub fn handle_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n joint: &mut Joint,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Joint::BODY_1 => SetJointBody1Command,\n\n Joint::BODY_2 => SetJointBody2Command\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Joint::PARAMS => {\n\n if inner.owner_type_id == 
TypeId::of::<BallJoint>() {\n\n handle_ball_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<RevoluteJoint>() {\n\n handle_revolute_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<FixedJoint>() {\n\n handle_fixed_joint(inner, handle)\n", "file_path": "editor/src/inspector/handlers/node/joint.rs", "rank": 38, "score": 297337.82827623974 }, { "content": "pub fn handle_sound_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Sound::GAIN => {\n\n make_command!(SetSoundSourceGainCommand, handle, value)\n\n }\n\n Sound::BUFFER => {\n\n make_command!(SetSoundSourceBufferCommand, handle, value)\n\n }\n\n Sound::PANNING => {\n\n make_command!(SetSoundSourcePanningCommand, handle, value)\n\n }\n\n Sound::PITCH => {\n\n make_command!(SetSoundSourcePitchCommand, handle, value)\n\n }\n\n Sound::LOOPING => {\n\n make_command!(SetSoundSourceLoopingCommand, handle, value)\n", "file_path": "editor/src/inspector/handlers/node/sound.rs", "rank": 39, "score": 297337.82827623974 }, { "content": "pub fn handle_base_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n base: &mut Base,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Base::NAME => SetNameCommand,\n\n Base::TAG => SetTagCommand,\n\n Base::FRUSTUM_CULLING => SetFrustumCullingCommand,\n\n Base::VISIBILITY => SetVisibleCommand,\n\n Base::MOBILITY => SetMobilityCommand,\n\n //Base::LIFETIME => SetLifetimeCommand,\n\n Base::DEPTH_OFFSET => SetDepthOffsetCommand,\n\n Base::LOD_GROUP => SetLodGroupCommand,\n\n Base::CAST_SHADOWS => SetCastShadowsCommand,\n\n Base::SCRIPT => SetScriptCommand\n\n )\n\n }\n", "file_path": "editor/src/inspector/handlers/node/base.rs", "rank": 40, "score": 297337.82827623974 }, { "content": 
"pub fn handle_collider_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n collider: &mut Collider,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Collider::FRICTION => SetColliderFrictionCommand,\n\n Collider::RESTITUTION => SetColliderRestitutionCommand,\n\n Collider::IS_SENSOR => SetColliderIsSensorCommand,\n\n Collider::DENSITY => SetColliderDensityCommand,\n\n Collider::SHAPE => SetColliderShapeCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() {\n\n Collider::COLLISION_GROUPS => match inner_property.value {\n\n FieldKind::Object(ref value) => match inner_property.name.as_ref() {\n\n InteractionGroups::MEMBERSHIPS => {\n\n let mut new_value = collider.collision_groups();\n", "file_path": "editor/src/inspector/handlers/node/collider.rs", "rank": 41, "score": 297337.82827623974 }, { "content": "pub fn handle_reverb_effect_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Effect>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n ReverbEffect::DRY => SetReverbDryCommand,\n\n ReverbEffect::WET => SetReverbWetCommand,\n\n ReverbEffect::FC => SetReverbFcCommand,\n\n ReverbEffect::DECAY_TIME => SetReverbDecayTimeCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n ReverbEffect::BASE => handle_base_effect_property_changed(inner, handle),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/effect.rs", "rank": 42, "score": 297337.82827623974 }, { "content": "pub fn handle_perspective_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n PerspectiveProjection::Z_NEAR => SetPerspectiveZNear,\n\n 
PerspectiveProjection::Z_FAR => SetPerspectiveZFar,\n\n PerspectiveProjection::FOV => SetPerspectiveFov\n\n )\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 43, "score": 297337.82827623974 }, { "content": "pub fn handle_sprite_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_sprite() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Sprite::TEXTURE => SetSpriteTextureCommand,\n\n Sprite::COLOR => SetSpriteColorCommand,\n\n Sprite::SIZE => SetSpriteSizeCommand,\n\n Sprite::ROTATION => SetSpriteRotationCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Sprite::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/sprite.rs", "rank": 44, "score": 297337.82827623974 }, { "content": "pub fn handle_ortho_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n OrthographicProjection::Z_NEAR => SetOrthoZNear,\n\n OrthographicProjection::Z_FAR => SetOrthoZFar,\n\n OrthographicProjection::VERTICAL_SIZE => SetOrthoVerticalSize\n\n )\n\n}\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 45, "score": 297337.82827623974 }, { "content": "pub fn handle_transform_property_changed(\n\n args: &PropertyChanged,\n\n node_handle: Handle<Node>,\n\n base: &Base,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n \"local_position\" => Some(SceneCommand::new(MoveNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().position(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_rotation\" => 
Some(SceneCommand::new(RotateNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().rotation(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_scale\" => Some(SceneCommand::new(ScaleNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().scale(),\n", "file_path": "editor/src/inspector/handlers/node/transform.rs", "rank": 46, "score": 297337.8282762398 }, { "content": "pub fn handle_mesh_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_mesh() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Mesh::RENDER_PATH => SetMeshRenderPathCommand,\n\n Mesh::DECAL_LAYER_INDEX => SetMeshDecalLayerIndexCommand\n\n )\n\n }\n\n FieldKind::Collection(ref args) => match **args {\n\n CollectionChanged::Add => {\n\n // TODO\n\n None\n\n }\n\n CollectionChanged::Remove(_) => {\n\n // TODO\n", "file_path": "editor/src/inspector/handlers/node/mesh.rs", "rank": 47, "score": 297337.82827623974 }, { "content": "pub fn handle_collider2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n collider: &mut Collider,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Collider::FRICTION => SetColliderFrictionCommand,\n\n Collider::RESTITUTION => SetColliderRestitutionCommand,\n\n Collider::IS_SENSOR => SetColliderIsSensorCommand,\n\n Collider::DENSITY => SetColliderDensityCommand,\n\n Collider::SHAPE => SetColliderShapeCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() {\n\n Collider::COLLISION_GROUPS => match inner_property.value {\n\n FieldKind::Object(ref value) => match inner_property.name.as_ref() {\n\n InteractionGroups::MEMBERSHIPS => {\n\n let mut new_value = collider.collision_groups();\n", "file_path": 
"editor/src/inspector/handlers/node/collider2d.rs", "rank": 48, "score": 297337.82827623974 }, { "content": "pub fn handle_joint2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n joint: &mut Joint,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Joint::BODY_1 => SetJointBody1Command,\n\n Joint::BODY_2 => SetJointBody2Command\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Joint::PARAMS => {\n\n if inner.owner_type_id == TypeId::of::<BallJoint>() {\n\n handle_ball_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<FixedJoint>() {\n\n handle_fixed_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<PrismaticJoint>() {\n\n handle_prismatic_joint(inner, handle)\n", "file_path": "editor/src/inspector/handlers/node/joint2d.rs", "rank": 49, "score": 297337.82827623974 }, { "content": "pub fn handle_terrain_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if let Some(terrain) = node.cast::<Terrain>() {\n\n match args.value {\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n\n Terrain::LAYERS => match &**collection_changed {\n\n CollectionChanged::Add => Some(SceneCommand::new(AddTerrainLayerCommand::new(\n\n handle, terrain,\n\n ))),\n\n CollectionChanged::Remove(index) => Some(SceneCommand::new(\n\n DeleteTerrainLayerCommand::new(handle, *index),\n\n )),\n\n CollectionChanged::ItemChanged { index, property } => {\n\n assert_eq!(property.owner_type_id, TypeId::of::<Layer>());\n\n match property.value {\n\n FieldKind::Object(ref args) => match property.name.as_ref() {\n\n Layer::MASK_PROPERTY_NAME => Some(SceneCommand::new(\n", "file_path": "editor/src/inspector/handlers/node/terrain.rs", "rank": 50, "score": 297337.82827623974 }, { "content": "pub fn 
handle_rectangle_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_rectangle() {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Rectangle::TEXTURE => {\n\n make_command!(SetRectangleTextureCommand, handle, value)\n\n }\n\n Rectangle::COLOR => {\n\n make_command!(SetRectangleColorCommand, handle, value)\n\n }\n\n _ => None,\n\n },\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Rectangle::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/rectangle.rs", "rank": 51, "score": 297337.82827623974 }, { "content": "pub fn handle_camera_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if let Some(camera) = node.cast::<Camera>() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Camera::EXPOSURE => SetExposureCommand,\n\n Camera::PROJECTION => SetProjectionCommand,\n\n Camera::VIEWPORT => SetViewportCommand,\n\n Camera::ENABLED => SetCameraPreviewCommand,\n\n Camera::SKY_BOX => SetSkyBoxCommand,\n\n Camera::ENVIRONMENT => SetEnvironmentMap,\n\n Camera::COLOR_GRADING_LUT => SetColorGradingLutCommand,\n\n Camera::COLOR_GRADING_ENABLED => SetColorGradingEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 52, "score": 297337.82827623974 }, { "content": "fn generate_item_container(ctx: &mut BuildContext, item: Handle<UiNode>) -> Handle<UiNode> {\n\n let item = ListViewItem {\n\n widget: WidgetBuilder::new().with_child(item).build(),\n\n };\n\n\n\n ctx.add_node(UiNode::new(item))\n\n}\n\n\n", "file_path": 
"fyrox-ui/src/list_view.rs", "rank": 53, "score": 295455.9843505979 }, { "content": "pub fn open_file_selector(file_selector: Handle<UiNode>, ui: &UserInterface) {\n\n ui.send_message(FileSelectorMessage::root(\n\n file_selector,\n\n MessageDirection::ToWidget,\n\n Some(std::env::current_dir().unwrap()),\n\n ));\n\n\n\n ui.send_message(WindowMessage::open_modal(\n\n file_selector,\n\n MessageDirection::ToWidget,\n\n true,\n\n ));\n\n}\n\n\n", "file_path": "editor/src/utils/mod.rs", "rank": 54, "score": 293562.4562371228 }, { "content": "fn handle_quality_property_changed(\n\n settings: &mut QualitySettings,\n\n property_changed: &PropertyChanged,\n\n) -> bool {\n\n match property_changed.value {\n\n FieldKind::Object(ref args) => {\n\n return match property_changed.name.as_ref() {\n\n QualitySettings::POINT_SHADOW_MAP_SIZE => {\n\n args.try_override(&mut settings.point_shadow_map_size)\n\n }\n\n QualitySettings::POINT_SOFT_SHADOWS => {\n\n args.try_override(&mut settings.point_soft_shadows)\n\n }\n\n QualitySettings::POINT_SHADOWS_ENABLED => {\n\n args.try_override(&mut settings.point_shadows_enabled)\n\n }\n\n QualitySettings::POINT_SHADOWS_DISTANCE => {\n\n args.try_override(&mut settings.point_shadows_distance)\n\n }\n\n QualitySettings::POINT_SHADOW_MAP_PRECISION => {\n", "file_path": "editor/src/settings/graphics.rs", "rank": 55, "score": 292424.46899422223 }, { "content": "pub fn enable_widget(handle: Handle<UiNode>, state: bool, ui: &UserInterface) {\n\n ui.send_message(WidgetMessage::enabled(\n\n handle,\n\n MessageDirection::ToWidget,\n\n state,\n\n ));\n\n}\n\n\n", "file_path": "editor/src/utils/mod.rs", "rank": 56, "score": 290972.73130328243 }, { "content": "pub fn handle_spot_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_spot_light() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, 
value,\n\n SpotLight::HOTSPOT_CONE_ANGLE => SetSpotLightHotspotCommand,\n\n SpotLight::FALLOFF_ANGLE_DELTA => SetSpotLightFalloffAngleDeltaCommand,\n\n SpotLight::SHADOW_BIAS => SetSpotLightShadowBiasCommand,\n\n SpotLight::DISTANCE => SetSpotLightDistanceCommand,\n\n SpotLight::COOKIE_TEXTURE => SetSpotLightCookieTextureCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n SpotLight::BASE_LIGHT => handle_base_light_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 57, "score": 290021.61394674785 }, { "content": "pub fn handle_directional_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_directional_light() {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n DirectionalLight::BASE_LIGHT => {\n\n handle_base_light_property_changed(inner, handle, node)\n\n }\n\n DirectionalLight::CSM_OPTIONS => match inner.name.as_ref() {\n\n CsmOptions::SPLIT_OPTIONS => match inner.value {\n\n FieldKind::Inspectable(ref split_options_value) => {\n\n if let FieldKind::Collection(ref collection_changed) =\n\n split_options_value.value\n\n {\n\n if let CollectionChanged::ItemChanged { .. 
} = **collection_changed\n\n {\n\n match split_options_value.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 58, "score": 290021.6139467478 }, { "content": "pub fn handle_point_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_point_light() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n PointLight::SHADOW_BIAS => SetPointLightShadowBiasCommand,\n\n PointLight::RADIUS => SetPointLightRadiusCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n PointLight::BASE_LIGHT => handle_base_light_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 59, "score": 290021.61394674785 }, { "content": "pub fn handle_base_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &mut Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n BaseLight::COLOR => SetLightColorCommand,\n\n BaseLight::CAST_SHADOWS => SetLightCastShadowsCommand,\n\n BaseLight::SCATTER => SetLightScatterCommand,\n\n BaseLight::SCATTER_ENABLED => SetLightScatterEnabledCommand,\n\n BaseLight::INTENSITY => SetLightIntensityCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n BaseLight::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 60, "score": 290021.6139467478 }, { "content": "pub fn enum_prop_ident(\n\n variant_args: &args::VariantArgs,\n\n nth: usize,\n\n field: &args::FieldArgs,\n\n) -> Ident {\n\n let variant_ident = 
&variant_args.ident;\n\n let field_ident = self::field_ident(&variant_args.fields, nth, field);\n\n\n\n let ident = format!(\"{}_{}\", variant_ident, field_ident).to_case(Case::UpperSnake);\n\n syn::parse_str(&ident).unwrap()\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils/prop_keys.rs", "rank": 61, "score": 289525.38663345296 }, { "content": "pub fn gen_inspect_fn_body(\n\n field_prefix: FieldPrefix,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n // `inspect` function body, consisting of a sequence of quotes\n\n let mut quotes = Vec::new();\n\n\n\n let props = field_args\n\n .fields\n\n .iter()\n\n // enumerate first, and then filter!\n\n .enumerate()\n\n .filter(|(_i, f)| !f.skip)\n\n .map(|(i, field)| self::quote_field_prop(&field_prefix, i, field, field_args.style));\n\n\n\n quotes.push(quote! {\n\n let mut props = Vec::new();\n\n #(props.push(#props);)*\n\n });\n\n\n\n // concatenate the quotes\n\n quote! {\n\n #(#quotes)*\n\n props\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 62, "score": 287801.0202524091 }, { "content": "pub trait BaseScript: Visit + Inspect + Send + Debug + 'static {\n\n fn clone_box(&self) -> Box<dyn ScriptTrait>;\n\n}\n\n\n\nimpl<T> BaseScript for T\n\nwhere\n\n T: Clone + ScriptTrait,\n\n{\n\n fn clone_box(&self) -> Box<dyn ScriptTrait> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\npub struct ScriptContext<'a, 'b, 'c> {\n\n pub dt: f32,\n\n pub plugin: &'a mut dyn Plugin,\n\n pub node: &'b mut Node,\n\n pub handle: Handle<Node>,\n\n pub scene: &'c mut Scene,\n\n pub resource_manager: &'a ResourceManager,\n\n}\n\n\n", "file_path": "src/script/mod.rs", "rank": 63, "score": 285687.68091292586 }, { "content": "pub fn handle_rigid_body_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n rigid_body: &mut RigidBody,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n 
handle_properties!(args.name.as_ref(), handle, value,\n\n RigidBody::MASS => SetBodyMassCommand,\n\n RigidBody::LIN_VEL => SetBodyLinVelCommand,\n\n RigidBody::ANG_VEL => SetBodyAngVelCommand,\n\n RigidBody::BODY_TYPE => SetBodyStatusCommand,\n\n RigidBody::X_ROTATION_LOCKED => SetBodyXRotationLockedCommand,\n\n RigidBody::Y_ROTATION_LOCKED => SetBodyYRotationLockedCommand,\n\n RigidBody::Z_ROTATION_LOCKED => SetBodyZRotationLockedCommand,\n\n RigidBody::TRANSLATION_LOCKED => SetBodyTranslationLockedCommand,\n\n RigidBody::CAN_SLEEP => SetBodyCanSleepCommand,\n\n RigidBody::CCD_ENABLED => SetBodyCcdEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n RigidBody::BASE => handle_base_property_changed(inner, handle, rigid_body),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/rigid_body.rs", "rank": 64, "score": 283153.2777232872 }, { "content": "pub fn handle_rigid_body2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n rigid_body: &mut RigidBody,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n RigidBody::MASS => SetBodyMassCommand,\n\n RigidBody::LIN_VEL => SetBodyLinVelCommand,\n\n RigidBody::ANG_VEL => SetBodyAngVelCommand,\n\n RigidBody::BODY_TYPE => SetBodyStatusCommand,\n\n RigidBody::ROTATION_LOCKED => SetBodyRotationLockedCommand,\n\n RigidBody::TRANSLATION_LOCKED => SetBodyTranslationLockedCommand,\n\n RigidBody::CAN_SLEEP => SetBodyCanSleepCommand,\n\n RigidBody::CCD_ENABLED => SetBodyCcdEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n RigidBody::BASE => handle_base_property_changed(inner, handle, rigid_body),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/rigid_body2d.rs", "rank": 65, "score": 283153.2777232872 }, { "content": 
"pub fn fetch_node_center(handle: Handle<UiNode>, ctx: &BuildContext) -> Vector2<f32> {\n\n ctx.try_get_node(handle)\n\n .map(|node| node.center())\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "editor/src/utils/mod.rs", "rank": 66, "score": 282980.59587140434 }, { "content": "#[inline]\n\npub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 67, "score": 282224.9130301357 }, { "content": "pub fn impl_inspect(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_inspect_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variant_args) => self::impl_inspect_enum(&ty_args, variant_args),\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect.rs", "rank": 68, "score": 281963.0636754653 }, { "content": "fn handle_transition_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<TransitionDefinition>,\n\n) -> Option<AbsmCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n TransitionDefinition::NAME => Some(AbsmCommand::new(SetTransitionNameCommand {\n\n handle,\n\n value: value.cast_clone()?,\n\n })),\n\n TransitionDefinition::RULE => Some(AbsmCommand::new(SetTransitionRuleCommand {\n\n handle,\n\n value: value.cast_clone()?,\n\n })),\n\n TransitionDefinition::TRANSITION_TIME => {\n\n Some(AbsmCommand::new(SetTransitionTimeCommand {\n\n handle,\n\n value: value.cast_clone()?,\n\n }))\n\n }\n", "file_path": "editor/src/absm/inspector/mod.rs", "rank": 69, "score": 281962.2491424562 }, { "content": "fn handle_state_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<StateDefinition>,\n\n state_definition: &StateDefinition,\n\n) -> Option<AbsmCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match 
args.name.as_ref() {\n\n StateDefinition::POSITION => Some(AbsmCommand::new(MoveStateNodeCommand::new(\n\n handle,\n\n state_definition.position,\n\n value.cast_clone()?,\n\n ))),\n\n StateDefinition::NAME => Some(AbsmCommand::new(SetStateNameCommand {\n\n handle,\n\n value: value.cast_clone()?,\n\n })),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "editor/src/absm/inspector/mod.rs", "rank": 70, "score": 281962.2491424562 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 71, "score": 279804.9378277728 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/navmesh.rs", "rank": 72, "score": 279804.9378277728 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/lod.rs", "rank": 73, "score": 279804.9378277728 }, { "content": "fn switch_window_state(window: Handle<UiNode>, ui: &UserInterface, center: bool) {\n\n let current_state = ui.node(window).visibility();\n\n ui.send_message(if current_state {\n\n WindowMessage::close(window, MessageDirection::ToWidget)\n\n } else {\n\n WindowMessage::open(window, MessageDirection::ToWidget, center)\n\n })\n\n}\n\n\n\nimpl ViewMenu {\n\n pub fn new(ctx: &mut BuildContext) -> Self {\n\n let sidebar;\n\n let asset_browser;\n\n let world_viewer;\n\n let light_panel;\n\n let log_panel;\n\n let menu = create_root_menu_item(\n\n \"View\",\n\n vec![\n\n {\n", "file_path": "editor/src/menu/view.rs", "rank": 74, "score": 279432.5164620985 }, { "content": "fn create_ui(engine: &mut Engine) -> Interface {\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n let debug_text = TextBuilder::new(WidgetBuilder::new()).build(ctx);\n\n\n\n let definition_container = 
Rc::new(PropertyEditorDefinitionContainer::new());\n\n\n\n let inspector;\n\n WindowBuilder::new(WidgetBuilder::new().with_width(400.0))\n\n .with_title(WindowTitle::text(\"Inspector\"))\n\n .with_content({\n\n inspector = InspectorBuilder::new(\n\n WidgetBuilder::new().with_desired_position(Vector2::new(200.0, 200.0)),\n\n )\n\n .build(ctx);\n\n inspector\n\n })\n\n .build(ctx);\n\n\n\n Interface {\n\n debug_text,\n\n inspector,\n\n definition_container,\n\n }\n\n}\n\n\n", "file_path": "examples/inspector.rs", "rank": 75, "score": 279360.60384480853 }, { "content": "fn filtered_out(filter: &mut Option<Filter>, path: &Path) -> bool {\n\n match filter.as_mut() {\n\n Some(filter) => !filter.0.borrow_mut().deref_mut().lock().unwrap()(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/file_browser.rs", "rank": 76, "score": 279089.5562871134 }, { "content": "/// A trait for resource data.\n\npub trait ResourceData: 'static + Default + Debug + Visit + Send {\n\n /// Returns path of resource data.\n\n fn path(&self) -> Cow<Path>;\n\n\n\n /// Sets new path to resource data.\n\n fn set_path(&mut self, path: PathBuf);\n\n}\n\n\n", "file_path": "fyrox-resource/src/lib.rs", "rank": 77, "score": 279044.72092454275 }, { "content": "pub fn create_file_selector(\n\n ctx: &mut BuildContext,\n\n extension: &'static str,\n\n mode: FileBrowserMode,\n\n) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0)).open(false),\n\n )\n\n .with_filter(Filter::new(move |path| {\n\n if let Some(ext) = path.extension() {\n\n ext.to_string_lossy().as_ref() == extension\n\n } else {\n\n path.is_dir()\n\n }\n\n }))\n\n .with_mode(mode)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/utils/mod.rs", "rank": 78, "score": 278403.74968195794 }, { "content": "#[inline]\n\npub fn barycentric_to_world(\n\n bary: (f32, f32, f32),\n\n pa: Vector3<f32>,\n\n pb: Vector3<f32>,\n\n pc: Vector3<f32>,\n\n) -> 
Vector3<f32> {\n\n pa.scale(bary.0) + pb.scale(bary.1) + pc.scale(bary.2)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 79, "score": 277540.5553163251 }, { "content": "pub fn fetch_node_screen_center(handle: Handle<UiNode>, ctx: &BuildContext) -> Vector2<f32> {\n\n ctx.try_get_node(handle)\n\n .map(|node| node.screen_bounds().center())\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "editor/src/utils/mod.rs", "rank": 80, "score": 277010.93469767954 }, { "content": "pub fn make_mark(\n\n ctx: &mut BuildContext,\n\n text: &str,\n\n column: usize,\n\n color: Color,\n\n) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_background(Brush::Solid(color))\n\n .with_foreground(Brush::Solid(Color::TRANSPARENT))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "fyrox-ui/src/vec/mod.rs", "rank": 81, "score": 276888.19871007995 }, { "content": "#[inline]\n\npub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 82, "score": 276813.9689979109 }, { "content": "/// Maps key code to its name. 
Can be useful if you making adjustable key bindings in your\n\n/// game and you need quickly map key code to its name.\n\npub fn virtual_key_code_name(code: VirtualKeyCode) -> &'static str {\n\n match code {\n\n VirtualKeyCode::Key1 => \"1\",\n\n VirtualKeyCode::Key2 => \"2\",\n\n VirtualKeyCode::Key3 => \"3\",\n\n VirtualKeyCode::Key4 => \"4\",\n\n VirtualKeyCode::Key5 => \"5\",\n\n VirtualKeyCode::Key6 => \"6\",\n\n VirtualKeyCode::Key7 => \"7\",\n\n VirtualKeyCode::Key8 => \"8\",\n\n VirtualKeyCode::Key9 => \"9\",\n\n VirtualKeyCode::Key0 => \"0\",\n\n VirtualKeyCode::A => \"A\",\n\n VirtualKeyCode::B => \"B\",\n\n VirtualKeyCode::C => \"C\",\n\n VirtualKeyCode::D => \"D\",\n\n VirtualKeyCode::E => \"E\",\n\n VirtualKeyCode::F => \"F\",\n\n VirtualKeyCode::G => \"G\",\n\n VirtualKeyCode::H => \"H\",\n", "file_path": "src/utils/mod.rs", "rank": 83, "score": 272112.4875906946 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 84, "score": 271327.65682926687 }, { "content": "#[test]\n\nfn inspect_default() {\n\n #[derive(Debug, Default, Inspect)]\n\n pub struct Data {\n\n the_field: String,\n\n another_field: f32,\n\n }\n\n\n\n let data = Data::default();\n\n\n\n let expected = vec![\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Data>(),\n\n name: \"the_field\",\n\n display_name: \"The Field\",\n\n value: &data.the_field,\n\n ..default_prop()\n\n },\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Data>(),\n\n name: \"another_field\",\n\n display_name: \"Another Field\",\n\n value: &data.another_field,\n\n ..default_prop()\n\n },\n\n ];\n\n\n\n assert_eq!(data.properties(), expected);\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 85, "score": 270902.27718015027 }, { "content": "#[test]\n\nfn inspect_enum() {\n\n #[derive(Debug, Inspect)]\n\n pub struct NonCopy {\n\n inner: u32,\n\n }\n\n\n\n #[derive(Debug, Inspect)]\n\n pub enum Data {\n\n Named { x: u32, y: u32, z: NonCopy },\n\n Tuple(f32, f32),\n\n Unit,\n\n }\n\n\n\n let data = Data::Named {\n\n x: 0,\n\n y: 1,\n\n z: NonCopy { inner: 10 },\n\n };\n\n\n\n assert_eq!(\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 86, "score": 270881.3001338856 }, { "content": "#[test]\n\nfn inspect_struct() {\n\n #[derive(Debug, Default, Inspect)]\n\n struct Tuple(f32, f32);\n\n\n\n let x = Tuple::default();\n\n assert_eq!(\n\n x.properties(),\n\n vec![\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Tuple>(),\n\n name: \"0\",\n\n display_name: \"0\",\n\n value: &x.0,\n\n ..default_prop()\n\n },\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<Tuple>(),\n\n name: \"1\",\n\n display_name: \"1\",\n\n value: &x.1,\n", 
"file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 87, "score": 270805.35717636807 }, { "content": "#[inline]\n\npub fn vec3_to_vec2_by_plane(\n\n plane_class: PlaneClass,\n\n normal: Vector3<f32>,\n\n point: Vector3<f32>,\n\n) -> Vector2<f32> {\n\n match plane_class {\n\n PlaneClass::XY => {\n\n if normal.z < 0.0 {\n\n Vector2::new(point.y, point.x)\n\n } else {\n\n Vector2::new(point.x, point.y)\n\n }\n\n }\n\n PlaneClass::XZ => {\n\n if normal.y < 0.0 {\n\n Vector2::new(point.x, point.z)\n\n } else {\n\n Vector2::new(point.z, point.x)\n\n }\n\n }\n\n PlaneClass::YZ => {\n\n if normal.x < 0.0 {\n\n Vector2::new(point.z, point.y)\n\n } else {\n\n Vector2::new(point.y, point.z)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 88, "score": 270149.3033870123 }, { "content": "#[inline]\n\npub fn ray_rect_intersection(\n\n rect: Rect<f32>,\n\n origin: Vector2<f32>,\n\n dir: Vector2<f32>,\n\n) -> Option<IntersectionResult> {\n\n let min = rect.left_top_corner();\n\n let max = rect.right_bottom_corner();\n\n\n\n let (mut tmin, mut tmax) = if dir.x >= 0.0 {\n\n ((min.x - origin.x) / dir.x, (max.x - origin.x) / dir.x)\n\n } else {\n\n ((max.x - origin.x) / dir.x, (min.x - origin.x) / dir.x)\n\n };\n\n\n\n let (tymin, tymax) = if dir.y >= 0.0 {\n\n ((min.y - origin.y) / dir.y, (max.y - origin.y) / dir.y)\n\n } else {\n\n ((max.y - origin.y) / dir.y, (min.y - origin.y) / dir.y)\n\n };\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 89, "score": 270149.3033870123 }, { "content": "#[inline]\n\npub fn get_barycentric_coords_2d(\n\n p: Vector2<f32>,\n\n a: Vector2<f32>,\n\n b: Vector2<f32>,\n\n c: Vector2<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = b - a;\n\n let v1 = c - a;\n\n let v2 = p - a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let inv_denom = 1.0 / (d00 * d11 - d01.powi(2));\n\n\n\n let v = (d11 * d20 - d01 * d21) * 
inv_denom;\n\n let w = (d00 * d21 - d01 * d20) * inv_denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 90, "score": 270149.3033870123 }, { "content": "#[inline]\n\npub fn is_point_inside_2d_triangle(\n\n point: Vector2<f32>,\n\n pt_a: Vector2<f32>,\n\n pt_b: Vector2<f32>,\n\n pt_c: Vector2<f32>,\n\n) -> bool {\n\n let ba = pt_b - pt_a;\n\n let ca = pt_c - pt_a;\n\n\n\n let vp = point - pt_a;\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot_02 = ca.dot(&vp);\n\n let dot_12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot_02 - ca_dot_ba * dot_12) * inv_denom;\n\n let v = (ca_dot_ca * dot_12 - ca_dot_ba * dot_02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 91, "score": 270149.3033870123 }, { "content": "#[inline]\n\npub fn get_barycentric_coords(\n\n p: &Vector3<f32>,\n\n a: &Vector3<f32>,\n\n b: &Vector3<f32>,\n\n c: &Vector3<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = *b - *a;\n\n let v1 = *c - *a;\n\n let v2 = *p - *a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let denom = d00 * d11 - d01.powi(2);\n\n\n\n let v = (d11 * d20 - d01 * d21) / denom;\n\n let w = (d00 * d21 - d01 * d20) / denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 92, "score": 270149.3033870123 }, { "content": "pub fn print() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are 
not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/profiler.rs", "rank": 93, "score": 269873.52943347674 }, { "content": "pub fn is_scene_needs_to_be_saved(editor_scene: Option<&EditorScene>) -> bool {\n\n editor_scene\n\n .as_ref()\n\n .map_or(false, |s| s.has_unsaved_changes || s.path.is_none())\n\n}\n\n\n\nimpl EditorScene {\n\n pub fn from_native_scene(mut scene: Scene, engine: &mut Engine, path: Option<PathBuf>) -> Self {\n\n let root = PivotBuilder::new(BaseBuilder::new()).build(&mut scene.graph);\n\n let camera_controller = CameraController::new(&mut scene.graph, root);\n\n\n\n // Prevent physics simulation in while editing scene.\n\n scene.graph.physics.enabled = false;\n\n scene.graph.physics2d.enabled = false;\n\n\n\n let mut navmeshes = Pool::new();\n\n\n\n for navmesh in scene.navmeshes.iter() {\n\n let _ = navmeshes.spawn(Navmesh {\n\n vertices: navmesh\n", "file_path": "editor/src/scene/mod.rs", "rank": 94, "score": 268657.72104693216 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[wasm_bindgen(js_namespace = console)]\n\n fn error(msg: String);\n\n\n\n type Error;\n\n\n\n #[wasm_bindgen(constructor)]\n\n fn new() -> Error;\n\n\n\n #[wasm_bindgen(structural, method, getter)]\n\n fn stack(error: &Error) -> String;\n\n}\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 95, "score": 268015.90634766704 }, { "content": "#[derive(Clone)]\n\nenum Selection {\n\n Keys { keys: FxHashSet<Uuid> },\n\n // It is ok to use index directly in case of tangents since\n\n // we won't change position of keys so index will be valid.\n\n LeftTangent { key: usize },\n\n RightTangent { key: usize },\n\n}\n\n\n", "file_path": "fyrox-ui/src/curve/mod.rs", "rank": 96, "score": 267898.2026250595 }, { "content": "#[inline]\n\npub fn m4x4_approx_eq(a: &Matrix4<f32>, b: 
&Matrix4<f32>) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(a, b)| (*a - *b).abs() <= 0.001)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::algebra::Vector2;\n\n use crate::math::Rect;\n\n use crate::math::SmoothAngle;\n\n\n\n #[test]\n\n fn ray_rect_intersection() {\n\n let rect = Rect::new(0.0, 0.0, 10.0, 10.0);\n\n\n\n // Edge-case: Horizontal ray.\n\n assert!(super::ray_rect_intersection(\n\n rect,\n\n Vector2::new(-1.0, 5.0),\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 97, "score": 267771.7007958988 }, { "content": "/// A trait that allows you to \"look inside\" an object that implements it. It is used for lightweight\n\n/// runtime read-only reflection. The most common use case for it is various editors.\n\n///\n\n/// It is not advised to manually implement this trait. You should use `#[derive(Inspect)]` whenever\n\n/// possible.\n\n///\n\n/// ## `#[derive(Inspect)]`\n\n///\n\n/// The proc macro reduces amount of boilerplate code to the minimum and significantly reduces a\n\n/// change of error.\n\n///\n\n/// ### Supported attributes\n\n///\n\n/// - `#[inspect(name = \"new_field_name\")]` - override field name.\n\n/// - `#[inspect(display_name = \"Human-readable Name\")]` - override display name.\n\n/// - `#[inspect(group = \"Group Name\")]` - override group name.\n\n/// - `#[inspect(expand)]` - extends the list of properties in case of composition, in other words it\n\n/// \"flattens\" and exposes the properties of an inner object. 
Useful when you have a structure that\n\n/// has some fields that are complex objects that implements `Inspect` too.\n\npub trait Inspect {\n\n /// Returns information about \"public\" properties.\n\n fn properties(&self) -> Vec<PropertyInfo<'_>>;\n\n}\n\n\n\nimpl<T: Inspect> Inspect for Option<T> {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n match self {\n\n Some(v) => v.properties(),\n\n None => vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Inspect> Inspect for Box<T> {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n (**self).properties()\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/inspect.rs", "rank": 98, "score": 267648.70744525746 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 99, "score": 267361.5595379707 } ]
Rust
crates/rome_js_parser/src/parse.rs
mrkldshv/tools
c173b0c01ee499fcb49d6ae328f1229daa183868
use crate::token_source::Trivia; use crate::*; use rome_diagnostics::Severity; use rome_js_syntax::{ JsAnyRoot, JsExpressionSnipped, JsLanguage, JsModule, JsScript, JsSyntaxNode, ModuleKind, SourceType, }; use rome_rowan::AstNode; use std::marker::PhantomData; #[derive(Debug, Clone)] pub struct Parse<T> { root: JsSyntaxNode, errors: Vec<ParseDiagnostic>, _ty: PhantomData<T>, } impl<T> Parse<T> { pub fn new_module(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new_script(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Parse { root, errors, _ty: PhantomData, } } pub fn cast<N: AstNode<Language = JsLanguage>>(self) -> Option<Parse<N>> { if N::can_cast(self.syntax().kind()) { Some(Parse::new(self.root, self.errors)) } else { None } } pub fn syntax(&self) -> JsSyntaxNode { self.root.clone() } pub fn diagnostics(&self) -> &[Diagnostic] { self.errors.as_slice() } pub fn into_diagnostics(self) -> Vec<Diagnostic> { self.errors } pub fn has_errors(&self) -> bool { self.errors.iter().any(|diagnostic| diagnostic.is_error()) } } impl<T: AstNode<Language = JsLanguage>> Parse<T> { pub fn tree(&self) -> T { self.try_tree().unwrap_or_else(|| { panic!( "Expected tree to be a {} but root is:\n{:#?}", std::any::type_name::<T>(), self.syntax() ) }) } pub fn try_tree(&self) -> Option<T> { T::cast(self.syntax()) } pub fn ok(self) -> Result<T, Vec<ParseDiagnostic>> { if !self.errors.iter().any(|d| d.severity == Severity::Error) { Ok(self.tree()) } else { Err(self.errors) } } } pub fn parse_common( text: &str, file_id: usize, source_type: SourceType, ) -> (Vec<Event>, Vec<ParseDiagnostic>, Vec<Trivia>) { let mut parser = crate::Parser::new(text, file_id, source_type); crate::syntax::program::parse(&mut parser); let (events, trivia, errors) = parser.finish(); (events, errors, trivia) } pub fn parse_script(text: &str, file_id: 
usize) -> Parse<JsScript> { parse( text, file_id, SourceType::js_module().with_module_kind(ModuleKind::Script), ) .cast::<JsScript>() .unwrap() } pub fn parse_module(text: &str, file_id: usize) -> Parse<JsModule> { parse(text, file_id, SourceType::js_module()) .cast::<JsModule>() .unwrap() } pub fn parse(text: &str, file_id: usize, source_type: SourceType) -> Parse<JsAnyRoot> { tracing::debug_span!("parse", file_id = file_id).in_scope(move || { let (events, errors, tokens) = parse_common(text, file_id, source_type); let mut tree_sink = LosslessTreeSink::new(text, &tokens); crate::process(&mut tree_sink, events, errors); let (green, parse_errors) = tree_sink.finish(); Parse::new(green, parse_errors) }) } pub fn parse_expression(text: &str, file_id: usize) -> Parse<JsExpressionSnipped> { let mut parser = crate::Parser::new(text, file_id, SourceType::js_module()); crate::syntax::expr::parse_expression_snipped(&mut parser).unwrap(); let (events, tokens, errors) = parser.finish(); let mut tree_sink = LosslessTreeSink::new(text, &tokens); crate::process(&mut tree_sink, events, errors); let (green, parse_errors) = tree_sink.finish(); Parse::new_script(green, parse_errors) }
use crate::token_source::Trivia; use crate::*; use rome_diagnostics::Severity; use rome_js_syntax::{ JsAnyRoot, JsExpressionSnipped, JsLanguage, JsModule, JsScript, JsSyntaxNode, ModuleKind, SourceType, }; use rome_rowan::AstNode; use std::marker::PhantomData; #[derive(Debug, Clone)] pub struct Parse<T> { root: JsSyntaxNode, errors: Vec<ParseDiagnostic>, _ty: PhantomData<T>, } impl<T> Parse<T> { pub fn new_module(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new_script(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Parse { root, errors, _ty: PhantomData, } } pub fn cast<N: AstNode<Language = JsLanguage>>(self) -> Option<Parse<N>> { if N::can_cast(self.syntax().kind()) { Some(Parse::new(self.root, self.errors)) } else { None } } pub fn syntax(&self) -> JsSyntaxNode { self.root.clone() } pub fn diagnostics(&self) -> &[Diagnostic] { self.errors.as_slice() } pub fn into_diagnostics(self) -> Vec<Diagnostic> { self.errors } pub fn has_errors(&self) -> bool { self.errors.iter().any(|diagnostic| diagnostic.is_error()) } } impl<T: AstNode<Language = JsLanguage>> Parse<T> { pub fn tree(&self) -> T { self.try_tree().unwrap_or_else(|| { panic!( "Expected tree to be a {} but root is:\n{:#?}", std::any::type_name::<T>(), self.syntax() ) }) } pub fn try_tree(&self) -> Option<T> { T::cast(self.syntax()) }
} pub fn parse_common( text: &str, file_id: usize, source_type: SourceType, ) -> (Vec<Event>, Vec<ParseDiagnostic>, Vec<Trivia>) { let mut parser = crate::Parser::new(text, file_id, source_type); crate::syntax::program::parse(&mut parser); let (events, trivia, errors) = parser.finish(); (events, errors, trivia) } pub fn parse_script(text: &str, file_id: usize) -> Parse<JsScript> { parse( text, file_id, SourceType::js_module().with_module_kind(ModuleKind::Script), ) .cast::<JsScript>() .unwrap() } pub fn parse_module(text: &str, file_id: usize) -> Parse<JsModule> { parse(text, file_id, SourceType::js_module()) .cast::<JsModule>() .unwrap() } pub fn parse(text: &str, file_id: usize, source_type: SourceType) -> Parse<JsAnyRoot> { tracing::debug_span!("parse", file_id = file_id).in_scope(move || { let (events, errors, tokens) = parse_common(text, file_id, source_type); let mut tree_sink = LosslessTreeSink::new(text, &tokens); crate::process(&mut tree_sink, events, errors); let (green, parse_errors) = tree_sink.finish(); Parse::new(green, parse_errors) }) } pub fn parse_expression(text: &str, file_id: usize) -> Parse<JsExpressionSnipped> { let mut parser = crate::Parser::new(text, file_id, SourceType::js_module()); crate::syntax::expr::parse_expression_snipped(&mut parser).unwrap(); let (events, tokens, errors) = parser.finish(); let mut tree_sink = LosslessTreeSink::new(text, &tokens); crate::process(&mut tree_sink, events, errors); let (green, parse_errors) = tree_sink.finish(); Parse::new_script(green, parse_errors) }
pub fn ok(self) -> Result<T, Vec<ParseDiagnostic>> { if !self.errors.iter().any(|d| d.severity == Severity::Error) { Ok(self.tree()) } else { Err(self.errors) } }
function_block-full_function
[ { "content": "#[inline]\n\npub fn process(sink: &mut impl TreeSink, mut events: Vec<Event>, errors: Vec<ParseDiagnostic>) {\n\n sink.errors(errors);\n\n let mut forward_parents = Vec::new();\n\n\n\n for i in 0..events.len() {\n\n match &mut events[i] {\n\n Event::Start {\n\n kind: TOMBSTONE, ..\n\n } => (),\n\n\n\n Event::Start {\n\n kind,\n\n forward_parent,\n\n ..\n\n } => {\n\n // For events[A, B, C], B is A's forward_parent, C is B's forward_parent,\n\n // in the normal control flow, the parent-child relation: `A -> B -> C`,\n\n // while with the magic forward_parent, it writes: `C <- B <- A`.\n\n\n\n // append `A` into parents.\n", "file_path": "crates/rome_js_parser/src/event.rs", "rank": 0, "score": 300488.47706716787 }, { "content": "/// Installs a global panic handler to show a user-friendly error message\n\n/// in case the CLI panics\n\npub fn setup_panic_handler() {\n\n set_hook(Box::new(panic_handler))\n\n}\n\n\n", "file_path": "crates/rome_cli/src/panic.rs", "rank": 1, "score": 299519.4822285829 }, { "content": "pub fn run_analyzer(root: &JsAnyRoot) {\n\n analyze(0, root, AnalysisFilter::default(), |event| {\n\n black_box(event.diagnostic());\n\n black_box(event.action());\n\n });\n\n}\n\n\n\nimpl AnalyzerMeasurement {\n\n fn total(&self) -> Duration {\n\n self.analysis\n\n }\n\n\n\n pub(crate) fn summary(&self) -> String {\n\n format!(\"{}, Analysis: {:?}\", self.id, self.total())\n\n }\n\n}\n\n\n\nimpl Display for AnalyzerMeasurement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let _ = writeln!(f, \"\\tAnalysis: {:>10?}\", self.analysis);\n\n let _ = writeln!(f, \"\\t ----------\");\n\n let _ = writeln!(f, \"\\tTotal: {:>10?}\", self.total());\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "xtask/bench/src/features/analyzer.rs", "rank": 2, "score": 290966.376485268 }, { "content": "/// Formats a single node within a file, supported by Rome.\n\n///\n\n/// This runs a simple heuristic to determine the initial indentation\n\n/// 
level of the node based on the provided [FormatContext], which\n\n/// must match currently the current initial of the file. Additionally,\n\n/// because the reformatting happens only locally the resulting code\n\n/// will be indented with the same level as the original selection,\n\n/// even if it's a mismatch from the rest of the block the selection is in\n\n///\n\n/// It returns a [Formatted] result\n\npub fn format_sub_tree<\n\n C: FormatContext,\n\n L: Language,\n\n N: FormatWithRule<C, Item = SyntaxNode<L>>,\n\n>(\n\n context: C,\n\n root: &N,\n\n) -> FormatResult<Printed> {\n\n let syntax = root.item();\n\n // Determine the initial indentation level for the printer by inspecting the trivia pieces\n\n // of each token from the first token of the common root towards the start of the file\n\n let mut tokens = std::iter::successors(syntax.first_token(), |token| token.prev_token());\n\n\n\n // From the iterator of tokens, build an iterator of trivia pieces (once again the iterator is\n\n // reversed, starting from the last trailing trivia towards the first leading trivia).\n\n // The first token is handled specially as we only wan to consider its leading trivia pieces\n\n let first_token = tokens.next();\n\n let first_token_trivias = first_token\n\n .into_iter()\n\n .flat_map(|token| token.leading_trivia().pieces().rev());\n", "file_path": "crates/rome_formatter/src/lib.rs", "rank": 3, "score": 285778.8221391137 }, { "content": "/// Formats a single node within a file, supported by Rome.\n\n///\n\n/// This runs a simple heuristic to determine the initial indentation\n\n/// level of the node based on the provided [FormatContext], which\n\n/// must match currently the current initial of the file. 
Additionally,\n\n/// because the reformatting happens only locally the resulting code\n\n/// will be indented with the same level as the original selection,\n\n/// even if it's a mismatch from the rest of the block the selection is in\n\n///\n\n/// It returns a [Formatted] result\n\npub fn format_sub_tree(context: JsFormatContext, root: &JsSyntaxNode) -> FormatResult<Printed> {\n\n rome_formatter::format_sub_tree(context, &root.format())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::format_range;\n\n\n\n use crate::context::JsFormatContext;\n\n use rome_formatter::IndentStyle;\n\n use rome_js_parser::parse_script;\n\n use rome_rowan::{TextRange, TextSize};\n\n\n\n #[test]\n\n fn test_range_formatting() {\n\n let input = \"\n\nwhile(\n\n true\n\n) {\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 4, "score": 281117.0616512241 }, { "content": "pub fn invalid_digits_after_unicode_escape_sequence(\n\n file_id: FileId,\n\n start: usize,\n\n end: usize,\n\n) -> Diagnostic {\n\n Diagnostic::error(file_id, \"\", \"invalid digits after unicode escape sequence\")\n\n .primary(start..end, \"expected valid unicode escape sequence\")\n\n}\n", "file_path": "crates/rome_js_parser/src/lexer/errors.rs", "rank": 5, "score": 271578.67072737555 }, { "content": "pub fn css_root<I>(items: I) -> CssRoot\n\nwhere\n\n I: IntoIterator<Item = CssAnyRule>,\n\n I::IntoIter: ExactSizeIterator,\n\n{\n\n CssRoot::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_ROOT,\n\n items\n\n .into_iter()\n\n .map(|item| Some(item.into_syntax().into())),\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 6, "score": 256191.67074553407 }, { "content": "fn is_range_formatting_root(node: &JsSyntaxNode) -> bool {\n\n let kind = node.kind();\n\n\n\n // Do not format variable declaration nodes, format the whole statement instead\n\n if matches!(kind, JsSyntaxKind::JS_VARIABLE_DECLARATION) {\n\n return false;\n\n }\n\n\n\n 
JsAnyStatement::can_cast(kind)\n\n || JsAnyDeclaration::can_cast(kind)\n\n || matches!(\n\n kind,\n\n JsSyntaxKind::JS_DIRECTIVE | JsSyntaxKind::JS_EXPORT | JsSyntaxKind::JS_IMPORT\n\n )\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 7, "score": 252221.59356238323 }, { "content": "pub fn json_root(json_value: JsonValue) -> JsonRoot {\n\n JsonRoot::unwrap_cast(SyntaxNode::new_detached(\n\n JsonSyntaxKind::JSON_ROOT,\n\n [Some(SyntaxElement::Node(json_value.into_syntax()))],\n\n ))\n\n}\n", "file_path": "crates/rome_json_factory/src/generated/node_factory.rs", "rank": 8, "score": 252046.42088423134 }, { "content": "pub fn benchmark_analyze_lib(id: &str, root: &JsAnyRoot) -> BenchmarkSummary {\n\n let analyzer_timer = timing::start();\n\n run_analyzer(root);\n\n let analyzer_duration = analyzer_timer.stop();\n\n\n\n BenchmarkSummary::Analyzer(AnalyzerMeasurement {\n\n id: id.to_string(),\n\n analysis: analyzer_duration,\n\n })\n\n}\n\n\n", "file_path": "xtask/bench/src/features/analyzer.rs", "rank": 9, "score": 250300.19737734343 }, { "content": "/// Run the analyzer on the provided `root`: this process will use the given `filter`\n\n/// to selectively restrict analysis to specific rules / a specific source range,\n\n/// then call the `callback` when an analysis rule emits a diagnostic or action\n\npub fn analyze<B>(file_id: FileId, root: &JsAnyRoot, filter: AnalysisFilter, mut callback: B)\n\nwhere\n\n B: FnMut(&dyn AnalyzerSignal),\n\n{\n\n let registry = RuleRegistry::with_filter(&filter);\n\n\n\n let mut iter = root.syntax().preorder();\n\n while let Some(event) = iter.next() {\n\n let node = match event {\n\n WalkEvent::Enter(node) => node,\n\n WalkEvent::Leave(_) => continue,\n\n };\n\n\n\n if let Some(range) = filter.range {\n\n if node.text_range().ordering(range).is_ne() {\n\n iter.skip_subtree();\n\n continue;\n\n }\n\n }\n\n\n", "file_path": "crates/rome_analyze/src/lib.rs", "rank": 10, "score": 249050.9393570564 }, { "content": 
"/// Returns true if this node has a suppression comment of the provided category\n\npub fn has_suppressions_category(category: SuppressionCategory, node: &JsSyntaxNode) -> bool {\n\n // Lists cannot have a suppression comment attached, it must\n\n // belong to either the entire parent node or one of the children\n\n let kind = node.kind();\n\n if JsAnyRoot::can_cast(kind) || kind.is_list() {\n\n return false;\n\n }\n\n\n\n let first_token = match node.first_token() {\n\n Some(token) => token,\n\n None => return false,\n\n };\n\n\n\n first_token\n\n .leading_trivia()\n\n .pieces()\n\n .filter_map(|trivia| trivia.as_comments())\n\n .any(|comment| {\n\n parse_suppression_comment(comment.text())\n\n .flat_map(|suppression| suppression.categories)\n", "file_path": "crates/rome_js_syntax/src/suppression.rs", "rank": 11, "score": 248556.98845613183 }, { "content": "struct ExpectedTokens(String);\n\n\n\nimpl ToDiagnostic for ExpectedTokens {\n\n fn to_diagnostic(self, p: &Parser) -> Diagnostic {\n\n match p.cur() {\n\n JsSyntaxKind::EOF => p\n\n .err_builder(&format!(\"expected {} but instead the file ends\", self.0))\n\n .primary(p.cur_range(), \"the file ends here\"),\n\n _ => p\n\n .err_builder(&format!(\n\n \"expected {} but instead found `{}`\",\n\n self.0,\n\n p.cur_src()\n\n ))\n\n .primary(p.cur_range(), \"unexpected\"),\n\n }\n\n }\n\n}\n", "file_path": "crates/rome_js_parser/src/parser/parse_error.rs", "rank": 12, "score": 244460.757403002 }, { "content": "/// Extracts [SemanticEvent] from [SyntaxNode].\n\n///\n\n/// For a way to extract [SemanticEvent] which gives more control see [SemanticEventExtractor].\n\n///\n\n/// ```rust\n\n/// use rome_js_parser::*;\n\n/// use rome_js_syntax::*;\n\n/// use rome_js_semantic::*;\n\n/// let tree = parse(\"let a = 1\", 0, SourceType::js_script());\n\n/// for e in semantic_events(tree.syntax()) {\n\n/// dbg!(e);\n\n/// }\n\n/// ```\n\npub fn semantic_events(root: JsSyntaxNode) -> impl IntoIterator<Item = SemanticEvent> {\n\n 
SemanticEventIterator {\n\n iter: root.preorder(),\n\n extractor: SemanticEventExtractor::default(),\n\n }\n\n}\n", "file_path": "crates/rome_js_semantic/src/events.rs", "rank": 13, "score": 240944.7079433269 }, { "content": "fn assert_errors_are_present(program: &Parse<JsAnyRoot>, path: &Path) {\n\n assert!(\n\n !program.diagnostics().is_empty(),\n\n \"There should be errors in the file {:?}\\nSyntax Tree: {:#?}\",\n\n path.display(),\n\n program.syntax()\n\n );\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 14, "score": 238754.28980301094 }, { "content": "pub fn project_root() -> PathBuf {\n\n Path::new(\n\n &env::var(\"CARGO_MANIFEST_DIR\").unwrap_or_else(|_| env!(\"CARGO_MANIFEST_DIR\").to_owned()),\n\n )\n\n .ancestors()\n\n .nth(2)\n\n .unwrap()\n\n .to_path_buf()\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 15, "score": 238672.43132215156 }, { "content": "struct ExpectedToken(&'static str);\n\n\n\nimpl ToDiagnostic for ExpectedToken {\n\n fn to_diagnostic(self, p: &Parser) -> Diagnostic {\n\n match p.cur() {\n\n JsSyntaxKind::EOF => p\n\n .err_builder(&format!(\"expected `{}` but instead the file ends\", self.0))\n\n .primary(p.cur_range(), \"the file ends here\"),\n\n _ => p\n\n .err_builder(&format!(\n\n \"expected `{}` but instead found `{}`\",\n\n self.0,\n\n p.cur_src()\n\n ))\n\n .primary(p.cur_range(), \"unexpected\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/parser/parse_error.rs", "rank": 16, "score": 236191.70486375337 }, { "content": "pub fn run_format(root: &JsSyntaxNode) -> Printed {\n\n format_node(JsFormatContext::default(), root)\n\n .unwrap()\n\n .print()\n\n}\n\n\n\nimpl FormatterMeasurement {\n\n fn total(&self) -> Duration {\n\n self.formatting\n\n }\n\n\n\n pub(crate) fn summary(&self) -> String {\n\n format!(\"{}, Formatting: {:?}\", self.id, self.total(),)\n\n }\n\n}\n\n\n\nimpl Display for FormatterMeasurement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n 
let _ = writeln!(f, \"\\tFormatting: {:>10?}\", self.formatting);\n\n let _ = writeln!(f, \"\\t ----------\");\n\n let _ = writeln!(f, \"\\tTotal: {:>10?}\", self.total());\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "xtask/bench/src/features/formatter.rs", "rank": 17, "score": 235504.87962611578 }, { "content": "/// Formats a JavaScript (and its super languages) file based on its features.\n\n///\n\n/// It returns a [Formatted] result, which the user can use to override a file.\n\npub fn format_node(context: JsFormatContext, root: &JsSyntaxNode) -> FormatResult<Formatted> {\n\n rome_formatter::format_node(context, &root.format())\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 18, "score": 235452.27112188278 }, { "content": "/// Returns a clone of `token` with the leading trivia trimmed at the first\n\n/// newline piece (included), starting from the token itself and iterating backward\n\n///\n\n/// # Example\n\n/// ```\n\n/// # use rome_rowan::TriviaPiece;\n\n/// # use rome_js_syntax::{JsSyntaxToken, T};\n\n/// # use rome_js_factory::make::clone_token_up_to_first_newline;\n\n/// let input = JsSyntaxToken::new_detached(\n\n/// T![let],\n\n/// \"\\n // Comment\\n let \",\n\n/// [TriviaPiece::newline(1), TriviaPiece::whitespace(2), TriviaPiece::single_line_comment(10), TriviaPiece::newline(1), TriviaPiece::whitespace(2)],\n\n/// [TriviaPiece::whitespace(1)],\n\n/// );\n\n///\n\n/// let output = clone_token_up_to_first_newline(&input);\n\n///\n\n/// let expected = JsSyntaxToken::new_detached(\n\n/// T![let],\n\n/// \"\\n let \",\n\n/// [TriviaPiece::newline(1), TriviaPiece::whitespace(2)],\n\n/// [TriviaPiece::whitespace(1)],\n\n/// );\n\n///\n\n/// assert_eq!(output.text(), expected.text());\n\n/// ```\n\npub fn clone_token_up_to_first_newline(token: &JsSyntaxToken) -> JsSyntaxToken {\n\n let leading_trivia = token.leading_trivia().pieces();\n\n let num_pieces = leading_trivia.len();\n\n let skip_count = leading_trivia\n\n .rev()\n\n 
.position(|piece| piece.is_newline())\n\n .and_then(|index| num_pieces.checked_sub(index + 1))\n\n .unwrap_or(0);\n\n\n\n let mut text = String::new();\n\n\n\n for piece in token.leading_trivia().pieces().skip(skip_count) {\n\n text.push_str(piece.text());\n\n }\n\n\n\n text.push_str(token.text_trimmed());\n\n\n\n for piece in token.trailing_trivia().pieces() {\n\n text.push_str(piece.text());\n\n }\n", "file_path": "crates/rome_js_factory/src/make.rs", "rank": 19, "score": 234045.3114925393 }, { "content": "/// A template literal is simple when:\n\n///\n\n/// - all strings dont contain newlines\n\n/// - the expressions contained in the template are considered as `is_simple_call_argument`. Check\n\n/// [is_simple_call_argument].\n\npub fn is_simple_template_literal(template: &JsTemplate, depth: u8) -> SyntaxResult<bool> {\n\n for element in template.elements() {\n\n match element {\n\n JsAnyTemplateElement::JsTemplateChunkElement(chunk) => {\n\n if chunk.template_chunk_token()?.text_trimmed().contains('\\n') {\n\n return Ok(false);\n\n }\n\n }\n\n JsAnyTemplateElement::JsTemplateElement(element) => {\n\n let expression = element.expression()?;\n\n if !(SimpleArgument::from(expression).is_simple(depth)) {\n\n return Ok(false);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(true)\n\n}\n", "file_path": "crates/rome_js_formatter/src/utils/member_chain/simple_argument.rs", "rank": 20, "score": 230241.65744795193 }, { "content": "/// Formats a syntax node file based on its features.\n\n///\n\n/// It returns a [Formatted] result, which the user can use to override a file.\n\npub fn format_node<\n\n Context: FormatContext,\n\n L: Language,\n\n N: FormatWithRule<Context, Item = SyntaxNode<L>>,\n\n>(\n\n context: Context,\n\n root: &N,\n\n) -> FormatResult<Formatted> {\n\n tracing::trace_span!(\"format_node\").in_scope(move || {\n\n let print_options = context.as_print_options();\n\n let mut state = FormatState::new(context);\n\n let mut buffer = VecBuffer::new(&mut state);\n\n\n\n 
write!(&mut buffer, [root])?;\n\n\n\n let document = buffer.into_element();\n\n\n\n state.assert_formatted_all_tokens(root.item());\n\n\n\n Ok(Formatted::new(document, print_options))\n\n })\n\n}\n\n\n", "file_path": "crates/rome_formatter/src/lib.rs", "rank": 21, "score": 230109.2736945594 }, { "content": "/// Initializes metrics recording\n\npub fn init_metrics() {\n\n // Create and injects the metrics recording layer with the tracing library\n\n tracing_subscriber::registry().with(MetricsLayer).init();\n\n}\n\n\n", "file_path": "crates/rome_cli/src/metrics.rs", "rank": 22, "score": 230104.06971524633 }, { "content": "/// Flush and print the recorded metrics to the console\n\npub fn print_metrics() {\n\n let mut histograms: Vec<_> = METRICS\n\n .write()\n\n .drain()\n\n .flat_map(|(key, entry)| entry.into_inner().into_histograms(key.0.name()))\n\n .collect();\n\n\n\n histograms.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));\n\n\n\n for (key, histogram) in histograms {\n\n // Print the header line for the histogram with its name, mean sample\n\n // duration and standard deviation\n\n println!(\n\n \"{}: mean = {:.1?}, stdev = {:.1?}\",\n\n key,\n\n Duration::from_nanos(histogram.mean().round() as u64),\n\n Duration::from_nanos(histogram.stdev().round() as u64),\n\n );\n\n\n\n // For each quantile bucket in the histogram print out the associated\n", "file_path": "crates/rome_cli/src/metrics.rs", "rank": 23, "score": 230104.06971524633 }, { "content": "#[test]\n\npub fn node_has_comments() {\n\n let text = r\"true &&\n\n// comment\n\n(3 - 2 == 0)\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n let node = syntax\n\n .descendants()\n\n .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION)\n\n .unwrap();\n\n\n\n let logical_expression = JsLogicalExpression::cast(node).unwrap();\n\n let right = logical_expression.right().unwrap();\n\n\n\n assert!(right.syntax().has_comments_direct());\n\n}\n", "file_path": 
"crates/rome_js_parser/src/tests.rs", "rank": 25, "score": 227291.34446046222 }, { "content": "#[test]\n\npub fn jsroot_ranges() {\n\n // 0123456789A\n\n let code = \" let a = 1;\";\n\n let root = parse_module(code, 0);\n\n let syntax = root.syntax();\n\n\n\n let first_let = syntax.first_token().unwrap();\n\n let range = first_let.text_range();\n\n assert_eq!(0usize, range.start().into());\n\n assert_eq!(5usize, range.end().into());\n\n\n\n let range = first_let.text_trimmed_range();\n\n assert_eq!(1usize, range.start().into());\n\n assert_eq!(4usize, range.end().into());\n\n\n\n let eq = syntax\n\n .descendants_tokens()\n\n .find(|x| x.text_trimmed() == \"=\")\n\n .unwrap();\n\n let range = eq.text_range();\n\n assert_eq!(7usize, range.start().into());\n\n assert_eq!(9usize, range.end().into());\n\n\n\n let range = eq.text_trimmed_range();\n\n assert_eq!(7usize, range.start().into());\n\n assert_eq!(8usize, range.end().into());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 27, "score": 227291.34446046222 }, { "content": "/// Formats a range within a file, supported by Rome\n\n///\n\n/// This runs a simple heuristic to determine the initial indentation\n\n/// level of the node based on the provided [FormatContext], which\n\n/// must match currently the current initial of the file. 
Additionally,\n\n/// because the reformatting happens only locally the resulting code\n\n/// will be indented with the same level as the original selection,\n\n/// even if it's a mismatch from the rest of the block the selection is in\n\n///\n\n/// It returns a [Formatted] result with a range corresponding to the\n\n/// range of the input that was effectively overwritten by the formatter\n\npub fn format_range(\n\n context: JsFormatContext,\n\n root: &JsSyntaxNode,\n\n range: TextRange,\n\n) -> FormatResult<Printed> {\n\n rome_formatter::format_range::<_, _, FormatJsSyntaxNode, _>(\n\n context,\n\n root,\n\n range,\n\n is_range_formatting_root,\n\n )\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 28, "score": 227291.34446046222 }, { "content": "#[test]\n\npub fn node_contains_comments() {\n\n let text = \"true && true // comment\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n\n\n assert!(syntax.has_comments_descendants());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 29, "score": 224575.8624499974 }, { "content": "/// Formats the trailing trivia (comments) of a token\n\npub fn format_trailing_trivia(\n\n token: &JsSyntaxToken,\n\n) -> FormatTrailingTriviaPieces<SyntaxTriviaPiecesIterator<JsLanguage>> {\n\n FormatTrailingTriviaPieces {\n\n pieces: token.trailing_trivia().pieces(),\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct FormatTrailingTriviaPieces<I> {\n\n pieces: I,\n\n}\n\n\n\nimpl<I> Format<JsFormatContext> for FormatTrailingTriviaPieces<I>\n\nwhere\n\n I: Iterator<Item = SyntaxTriviaPiece<JsLanguage>> + Clone,\n\n{\n\n fn fmt(&self, f: &mut JsFormatter) -> FormatResult<()> {\n\n let pieces = self.pieces.clone();\n\n\n", "file_path": "crates/rome_js_formatter/src/builders.rs", "rank": 30, "score": 224575.8624499974 }, { "content": "/// Utility function to use to format ternary operators\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if it's used 
with nodes that are different from:\n\n/// - [rome_js_syntax::TsConditionalType]\n\n/// - [rome_js_syntax::JsConditionalExpression]\n\npub fn format_conditional(\n\n conditional: &Conditional,\n\n f: &mut JsFormatter,\n\n parent_is_conditional: bool,\n\n) -> FormatResult<()> {\n\n conditional.format_head(f)?;\n\n conditional.format_body(f, parent_is_conditional)\n\n}\n", "file_path": "crates/rome_js_formatter/src/utils/format_conditional.rs", "rank": 31, "score": 221964.7268266963 }, { "content": "pub fn js_parameters(\n\n l_paren_token: SyntaxToken,\n\n items: JsParameterList,\n\n r_paren_token: SyntaxToken,\n\n) -> JsParameters {\n\n JsParameters::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_PARAMETERS,\n\n [\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(items.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 32, "score": 221952.66644746676 }, { "content": "pub fn js_for_statement(\n\n for_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n first_semi_token: SyntaxToken,\n\n second_semi_token: SyntaxToken,\n\n r_paren_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsForStatementBuilder {\n\n JsForStatementBuilder {\n\n for_token,\n\n l_paren_token,\n\n first_semi_token,\n\n second_semi_token,\n\n r_paren_token,\n\n body,\n\n initializer: None,\n\n test: None,\n\n update: None,\n\n }\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 33, "score": 221952.66644746676 }, { "content": "pub fn jsx_fragment(\n\n opening_fragment: JsxOpeningFragment,\n\n children: JsxChildList,\n\n closing_fragment: JsxClosingFragment,\n\n) -> JsxFragment {\n\n JsxFragment::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JSX_FRAGMENT,\n\n [\n\n Some(SyntaxElement::Node(opening_fragment.into_syntax())),\n\n Some(SyntaxElement::Node(children.into_syntax())),\n\n 
Some(SyntaxElement::Node(closing_fragment.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 34, "score": 221952.66644746676 }, { "content": "pub fn ts_as_expression(\n\n expression: JsAnyExpression,\n\n as_token: SyntaxToken,\n\n ty: TsType,\n\n) -> TsAsExpression {\n\n TsAsExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::TS_AS_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Node(expression.into_syntax())),\n\n Some(SyntaxElement::Token(as_token)),\n\n Some(SyntaxElement::Node(ty.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 35, "score": 221952.66644746676 }, { "content": "pub fn js_module(\n\n directives: JsDirectiveList,\n\n items: JsModuleItemList,\n\n eof_token: SyntaxToken,\n\n) -> JsModuleBuilder {\n\n JsModuleBuilder {\n\n directives,\n\n items,\n\n eof_token,\n\n interpreter_token: None,\n\n }\n\n}\n\npub struct JsModuleBuilder {\n\n directives: JsDirectiveList,\n\n items: JsModuleItemList,\n\n eof_token: SyntaxToken,\n\n interpreter_token: Option<SyntaxToken>,\n\n}\n\nimpl JsModuleBuilder {\n\n pub fn with_interpreter_token(mut self, interpreter_token: SyntaxToken) -> Self {\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 36, "score": 221952.66644746676 }, { "content": "pub fn css_block(\n\n l_curly_token: SyntaxToken,\n\n declaration_list: CssDeclarationList,\n\n r_curly_token: SyntaxToken,\n\n) -> CssBlock {\n\n CssBlock::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_BLOCK,\n\n [\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(declaration_list.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 37, "score": 221952.66644746676 }, { "content": "pub fn jsx_element(\n\n opening_element: JsxOpeningElement,\n\n children: 
JsxChildList,\n\n closing_element: JsxClosingElement,\n\n) -> JsxElement {\n\n JsxElement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JSX_ELEMENT,\n\n [\n\n Some(SyntaxElement::Node(opening_element.into_syntax())),\n\n Some(SyntaxElement::Node(children.into_syntax())),\n\n Some(SyntaxElement::Node(closing_element.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 38, "score": 221952.66644746676 }, { "content": "pub fn js_script(\n\n directives: JsDirectiveList,\n\n statements: JsStatementList,\n\n eof_token: SyntaxToken,\n\n) -> JsScriptBuilder {\n\n JsScriptBuilder {\n\n directives,\n\n statements,\n\n eof_token,\n\n interpreter_token: None,\n\n }\n\n}\n\npub struct JsScriptBuilder {\n\n directives: JsDirectiveList,\n\n statements: JsStatementList,\n\n eof_token: SyntaxToken,\n\n interpreter_token: Option<SyntaxToken>,\n\n}\n\nimpl JsScriptBuilder {\n\n pub fn with_interpreter_token(mut self, interpreter_token: SyntaxToken) -> Self {\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 39, "score": 221952.66644746676 }, { "content": "pub fn js_in_expression(\n\n property: JsAnyInProperty,\n\n in_token: SyntaxToken,\n\n object: JsAnyExpression,\n\n) -> JsInExpression {\n\n JsInExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_IN_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Node(property.into_syntax())),\n\n Some(SyntaxElement::Token(in_token)),\n\n Some(SyntaxElement::Node(object.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 40, "score": 221952.66644746676 }, { "content": "pub fn js_for_in_statement(\n\n for_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n initializer: JsAnyForInOrOfInitializer,\n\n in_token: SyntaxToken,\n\n expression: JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsForInStatement {\n\n 
JsForInStatement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_FOR_IN_STATEMENT,\n\n [\n\n Some(SyntaxElement::Token(for_token)),\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(initializer.into_syntax())),\n\n Some(SyntaxElement::Token(in_token)),\n\n Some(SyntaxElement::Node(expression.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 41, "score": 221952.66644746676 }, { "content": "pub fn js_template(\n\n l_tick_token: SyntaxToken,\n\n elements: JsTemplateElementList,\n\n r_tick_token: SyntaxToken,\n\n) -> JsTemplateBuilder {\n\n JsTemplateBuilder {\n\n l_tick_token,\n\n elements,\n\n r_tick_token,\n\n tag: None,\n\n type_arguments: None,\n\n }\n\n}\n\npub struct JsTemplateBuilder {\n\n l_tick_token: SyntaxToken,\n\n elements: JsTemplateElementList,\n\n r_tick_token: SyntaxToken,\n\n tag: Option<JsAnyExpression>,\n\n type_arguments: Option<TsTypeArguments>,\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 42, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn ok_scope_blocks() {\n\n assert(\"if (true) {/*START A*/ }/*END A*/\");\n\n assert(\"function f() {/*START A*/ }/*END A*/\");\n\n assert(\"for (const a of []) {/*START A*/ }/*END A*/\");\n\n assert(\"for (const a in []) {/*START A*/ }/*END A*/\");\n\n assert(\"() => {/*START A*/ }/*END A*/\");\n\n\n\n assert(\"class A { constructor () {/*START A*/ }/*END A*/ }\");\n\n assert(\"class A { get name() {/*START A*/ }/*END A*/ }\");\n\n assert(\"class A { set name(v) {/*START A*/ }/*END A*/ }\");\n\n\n\n assert(\"try {/*START A*/ }/*END A*/ catch(e) {/*START B*/ }/*END B*/ finally {/*START C*/ }/*END C*/\");\n\n}\n\n\n", "file_path": "crates/rome_js_semantic/src/tests/scopes.rs", "rank": 43, "score": 221952.66644746676 }, { "content": "pub fn 
js_while_statement(\n\n while_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n test: JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsWhileStatement {\n\n JsWhileStatement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_WHILE_STATEMENT,\n\n [\n\n Some(SyntaxElement::Token(while_token)),\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(test.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 44, "score": 221952.66644746676 }, { "content": "pub fn js_do_while_statement(\n\n do_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n while_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n test: JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n) -> JsDoWhileStatementBuilder {\n\n JsDoWhileStatementBuilder {\n\n do_token,\n\n body,\n\n while_token,\n\n l_paren_token,\n\n test,\n\n r_paren_token,\n\n semicolon_token: None,\n\n }\n\n}\n\npub struct JsDoWhileStatementBuilder {\n\n do_token: SyntaxToken,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 45, "score": 221952.66644746676 }, { "content": "pub fn new_target(\n\n new_token: SyntaxToken,\n\n dot_token: SyntaxToken,\n\n target_token: SyntaxToken,\n\n) -> NewTarget {\n\n NewTarget::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::NEW_TARGET,\n\n [\n\n Some(SyntaxElement::Token(new_token)),\n\n Some(SyntaxElement::Token(dot_token)),\n\n Some(SyntaxElement::Token(target_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 46, "score": 221952.66644746676 }, { "content": "pub fn ts_as_assignment(\n\n assignment: JsAnyAssignment,\n\n as_token: SyntaxToken,\n\n ty: TsType,\n\n) -> TsAsAssignment {\n\n TsAsAssignment::unwrap_cast(SyntaxNode::new_detached(\n\n 
JsSyntaxKind::TS_AS_ASSIGNMENT,\n\n [\n\n Some(SyntaxElement::Node(assignment.into_syntax())),\n\n Some(SyntaxElement::Token(as_token)),\n\n Some(SyntaxElement::Node(ty.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 47, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn jsroot_display_text_and_trimmed() {\n\n let code = \" let a = 1; \\n \";\n\n let root = parse_module(code, 0);\n\n let syntax = root.syntax();\n\n\n\n assert_eq!(format!(\"{}\", syntax), code);\n\n\n\n let syntax_text = syntax.text();\n\n assert_eq!(format!(\"{}\", syntax_text), code);\n\n\n\n let syntax_text = syntax.text_trimmed();\n\n assert_eq!(format!(\"{}\", syntax_text), code.trim());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 48, "score": 221952.66644746676 }, { "content": "pub fn css_at_media(\n\n at_token: SyntaxToken,\n\n media_token: SyntaxToken,\n\n query_list: CssAtMediaQueryList,\n\n l_curly_token: SyntaxToken,\n\n body: CssAnyRule,\n\n r_curly_token: SyntaxToken,\n\n) -> CssAtMedia {\n\n CssAtMedia::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_AT_MEDIA,\n\n [\n\n Some(SyntaxElement::Token(at_token)),\n\n Some(SyntaxElement::Token(media_token)),\n\n Some(SyntaxElement::Node(query_list.into_syntax())),\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 49, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn node_contains_trailing_comments() {\n\n let text = \"true && (3 - 2 == 0) // comment\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n let node = syntax\n\n .descendants()\n\n .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION)\n\n .unwrap();\n\n\n\n let logical_expression = JsLogicalExpression::cast(node).unwrap();\n\n 
let right = logical_expression.right().unwrap();\n\n\n\n assert!(right.syntax().has_trailing_comments());\n\n assert!(!right.syntax().has_leading_comments());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 50, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn node_contains_leading_comments() {\n\n let text = r\"true &&\n\n// comment\n\n(3 - 2 == 0)\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n let node = syntax\n\n .descendants()\n\n .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION)\n\n .unwrap();\n\n\n\n let logical_expression = JsLogicalExpression::cast(node).unwrap();\n\n let right = logical_expression.right().unwrap();\n\n\n\n assert!(right.syntax().has_leading_comments());\n\n assert!(!right.syntax().has_trailing_comments());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 51, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn ok_variable_declaration() {\n\n assert(\n\n r#\"import a/*#A*/ from 'a';\n\nlet a/*#B*/ = 1;\n", "file_path": "crates/rome_js_semantic/src/tests/events.rs", "rank": 52, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn node_range_must_be_correct() {\n\n // 0123456789A123456789B123456789\n\n let text = \" function foo() { let a = 1; }\";\n\n let root = parse_module(text, 0);\n\n\n\n let var_decl = root\n\n .syntax()\n\n .descendants()\n\n .find(|x| x.kind() == JsSyntaxKind::JS_VARIABLE_STATEMENT)\n\n .unwrap();\n\n\n\n let range = var_decl.text_range();\n\n assert_eq!(18usize, range.start().into());\n\n assert_eq!(29usize, range.end().into());\n\n\n\n let range = var_decl.text_trimmed_range();\n\n assert_eq!(18usize, range.start().into());\n\n assert_eq!(28usize, range.end().into());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 53, "score": 221952.66644746676 }, { "content": "pub fn js_with_statement(\n\n with_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n object: 
JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsWithStatement {\n\n JsWithStatement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_WITH_STATEMENT,\n\n [\n\n Some(SyntaxElement::Token(with_token)),\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(object.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 54, "score": 221952.66644746676 }, { "content": "pub fn js_if_statement(\n\n if_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n test: JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n consequent: JsAnyStatement,\n\n) -> JsIfStatementBuilder {\n\n JsIfStatementBuilder {\n\n if_token,\n\n l_paren_token,\n\n test,\n\n r_paren_token,\n\n consequent,\n\n else_clause: None,\n\n }\n\n}\n\npub struct JsIfStatementBuilder {\n\n if_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n test: JsAnyExpression,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 55, "score": 221952.66644746676 }, { "content": "pub fn js_for_of_statement(\n\n for_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n initializer: JsAnyForInOrOfInitializer,\n\n of_token: SyntaxToken,\n\n expression: JsAnyExpression,\n\n r_paren_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsForOfStatementBuilder {\n\n JsForOfStatementBuilder {\n\n for_token,\n\n l_paren_token,\n\n initializer,\n\n of_token,\n\n expression,\n\n r_paren_token,\n\n body,\n\n await_token: None,\n\n }\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 56, "score": 221952.66644746676 }, { "content": "pub fn css_at_keyframes(\n\n at_token: SyntaxToken,\n\n keyframes_token: SyntaxToken,\n\n name: CssIdentifier,\n\n css_string: CssString,\n\n body: CssAtKeyframesBody,\n\n) -> CssAtKeyframes {\n\n 
CssAtKeyframes::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_AT_KEYFRAMES,\n\n [\n\n Some(SyntaxElement::Token(at_token)),\n\n Some(SyntaxElement::Token(keyframes_token)),\n\n Some(SyntaxElement::Node(name.into_syntax())),\n\n Some(SyntaxElement::Node(css_string.into_syntax())),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 57, "score": 221952.66644746676 }, { "content": "pub fn import_meta(\n\n import_token: SyntaxToken,\n\n dot_token: SyntaxToken,\n\n meta_token: SyntaxToken,\n\n) -> ImportMeta {\n\n ImportMeta::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::IMPORT_META,\n\n [\n\n Some(SyntaxElement::Token(import_token)),\n\n Some(SyntaxElement::Token(dot_token)),\n\n Some(SyntaxElement::Token(meta_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 58, "score": 221952.66644746676 }, { "content": "pub fn css_attribute(\n\n l_brack_token: SyntaxToken,\n\n attribute_name: CssAttributeName,\n\n r_brack_token: SyntaxToken,\n\n) -> CssAttributeBuilder {\n\n CssAttributeBuilder {\n\n l_brack_token,\n\n attribute_name,\n\n r_brack_token,\n\n attribute_meta: None,\n\n }\n\n}\n\npub struct CssAttributeBuilder {\n\n l_brack_token: SyntaxToken,\n\n attribute_name: CssAttributeName,\n\n r_brack_token: SyntaxToken,\n\n attribute_meta: Option<CssAttributeMeta>,\n\n}\n\nimpl CssAttributeBuilder {\n\n pub fn with_attribute_meta(mut self, attribute_meta: CssAttributeMeta) -> Self {\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 59, "score": 221952.66644746676 }, { "content": "#[test]\n\npub fn test_trivia_attached_to_tokens() {\n\n let text = \"/**/let a = 1; // nice variable \\n /*hey*/ let \\t b = 2; // another nice variable\";\n\n let m = parse_module(text, 0);\n\n let mut tokens = m.syntax().descendants_tokens();\n\n\n\n let is_let = |x: &JsSyntaxToken| 
x.text_trimmed() == \"let\";\n\n let first_let = tokens.find(is_let).unwrap();\n\n\n\n // first let leading trivia asserts\n\n let pieces: Vec<_> = first_let.leading_trivia().pieces().collect();\n\n assert!(matches!(pieces.get(0).map(|x| x.text()), Some(\"/**/\")));\n\n assert!(matches!(pieces.get(1), None));\n\n\n\n // first let trailing trivia asserts\n\n let pieces: Vec<_> = first_let.trailing_trivia().pieces().collect();\n\n assert!(matches!(pieces.get(0).map(|x| x.text()), Some(\" \")));\n\n assert!(matches!(pieces.get(1), None));\n\n\n\n // second let leading trivia asserts\n\n let second_let = tokens.find(is_let).unwrap();\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 60, "score": 221952.66644746676 }, { "content": "pub fn css_declaration(\n\n name: CssIdentifier,\n\n css_custom_property: CssCustomProperty,\n\n colon_token: SyntaxToken,\n\n value: CssAnyValue,\n\n) -> CssDeclarationBuilder {\n\n CssDeclarationBuilder {\n\n name,\n\n css_custom_property,\n\n colon_token,\n\n value,\n\n important: None,\n\n }\n\n}\n\npub struct CssDeclarationBuilder {\n\n name: CssIdentifier,\n\n css_custom_property: CssCustomProperty,\n\n colon_token: SyntaxToken,\n\n value: CssAnyValue,\n\n important: Option<CssDeclarationImportant>,\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 61, "score": 221952.66644746676 }, { "content": "/// The `write` function takes a target buffer and an `Arguments` struct that can be precompiled with the `format_args!` macro.\n\n///\n\n/// The arguments will be formatted in-order into the output buffer provided.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rome_formatter::prelude::*;\n\n/// use rome_formatter::{VecBuffer, format_args, FormatState, write, Formatted};\n\n///\n\n/// let mut state = FormatState::new(SimpleFormatContext::default());\n\n/// let mut buffer = VecBuffer::new(&mut state);\n\n///\n\n/// write(&mut buffer, format_args!(token(\"Hello 
World\"))).unwrap();\n\n///\n\n/// let formatted = Formatted::new(buffer.into_element(), PrinterOptions::default());\n\n///\n\n/// assert_eq!(\"Hello World\", formatted.print().as_code())\n\n/// ```\n\n///\n\n/// Please note that using [`write!`] might be preferable. Example:\n\n///\n\n/// ```\n\n/// use rome_formatter::prelude::*;\n\n/// use rome_formatter::{VecBuffer, format_args, FormatState, write, Formatted};\n\n///\n\n/// let mut state = FormatState::new(SimpleFormatContext::default());\n\n/// let mut buffer = VecBuffer::new(&mut state);\n\n///\n\n/// write!(&mut buffer, [token(\"Hello World\")]).unwrap();\n\n///\n\n/// let formatted = Formatted::new(buffer.into_element(), PrinterOptions::default());\n\n///\n\n/// assert_eq!(\"Hello World\", formatted.print().as_code())\n\n/// ```\n\n///\n\npub fn write<Context>(\n\n output: &mut dyn Buffer<Context = Context>,\n\n args: Arguments<Context>,\n\n) -> FormatResult<()> {\n\n let mut f = Formatter::new(output);\n\n\n\n f.write_fmt(args)\n\n}\n\n\n", "file_path": "crates/rome_formatter/src/lib.rs", "rank": 62, "score": 220679.78541008092 }, { "content": "pub fn js_export_as_clause(\n\n as_token: SyntaxToken,\n\n exported_name: JsLiteralExportName,\n\n) -> JsExportAsClause {\n\n JsExportAsClause::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_EXPORT_AS_CLAUSE,\n\n [\n\n Some(SyntaxElement::Token(as_token)),\n\n Some(SyntaxElement::Node(exported_name.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 63, "score": 219417.13053123956 }, { "content": "pub fn js_function_declaration(\n\n function_token: SyntaxToken,\n\n id: JsAnyBinding,\n\n parameters: JsParameters,\n\n body: JsFunctionBody,\n\n) -> JsFunctionDeclarationBuilder {\n\n JsFunctionDeclarationBuilder {\n\n function_token,\n\n id,\n\n parameters,\n\n body,\n\n async_token: None,\n\n star_token: None,\n\n type_parameters: None,\n\n return_type_annotation: None,\n\n }\n\n}\n\npub 
struct JsFunctionDeclarationBuilder {\n\n function_token: SyntaxToken,\n\n id: JsAnyBinding,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 64, "score": 219417.13053123956 }, { "content": "#[test]\n\npub fn just_trivia_must_be_appended_to_eof() {\n\n // 0123456789A123456789B123456789C123\n\n let text = \"// just trivia... nothing else....\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n\n\n let range = syntax.text_range();\n\n let start = range.start();\n\n let end = range.end();\n\n\n\n assert_eq!(TextSize::from(0), start);\n\n assert_eq!(TextSize::from(34), end);\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 65, "score": 219417.13053123956 }, { "content": "pub fn js_await_expression(\n\n await_token: SyntaxToken,\n\n argument: JsAnyExpression,\n\n) -> JsAwaitExpression {\n\n JsAwaitExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_AWAIT_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Token(await_token)),\n\n Some(SyntaxElement::Node(argument.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 66, "score": 219417.13053123956 }, { "content": "pub fn js_initializer_clause(\n\n eq_token: SyntaxToken,\n\n expression: JsAnyExpression,\n\n) -> JsInitializerClause {\n\n JsInitializerClause::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_INITIALIZER_CLAUSE,\n\n [\n\n Some(SyntaxElement::Token(eq_token)),\n\n Some(SyntaxElement::Node(expression.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 67, "score": 219417.13053123956 }, { "content": "pub fn js_block_statement(\n\n l_curly_token: SyntaxToken,\n\n statements: JsStatementList,\n\n r_curly_token: SyntaxToken,\n\n) -> JsBlockStatement {\n\n JsBlockStatement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_BLOCK_STATEMENT,\n\n [\n\n 
Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(statements.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 68, "score": 219417.13053123956 }, { "content": "pub fn js_array_expression(\n\n l_brack_token: SyntaxToken,\n\n elements: JsArrayElementList,\n\n r_brack_token: SyntaxToken,\n\n) -> JsArrayExpression {\n\n JsArrayExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_ARRAY_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Token(l_brack_token)),\n\n Some(SyntaxElement::Node(elements.into_syntax())),\n\n Some(SyntaxElement::Token(r_brack_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 69, "score": 219417.13053123956 }, { "content": "pub fn js_extends_clause(\n\n extends_token: SyntaxToken,\n\n super_class: JsAnyExpression,\n\n) -> JsExtendsClauseBuilder {\n\n JsExtendsClauseBuilder {\n\n extends_token,\n\n super_class,\n\n type_arguments: None,\n\n }\n\n}\n\npub struct JsExtendsClauseBuilder {\n\n extends_token: SyntaxToken,\n\n super_class: JsAnyExpression,\n\n type_arguments: Option<TsTypeArguments>,\n\n}\n\nimpl JsExtendsClauseBuilder {\n\n pub fn with_type_arguments(mut self, type_arguments: TsTypeArguments) -> Self {\n\n self.type_arguments = Some(type_arguments);\n\n self\n\n }\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 70, "score": 219417.13053123956 }, { "content": "pub fn css_declaration_important(\n\n excl_token: SyntaxToken,\n\n important_token: SyntaxToken,\n\n) -> CssDeclarationImportant {\n\n CssDeclarationImportant::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_DECLARATION_IMPORTANT,\n\n [\n\n Some(SyntaxElement::Token(excl_token)),\n\n Some(SyntaxElement::Token(important_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 71, 
"score": 219417.13053123956 }, { "content": "pub fn js_call_expression(\n\n callee: JsAnyExpression,\n\n arguments: JsCallArguments,\n\n) -> JsCallExpressionBuilder {\n\n JsCallExpressionBuilder {\n\n callee,\n\n arguments,\n\n optional_chain_token: None,\n\n type_arguments: None,\n\n }\n\n}\n\npub struct JsCallExpressionBuilder {\n\n callee: JsAnyExpression,\n\n arguments: JsCallArguments,\n\n optional_chain_token: Option<SyntaxToken>,\n\n type_arguments: Option<TsTypeArguments>,\n\n}\n\nimpl JsCallExpressionBuilder {\n\n pub fn with_optional_chain_token(mut self, optional_chain_token: SyntaxToken) -> Self {\n\n self.optional_chain_token = Some(optional_chain_token);\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 72, "score": 219417.13053123956 }, { "content": "pub fn css_simple_function(\n\n name: CssIdentifier,\n\n l_paren_token: SyntaxToken,\n\n items: CssParameterList,\n\n r_paren_token: SyntaxToken,\n\n) -> CssSimpleFunction {\n\n CssSimpleFunction::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_SIMPLE_FUNCTION,\n\n [\n\n Some(SyntaxElement::Node(name.into_syntax())),\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(items.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 73, "score": 219417.13053123956 }, { "content": "pub fn js_conditional_expression(\n\n test: JsAnyExpression,\n\n question_mark_token: SyntaxToken,\n\n consequent: JsAnyExpression,\n\n colon_token: SyntaxToken,\n\n alternate: JsAnyExpression,\n\n) -> JsConditionalExpression {\n\n JsConditionalExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_CONDITIONAL_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Node(test.into_syntax())),\n\n Some(SyntaxElement::Token(question_mark_token)),\n\n Some(SyntaxElement::Node(consequent.into_syntax())),\n\n Some(SyntaxElement::Token(colon_token)),\n\n 
Some(SyntaxElement::Node(alternate.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 74, "score": 219417.13053123956 }, { "content": "pub fn js_function_body(\n\n l_curly_token: SyntaxToken,\n\n directives: JsDirectiveList,\n\n statements: JsStatementList,\n\n r_curly_token: SyntaxToken,\n\n) -> JsFunctionBody {\n\n JsFunctionBody::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_FUNCTION_BODY,\n\n [\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(directives.into_syntax())),\n\n Some(SyntaxElement::Node(statements.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 75, "score": 219417.13053123956 }, { "content": "pub fn js_function_expression(\n\n function_token: SyntaxToken,\n\n parameters: JsParameters,\n\n body: JsFunctionBody,\n\n) -> JsFunctionExpressionBuilder {\n\n JsFunctionExpressionBuilder {\n\n function_token,\n\n parameters,\n\n body,\n\n async_token: None,\n\n star_token: None,\n\n id: None,\n\n type_parameters: None,\n\n return_type_annotation: None,\n\n }\n\n}\n\npub struct JsFunctionExpressionBuilder {\n\n function_token: SyntaxToken,\n\n parameters: JsParameters,\n\n body: JsFunctionBody,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 76, "score": 219417.13053123956 }, { "content": "pub fn js_constructor_parameters(\n\n l_paren_token: SyntaxToken,\n\n parameters: JsConstructorParameterList,\n\n r_paren_token: SyntaxToken,\n\n) -> JsConstructorParameters {\n\n JsConstructorParameters::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_CONSTRUCTOR_PARAMETERS,\n\n [\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(parameters.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n ],\n\n ))\n\n}\n", "file_path": 
"crates/rome_js_factory/src/generated/node_factory.rs", "rank": 77, "score": 219417.13053123956 }, { "content": "pub fn css_var_function(\n\n var_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n property: CssCustomProperty,\n\n r_paren_token: SyntaxToken,\n\n) -> CssVarFunctionBuilder {\n\n CssVarFunctionBuilder {\n\n var_token,\n\n l_paren_token,\n\n property,\n\n r_paren_token,\n\n value: None,\n\n }\n\n}\n\npub struct CssVarFunctionBuilder {\n\n var_token: SyntaxToken,\n\n l_paren_token: SyntaxToken,\n\n property: CssCustomProperty,\n\n r_paren_token: SyntaxToken,\n\n value: Option<CssVarFunctionValue>,\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 78, "score": 219417.13053123956 }, { "content": "pub fn js_assignment_with_default(\n\n pattern: JsAnyAssignmentPattern,\n\n eq_token: SyntaxToken,\n\n default: JsAnyExpression,\n\n) -> JsAssignmentWithDefault {\n\n JsAssignmentWithDefault::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_ASSIGNMENT_WITH_DEFAULT,\n\n [\n\n Some(SyntaxElement::Node(pattern.into_syntax())),\n\n Some(SyntaxElement::Token(eq_token)),\n\n Some(SyntaxElement::Node(default.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 79, "score": 219417.13053123956 }, { "content": "pub fn js_call_arguments(\n\n l_paren_token: SyntaxToken,\n\n args: JsCallArgumentList,\n\n r_paren_token: SyntaxToken,\n\n) -> JsCallArguments {\n\n JsCallArguments::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_CALL_ARGUMENTS,\n\n [\n\n Some(SyntaxElement::Token(l_paren_token)),\n\n Some(SyntaxElement::Node(args.into_syntax())),\n\n Some(SyntaxElement::Token(r_paren_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 80, "score": 219417.13053123956 }, { "content": "pub fn js_instanceof_expression(\n\n left: JsAnyExpression,\n\n instanceof_token: SyntaxToken,\n\n right: 
JsAnyExpression,\n\n) -> JsInstanceofExpression {\n\n JsInstanceofExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_INSTANCEOF_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Node(left.into_syntax())),\n\n Some(SyntaxElement::Token(instanceof_token)),\n\n Some(SyntaxElement::Node(right.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 81, "score": 219417.13053123956 }, { "content": "pub fn js_case_clause(\n\n case_token: SyntaxToken,\n\n test: JsAnyExpression,\n\n colon_token: SyntaxToken,\n\n consequent: JsStatementList,\n\n) -> JsCaseClause {\n\n JsCaseClause::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_CASE_CLAUSE,\n\n [\n\n Some(SyntaxElement::Token(case_token)),\n\n Some(SyntaxElement::Node(test.into_syntax())),\n\n Some(SyntaxElement::Token(colon_token)),\n\n Some(SyntaxElement::Node(consequent.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 82, "score": 219417.13053123956 }, { "content": "pub fn js_expression_snipped(\n\n expression: JsAnyExpression,\n\n eof_token: SyntaxToken,\n\n) -> JsExpressionSnipped {\n\n JsExpressionSnipped::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_EXPRESSION_SNIPPED,\n\n [\n\n Some(SyntaxElement::Node(expression.into_syntax())),\n\n Some(SyntaxElement::Token(eof_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 83, "score": 219417.13053123956 }, { "content": "pub fn js_class_expression(\n\n class_token: SyntaxToken,\n\n l_curly_token: SyntaxToken,\n\n members: JsClassMemberList,\n\n r_curly_token: SyntaxToken,\n\n) -> JsClassExpressionBuilder {\n\n JsClassExpressionBuilder {\n\n class_token,\n\n l_curly_token,\n\n members,\n\n r_curly_token,\n\n id: None,\n\n type_parameters: None,\n\n extends_clause: None,\n\n implements_clause: None,\n\n }\n\n}\n\npub struct JsClassExpressionBuilder {\n\n 
class_token: SyntaxToken,\n\n l_curly_token: SyntaxToken,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 84, "score": 219417.13053123956 }, { "content": "pub fn js_binary_expression(\n\n left: JsAnyExpression,\n\n operator_token_token: SyntaxToken,\n\n right: JsAnyExpression,\n\n) -> JsBinaryExpression {\n\n JsBinaryExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_BINARY_EXPRESSION,\n\n [\n\n Some(SyntaxElement::Node(left.into_syntax())),\n\n Some(SyntaxElement::Token(operator_token_token)),\n\n Some(SyntaxElement::Node(right.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 85, "score": 219417.13053123956 }, { "content": "pub fn css_at_media_query(\n\n condition_token: SyntaxToken,\n\n or_token: SyntaxToken,\n\n ty: CssAnyAtMediaQueryType,\n\n) -> CssAtMediaQueryBuilder {\n\n CssAtMediaQueryBuilder {\n\n condition_token,\n\n or_token,\n\n ty,\n\n only_token: None,\n\n consequent: None,\n\n }\n\n}\n\npub struct CssAtMediaQueryBuilder {\n\n condition_token: SyntaxToken,\n\n or_token: SyntaxToken,\n\n ty: CssAnyAtMediaQueryType,\n\n only_token: Option<SyntaxToken>,\n\n consequent: Option<CssAtMediaQueryConsequent>,\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 86, "score": 219417.13053123956 }, { "content": "pub fn js_for_variable_declaration(\n\n kind_token_token: SyntaxToken,\n\n declarator: JsVariableDeclarator,\n\n) -> JsForVariableDeclaration {\n\n JsForVariableDeclaration::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_FOR_VARIABLE_DECLARATION,\n\n [\n\n Some(SyntaxElement::Token(kind_token_token)),\n\n Some(SyntaxElement::Node(declarator.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 87, "score": 219417.13053123956 }, { "content": "pub fn js_export_from_clause(\n\n star_token: SyntaxToken,\n\n from_token: 
SyntaxToken,\n\n source: JsModuleSource,\n\n) -> JsExportFromClauseBuilder {\n\n JsExportFromClauseBuilder {\n\n star_token,\n\n from_token,\n\n source,\n\n export_as: None,\n\n assertion: None,\n\n semicolon_token: None,\n\n }\n\n}\n\npub struct JsExportFromClauseBuilder {\n\n star_token: SyntaxToken,\n\n from_token: SyntaxToken,\n\n source: JsModuleSource,\n\n export_as: Option<JsExportAsClause>,\n\n assertion: Option<JsImportAssertion>,\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 88, "score": 219417.13053123956 }, { "content": "pub fn css_keyframes_selector(\n\n from_token: SyntaxToken,\n\n to_token: SyntaxToken,\n\n css_percentage: CssPercentage,\n\n) -> CssKeyframesSelector {\n\n CssKeyframesSelector::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_KEYFRAMES_SELECTOR,\n\n [\n\n Some(SyntaxElement::Token(from_token)),\n\n Some(SyntaxElement::Token(to_token)),\n\n Some(SyntaxElement::Node(css_percentage.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 89, "score": 219417.13053123956 }, { "content": "pub fn js_default_clause(\n\n default_token: SyntaxToken,\n\n colon_token: SyntaxToken,\n\n consequent: JsStatementList,\n\n) -> JsDefaultClause {\n\n JsDefaultClause::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_DEFAULT_CLAUSE,\n\n [\n\n Some(SyntaxElement::Token(default_token)),\n\n Some(SyntaxElement::Token(colon_token)),\n\n Some(SyntaxElement::Node(consequent.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 90, "score": 219417.13053123956 }, { "content": "pub fn js_assignment_expression(\n\n left: JsAnyAssignmentPattern,\n\n operator_token_token: SyntaxToken,\n\n right: JsAnyExpression,\n\n) -> JsAssignmentExpression {\n\n JsAssignmentExpression::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_ASSIGNMENT_EXPRESSION,\n\n [\n\n 
Some(SyntaxElement::Node(left.into_syntax())),\n\n Some(SyntaxElement::Token(operator_token_token)),\n\n Some(SyntaxElement::Node(right.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 91, "score": 219417.13053123956 }, { "content": "pub fn css_attribute_matcher(\n\n matcher_type_token: SyntaxToken,\n\n exactly_or_hyphen_token: SyntaxToken,\n\n prefix_token: SyntaxToken,\n\n suffix_token: SyntaxToken,\n\n times_assign_token: SyntaxToken,\n\n eq_token: SyntaxToken,\n\n matcher_name: CssString,\n\n css_identifier: CssIdentifier,\n\n) -> CssAttributeMatcher {\n\n CssAttributeMatcher::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_ATTRIBUTE_MATCHER,\n\n [\n\n Some(SyntaxElement::Token(matcher_type_token)),\n\n Some(SyntaxElement::Token(exactly_or_hyphen_token)),\n\n Some(SyntaxElement::Token(prefix_token)),\n\n Some(SyntaxElement::Token(suffix_token)),\n\n Some(SyntaxElement::Token(times_assign_token)),\n\n Some(SyntaxElement::Token(eq_token)),\n\n Some(SyntaxElement::Node(matcher_name.into_syntax())),\n\n Some(SyntaxElement::Node(css_identifier.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 92, "score": 219417.13053123956 }, { "content": "pub fn css_at_keyframes_body(\n\n l_curly_token: SyntaxToken,\n\n items: CssAtKeyframesItemList,\n\n r_curly_token: SyntaxToken,\n\n) -> CssAtKeyframesBody {\n\n CssAtKeyframesBody::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_AT_KEYFRAMES_BODY,\n\n [\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(items.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 93, "score": 219417.13053123956 }, { "content": "pub fn css_keyframes_block(\n\n selectors: CssKeyframesSelectorList,\n\n l_curly_token: SyntaxToken,\n\n declarations: 
CssDeclarationList,\n\n r_curly_token: SyntaxToken,\n\n) -> CssKeyframesBlock {\n\n CssKeyframesBlock::unwrap_cast(SyntaxNode::new_detached(\n\n CssSyntaxKind::CSS_KEYFRAMES_BLOCK,\n\n [\n\n Some(SyntaxElement::Node(selectors.into_syntax())),\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(declarations.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_css_factory/src/generated/node_factory.rs", "rank": 94, "score": 219417.13053123956 }, { "content": "pub fn js_labeled_statement(\n\n label_token: SyntaxToken,\n\n colon_token: SyntaxToken,\n\n body: JsAnyStatement,\n\n) -> JsLabeledStatement {\n\n JsLabeledStatement::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_LABELED_STATEMENT,\n\n [\n\n Some(SyntaxElement::Token(label_token)),\n\n Some(SyntaxElement::Token(colon_token)),\n\n Some(SyntaxElement::Node(body.into_syntax())),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 95, "score": 219417.13053123956 }, { "content": "pub fn js_class_declaration(\n\n class_token: SyntaxToken,\n\n id: JsAnyBinding,\n\n l_curly_token: SyntaxToken,\n\n members: JsClassMemberList,\n\n r_curly_token: SyntaxToken,\n\n) -> JsClassDeclarationBuilder {\n\n JsClassDeclarationBuilder {\n\n class_token,\n\n id,\n\n l_curly_token,\n\n members,\n\n r_curly_token,\n\n abstract_token: None,\n\n type_parameters: None,\n\n extends_clause: None,\n\n implements_clause: None,\n\n }\n\n}\n\npub struct JsClassDeclarationBuilder {\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 96, "score": 219417.13053123956 }, { "content": "#[test]\n\npub fn last_trivia_must_be_appended_to_eof() {\n\n // 0123456789A123456789B123456789CC\n\n let text = \" function foo() { let a = 1; }\\n\";\n\n let root = parse_module(text, 0);\n\n let syntax = root.syntax();\n\n\n\n let range = syntax.text_range();\n\n let start = range.start();\n\n 
let end = range.end();\n\n\n\n assert_eq!(TextSize::from(0), start);\n\n assert_eq!(TextSize::from(31), end);\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 97, "score": 219417.13053123956 }, { "content": "pub fn js_import_assertion(\n\n assert_token: SyntaxToken,\n\n l_curly_token: SyntaxToken,\n\n assertions: JsImportAssertionEntryList,\n\n r_curly_token: SyntaxToken,\n\n) -> JsImportAssertion {\n\n JsImportAssertion::unwrap_cast(SyntaxNode::new_detached(\n\n JsSyntaxKind::JS_IMPORT_ASSERTION,\n\n [\n\n Some(SyntaxElement::Token(assert_token)),\n\n Some(SyntaxElement::Token(l_curly_token)),\n\n Some(SyntaxElement::Node(assertions.into_syntax())),\n\n Some(SyntaxElement::Token(r_curly_token)),\n\n ],\n\n ))\n\n}\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 98, "score": 219417.13053123956 }, { "content": "pub fn js_catch_declaration(\n\n l_paren_token: SyntaxToken,\n\n binding: JsAnyBindingPattern,\n\n r_paren_token: SyntaxToken,\n\n) -> JsCatchDeclarationBuilder {\n\n JsCatchDeclarationBuilder {\n\n l_paren_token,\n\n binding,\n\n r_paren_token,\n\n type_annotation: None,\n\n }\n\n}\n\npub struct JsCatchDeclarationBuilder {\n\n l_paren_token: SyntaxToken,\n\n binding: JsAnyBindingPattern,\n\n r_paren_token: SyntaxToken,\n\n type_annotation: Option<TsTypeAnnotation>,\n\n}\n\nimpl JsCatchDeclarationBuilder {\n\n pub fn with_type_annotation(mut self, type_annotation: TsTypeAnnotation) -> Self {\n", "file_path": "crates/rome_js_factory/src/generated/node_factory.rs", "rank": 99, "score": 219417.13053123956 } ]
Rust
couchbase-lite/src/query.rs
adrien-jeser-doctolib/couchbase-lite-rust
49ef36b21aef1817314cf98f679587be55604a7d
use crate::{ error::{c4error_init, Error}, ffi::{ c4query_new, c4query_new2, c4query_release, c4query_run, c4query_setParameters, c4queryenum_next, c4queryenum_release, kC4DefaultQueryOptions, kC4N1QLQuery, C4Query, C4QueryEnumerator, FLArrayIterator_GetCount, FLArrayIterator_GetValueAt, c4query_columnCount, c4query_columnTitle }, fl_slice::{fl_slice_empty, AsFlSlice}, value::{FromValueRef, ValueRef}, Database, Result, }; use fallible_streaming_iterator::FallibleStreamingIterator; use serde::Serialize; use std::ptr::NonNull; use crate::fl_slice::fl_slice_to_str_unchecked; use std::convert::TryFrom; pub struct Query<'db> { _db: &'db Database, inner: NonNull<C4Query>, } impl Drop for Query<'_> { fn drop(&mut self) { unsafe { c4query_release(self.inner.as_ptr()) }; } } impl Query<'_> { pub(crate) fn new<'a, 'b>(db: &'a Database, query_json: &'b str) -> Result<Query<'a>> { let mut c4err = c4error_init(); let query = unsafe { c4query_new( db.inner.0.as_ptr(), query_json.as_bytes().as_flslice(), &mut c4err, ) }; NonNull::new(query) .map(|inner| Query { _db: db, inner }) .ok_or_else(|| c4err.into()) } pub(crate) fn new_n1ql<'a, 'b>(db: &'a Database, query_n1ql: &'b str) -> Result<Query<'a>> { let mut c4err = c4error_init(); let mut out_error_pos: std::os::raw::c_int = -1; let query = unsafe { c4query_new2( db.inner.0.as_ptr(), kC4N1QLQuery, query_n1ql.as_bytes().as_flslice(), &mut out_error_pos, &mut c4err, ) }; NonNull::new(query) .map(|inner| Query { _db: db, inner }) .ok_or_else(|| c4err.into()) } pub fn set_parameters<T>(&self, parameters: &T) -> Result<()> where T: Serialize, { let param_string = serde_json::to_string(parameters)?; let param_slice = param_string.as_bytes().as_flslice(); unsafe { c4query_setParameters(self.inner.as_ptr(), param_slice); } Ok(()) } pub fn run(&self) -> Result<Enumerator> { let mut c4err = c4error_init(); let it = unsafe { c4query_run( self.inner.as_ptr(), &kC4DefaultQueryOptions, fl_slice_empty(), &mut c4err, ) }; NonNull::new(it) 
.map(|inner| Enumerator { _query: self, reach_end: false, inner, }) .ok_or_else(|| c4err.into()) } pub fn column_names(&self) -> Result<Vec<String>> { let col_count = unsafe { c4query_columnCount(self.inner.as_ptr()) }; let column_count = match usize::try_from(col_count) { Ok(value) => value, Err(_) => return Err(Error::LogicError("column count doesn't fit".to_string())), }; let mut names = Vec::with_capacity(column_count); for col_index in 0..col_count { let title = unsafe { c4query_columnTitle(self.inner.as_ptr(), col_index) }; let name = unsafe { fl_slice_to_str_unchecked(title).to_owned() }; names.push(name); } Ok(names) } } pub struct Enumerator<'query> { _query: &'query Query<'query>, reach_end: bool, inner: NonNull<C4QueryEnumerator>, } impl Drop for Enumerator<'_> { fn drop(&mut self) { unsafe { c4queryenum_release(self.inner.as_ptr()) }; } } impl<'en> FallibleStreamingIterator for Enumerator<'en> { type Error = crate::error::Error; type Item = Enumerator<'en>; fn advance(&mut self) -> Result<()> { if self.reach_end { return Ok(()); } let mut c4err = c4error_init(); if unsafe { c4queryenum_next(self.inner.as_ptr(), &mut c4err) } { Ok(()) } else { if c4err.code == 0 { self.reach_end = true; Ok(()) } else { Err(c4err.into()) } } } fn get(&self) -> Option<&Enumerator<'en>> { if !self.reach_end { Some(self) } else { None } } } impl<'a> Enumerator<'a> { pub fn get_raw_checked(&self, i: u32) -> Result<ValueRef<'a>> { let n = unsafe { FLArrayIterator_GetCount(&self.inner.as_ref().columns) }; if i >= n { return Err(Error::LogicError(format!( "Enumerator::get_raw_checked: Index out of bounds {} / {}", i, n ))); } let val: ValueRef = unsafe { FLArrayIterator_GetValueAt(&self.inner.as_ref().columns, i) }.into(); Ok(val) } pub fn get_checked<T>(&self, i: u32) -> Result<T> where T: FromValueRef<'a>, { let value_ref = self.get_raw_checked(i)?; FromValueRef::column_result(value_ref) } pub fn col_count(&self) -> u32 { unsafe { 
FLArrayIterator_GetCount(&self.inner.as_ref().columns) } } }
use crate::{ error::{c4error_init, Error}, ffi::{ c4query_new, c4query_new2, c4query_release, c4query_run, c4query_setParameters, c4queryenum_next, c4queryenum_release, kC4DefaultQueryOptions, kC4N1QLQuery, C4Query, C4QueryEnumerator, FLArrayIterator_GetCount, FLArrayIterator_GetValueAt, c4query_columnCount, c4query_columnTitle }, fl_slice::{fl_slice_empty, AsFlSlice}, value::{FromValueRef, ValueRef}, Database, Result, }; use fallible_streaming_iterator::FallibleStreamingIterator; use serde::Serialize; use std::ptr::NonNull; use crate::fl_slice::fl_slice_to_str_unchecked; use std::convert::TryFrom; pub struct Query<'db> { _db: &'db Database, inner: NonNull<C4Query>, } impl Drop for Query<'_> { fn drop(&mut self) { unsafe { c4query_release(self.inner.as_ptr()) }; } } impl Query<'_> { pub(crate) fn new<'a, 'b>(db: &'a Database, query_json: &'b str) -> Result<Query<'a>> { let mut c4err = c4error_init(); let query = unsafe { c4query_new( db.inner.0.as_ptr(),
.ok_or_else(|| c4err.into()) } pub(crate) fn new_n1ql<'a, 'b>(db: &'a Database, query_n1ql: &'b str) -> Result<Query<'a>> { let mut c4err = c4error_init(); let mut out_error_pos: std::os::raw::c_int = -1; let query = unsafe { c4query_new2( db.inner.0.as_ptr(), kC4N1QLQuery, query_n1ql.as_bytes().as_flslice(), &mut out_error_pos, &mut c4err, ) }; NonNull::new(query) .map(|inner| Query { _db: db, inner }) .ok_or_else(|| c4err.into()) } pub fn set_parameters<T>(&self, parameters: &T) -> Result<()> where T: Serialize, { let param_string = serde_json::to_string(parameters)?; let param_slice = param_string.as_bytes().as_flslice(); unsafe { c4query_setParameters(self.inner.as_ptr(), param_slice); } Ok(()) } pub fn run(&self) -> Result<Enumerator> { let mut c4err = c4error_init(); let it = unsafe { c4query_run( self.inner.as_ptr(), &kC4DefaultQueryOptions, fl_slice_empty(), &mut c4err, ) }; NonNull::new(it) .map(|inner| Enumerator { _query: self, reach_end: false, inner, }) .ok_or_else(|| c4err.into()) } pub fn column_names(&self) -> Result<Vec<String>> { let col_count = unsafe { c4query_columnCount(self.inner.as_ptr()) }; let column_count = match usize::try_from(col_count) { Ok(value) => value, Err(_) => return Err(Error::LogicError("column count doesn't fit".to_string())), }; let mut names = Vec::with_capacity(column_count); for col_index in 0..col_count { let title = unsafe { c4query_columnTitle(self.inner.as_ptr(), col_index) }; let name = unsafe { fl_slice_to_str_unchecked(title).to_owned() }; names.push(name); } Ok(names) } } pub struct Enumerator<'query> { _query: &'query Query<'query>, reach_end: bool, inner: NonNull<C4QueryEnumerator>, } impl Drop for Enumerator<'_> { fn drop(&mut self) { unsafe { c4queryenum_release(self.inner.as_ptr()) }; } } impl<'en> FallibleStreamingIterator for Enumerator<'en> { type Error = crate::error::Error; type Item = Enumerator<'en>; fn advance(&mut self) -> Result<()> { if self.reach_end { return Ok(()); } let mut c4err = 
c4error_init(); if unsafe { c4queryenum_next(self.inner.as_ptr(), &mut c4err) } { Ok(()) } else { if c4err.code == 0 { self.reach_end = true; Ok(()) } else { Err(c4err.into()) } } } fn get(&self) -> Option<&Enumerator<'en>> { if !self.reach_end { Some(self) } else { None } } } impl<'a> Enumerator<'a> { pub fn get_raw_checked(&self, i: u32) -> Result<ValueRef<'a>> { let n = unsafe { FLArrayIterator_GetCount(&self.inner.as_ref().columns) }; if i >= n { return Err(Error::LogicError(format!( "Enumerator::get_raw_checked: Index out of bounds {} / {}", i, n ))); } let val: ValueRef = unsafe { FLArrayIterator_GetValueAt(&self.inner.as_ref().columns, i) }.into(); Ok(val) } pub fn get_checked<T>(&self, i: u32) -> Result<T> where T: FromValueRef<'a>, { let value_ref = self.get_raw_checked(i)?; FromValueRef::column_result(value_ref) } pub fn col_count(&self) -> u32 { unsafe { FLArrayIterator_GetCount(&self.inner.as_ref().columns) } } }
query_json.as_bytes().as_flslice(), &mut c4err, ) }; NonNull::new(query) .map(|inner| Query { _db: db, inner })
function_block-random_span
[ { "content": "fn print_external_changes(db: &mut Option<Database>) -> Result<(), Box<dyn std::error::Error>> {\n\n let db = db\n\n .as_mut()\n\n .ok_or_else(|| format!(\"print_external_changes: db not OPEN\"))?;\n\n let mut doc_ids = HashSet::<String>::new();\n\n for change in db.observed_changes() {\n\n println!(\n\n \"observed change: doc id {} was changed, external {}\",\n\n change.doc_id(),\n\n change.external()\n\n );\n\n if change.external() {\n\n doc_ids.insert(change.doc_id().into());\n\n }\n\n }\n\n for doc_id in &doc_ids {\n\n let doc = match db.get_existing(doc_id.as_str()) {\n\n Ok(x) => x,\n\n Err(err) => {\n\n eprintln!(\"Can not get {}: {}\", doc_id, err);\n", "file_path": "chat-demo/src/main.rs", "rank": 0, "score": 145611.0118021078 }, { "content": "fn print_all_messages(db: &Database) -> Result<(), Box<dyn std::error::Error>> {\n\n let query = db.query(r#\"{\"WHAT\": [\"._id\"], \"WHERE\": [\"=\", [\".type\"], \"Message\"]}\"#)?;\n\n let mut iter = query.run()?;\n\n while let Some(item) = iter.next()? 
{\n\n // work with item\n\n let id = item.get_raw_checked(0)?;\n\n let id = id.as_str()?;\n\n println!(\"iteration id {}\", id);\n\n let doc = db.get_existing(id)?;\n\n println!(\"doc id {}\", doc.id());\n\n\n\n let db_msg: Message = doc.decode_data()?;\n\n println!(\"db_msg: {:?}\", db_msg);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 1, "score": 130785.81892764906 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n env_logger::init();\n\n let mut runtime = tokio::runtime::Runtime::new()?;\n\n\n\n let db_path = env::args().nth(1).expect(\"No path to db file\");\n\n let db_path = Path::new(&db_path);\n\n let sync_url = env::args()\n\n .nth(2)\n\n .unwrap_or_else(|| \"ws://vps820494.ovh.net:4984/billeo-db\".to_string());\n\n let token: Option<String> = Some(\"1b91ed5c6a58efd479c74011b592c86fc43f1c82\".into());\n\n\n\n use_web_sockets(runtime.handle().clone());\n\n let (db_thread, db_exec) = run_db_thread(db_path);\n\n let db_exec_repl = db_exec.clone();\n\n db_exec.spawn(move |db| {\n\n if let Some(db) = db.as_mut() {\n\n db.start_replicator(\n\n &sync_url,\n\n token.as_ref().map(String::as_str),\n\n move |repl_state| {\n", "file_path": "chat-demo/src/main.rs", "rank": 2, "score": 112244.04211179909 }, { "content": "fn load_body(inner: NonNull<C4Document>) -> Result<()> {\n\n let mut c4err = c4error_init();\n\n if unsafe { c4doc_loadRevisionBody(inner.as_ptr(), &mut c4err) } {\n\n Ok(())\n\n } else {\n\n Err(c4err.into())\n\n }\n\n}\n\n\n\n#[repr(transparent)]\n\npub(crate) struct C4DocumentOwner(pub(crate) NonNull<C4Document>);\n\n\n\nimpl Debug for C4DocumentOwner {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", unsafe { self.0.as_ref() })\n\n }\n\n}\n\n\n\nimpl C4DocumentOwner {\n\n pub(crate) fn exists(&self) -> bool {\n", "file_path": "couchbase-lite/src/document.rs", "rank": 3, "score": 100783.11237633953 }, { "content": "#[derive(Clone)]\n\nstruct DbQueryExecutor 
{\n\n inner: mpsc::Sender<Job<Database>>,\n\n}\n\n\n\nimpl DbQueryExecutor {\n\n pub fn spawn<F: FnOnce(&mut Option<Database>) + Send + 'static>(&self, job: F) {\n\n self.inner\n\n .send(Box::new(job))\n\n .expect(\"thread_pool::Executor::spawn failed\");\n\n }\n\n}\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 4, "score": 97211.16409988521 }, { "content": "/// use embedded web-socket library\n\npub fn use_web_sockets(handle: Handle) {\n\n let handle = Arc::new(handle);\n\n let sock_factory = C4SocketFactory {\n\n context: Arc::into_raw(handle) as *mut c_void,\n\n framing: kC4NoFraming as C4SocketFraming,\n\n\n\n open: Some(ws_open),\n\n write: Some(ws_write),\n\n completedReceive: Some(ws_completed_receive),\n\n close: None,\n\n requestClose: Some(ws_request_close),\n\n dispose: Some(ws_dispose),\n\n };\n\n\n\n unsafe { c4socket_registerFactory(sock_factory) };\n\n}\n\n\n", "file_path": "couchbase-lite/src/repl_transport.rs", "rank": 5, "score": 92641.95938437148 }, { "content": "fn search_file_in_directory<P>(dirs: &[P], file: &str) -> Result<PathBuf, ()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n for dir in dirs {\n\n let file_path = dir.as_ref().join(file);\n\n if file_path.exists() && file_path.is_file() {\n\n return Ok(file_path);\n\n }\n\n }\n\n Err(())\n\n}\n\n\n", "file_path": "couchbase-lite-core-sys/build.rs", "rank": 6, "score": 84823.52778097679 }, { "content": "fn fail(s: &str) -> ! 
{\n\n panic!(\"\\n{}\\n\\nbuild script failed, must exit now\", s)\n\n}\n", "file_path": "couchbase-lite-core-sys/build.rs", "rank": 7, "score": 80716.77483984453 }, { "content": "fn run_db_thread(db_path: &Path) -> (std::thread::JoinHandle<()>, DbQueryExecutor) {\n\n let (sender, receiver) = std::sync::mpsc::channel::<Job<Database>>();\n\n let db_path: std::path::PathBuf = db_path.into();\n\n let join_handle = std::thread::spawn(move || {\n\n let mut db = match Database::open(&db_path, DatabaseConfig::default()) {\n\n Ok(db) => {\n\n println!(\"We read all messages after open:\");\n\n print_all_messages(&db).expect(\"read from db failed\");\n\n println!(\"read all messages after open done\");\n\n Some(db)\n\n }\n\n Err(err) => {\n\n error!(\"Initialiazion cause error: {}\", err);\n\n None\n\n }\n\n };\n\n loop {\n\n match receiver.recv() {\n\n Ok(x) => x(&mut db),\n\n Err(err) => {\n\n trace!(\"db_thread: recv error: {}\", err);\n\n break;\n\n }\n\n }\n\n }\n\n });\n\n (join_handle, DbQueryExecutor { inner: sender })\n\n}\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 8, "score": 77675.528242574 }, { "content": "fn getenv_unwrap(v: &str) -> String {\n\n match env::var(v) {\n\n Ok(s) => s,\n\n Err(..) 
=> fail(&format!(\"environment variable `{}` not defined\", v)),\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite-core-sys/build.rs", "rank": 9, "score": 73758.28454103236 }, { "content": "struct DbIndexesListIterator {\n\n _enc_data: FlSliceOwner,\n\n array: ValueRefArray,\n\n next_idx: u32,\n\n cur_val: Option<FLValue>,\n\n}\n\n\n\nimpl DbIndexesListIterator {\n\n fn new(enc_data: FlSliceOwner) -> Result<Self> {\n\n let fvalue = unsafe { FLValue_FromData(enc_data.as_flslice(), FLTrust_kFLTrusted) };\n\n let val: ValueRef = fvalue.into();\n\n let array = match val {\n\n ValueRef::Array(arr) => arr,\n\n _ => {\n\n return Err(Error::LogicError(\n\n \"db indexes are not fleece encoded array\".into(),\n\n ))\n\n }\n\n };\n\n\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 10, "score": 69574.13278819523 }, { "content": "#[test]\n\nfn test_n1ql_query() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n let mut trans = db.transaction().unwrap();\n\n for i in 0..10_000 {\n\n let foo = Foo {\n\n i,\n\n s: format!(\"Hello {}\", i),\n\n };\n\n let mut doc = Document::new(&foo).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n }\n\n trans.commit().unwrap();\n\n\n\n let query = db.n1ql_query(\"SELECT s WHERE s LIKE '%555'\").unwrap();\n\n let expected = vec![\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 11, "score": 66317.7073651394 }, { "content": "#[test]\n\nfn test_n1ql_query_with_parameter() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, 
DatabaseConfig::default()).unwrap();\n\n let mut trans = db.transaction().unwrap();\n\n for i in 0..10_000 {\n\n let foo = Foo {\n\n i,\n\n s: format!(\"Hello {}\", i),\n\n };\n\n let mut doc = Document::new(&foo).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n }\n\n trans.commit().unwrap();\n\n\n\n let query = db\n\n .n1ql_query(\"SELECT s WHERE s LIKE $pattern ORDER BY s LIMIT 2 OFFSET 1\")\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 12, "score": 64435.31549334556 }, { "content": "fn into_msg_desc(err: C4Error) -> (FlSliceOwner, FlSliceOwner) {\n\n let msg: FlSliceOwner = unsafe { c4error_getMessage(err) }.into();\n\n let desc: FlSliceOwner = unsafe { c4error_getDescription(err) }.into();\n\n (msg, desc)\n\n}\n\n\n\n#[inline]\n\npub(crate) fn c4error_init() -> C4Error {\n\n C4Error {\n\n domain: kC4MaxErrorDomainPlus1 as C4ErrorDomain,\n\n code: 0,\n\n internal_info: 0,\n\n }\n\n}\n\n\n\nimpl From<serde_json::Error> for Error {\n\n fn from(e: serde_json::Error) -> Self {\n\n Error::SerdeJson(e)\n\n }\n\n}\n", "file_path": "couchbase-lite/src/error.rs", "rank": 13, "score": 58272.03514723528 }, { "content": "fn cc_system_include_dirs() -> Result<(Vec<PathBuf>, Vec<PathBuf>), String> {\n\n let cc_build = cc::Build::new();\n\n\n\n let cc_process = cc_build\n\n .get_compiler()\n\n .to_command()\n\n .env(\"LANG\", \"C\")\n\n .env(\"LC_MESSAGES\", \"C\")\n\n .args(&[\"-v\", \"-x\", \"c\", \"-E\", \"-\"])\n\n .stderr(Stdio::piped())\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::inherit())\n\n .spawn()\n\n .map_err(|err| err.to_string())?;\n\n\n\n cc_process\n\n .stdin\n\n .ok_or_else(|| format!(\"can not get stdin of cc\"))?\n\n .write_all(b\"\\n\")\n\n .map_err(|err| err.to_string())?;\n", "file_path": "couchbase-lite-core-sys/build.rs", "rank": 14, "score": 56166.654788489606 }, { "content": "type Job<T> = Box<dyn FnOnce(&mut Option<T>) + Send>;\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 15, "score": 50503.56150621956 }, { "content": 
"#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(tag = \"type\")]\n\nstruct Message {\n\n msg: String,\n\n}\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 16, "score": 48221.935010634654 }, { "content": "#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\n#[serde(tag = \"type\")]\n\nstruct Foo {\n\n i: i32,\n\n s: String,\n\n}\n\n\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 17, "score": 46999.69916540172 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct Empty {}\n\n\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 18, "score": 46999.69916540172 }, { "content": "struct ReplicatorParams {\n\n url: String,\n\n token: Option<String>,\n\n}\n\n\n\npub(crate) struct DbInner(NonNull<C4Database>);\n\n/// According to\n\n/// https://github.com/couchbase/couchbase-lite-core/wiki/Thread-Safety\n\n/// it is possible to call from any thread, but not concurrently\n\nunsafe impl Send for DbInner {}\n\n\n\nimpl Drop for Database {\n\n fn drop(&mut self) {\n\n if let Some(repl) = self.db_replicator.take() {\n\n repl.stop();\n\n }\n\n self.db_observers.clear();\n\n unsafe { c4db_release(self.inner.0.as_ptr()) };\n\n }\n\n}\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 19, "score": 46999.69916540172 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(tag = \"type\")]\n\nstruct S {\n\n f: f64,\n\n s: String,\n\n}\n\n\n\nimpl PartialEq for S {\n\n fn eq(&self, o: &S) -> bool {\n\n (self.f - o.f).abs() < 1e-13 && self.s == o.s\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 20, "score": 46999.69916540172 }, { "content": "struct Socket {\n\n handle: Arc<Handle>,\n\n writer: Arc<TokioMutex<Option<WsWriter>>>,\n\n stop_read: Arc<TokioMutex<Option<oneshot::Sender<()>>>>,\n\n read_data_avaible: AtomicUsize,\n\n read_confirmed: Arc<Notify>,\n\n close_confirmied: Arc<Notify>,\n\n closed: Arc<TokioMutex<bool>>,\n\n c4sock: usize,\n\n last_activity: 
Arc<TokioMutex<Instant>>,\n\n}\n\n\n\nimpl Socket {\n\n fn close(self: Arc<Self>) {\n\n let c4sock = self.c4sock;\n\n trace!(\"Socket::close, c4sock {:x}\", c4sock);\n\n let writer = self.writer.clone();\n\n let stop_read = self.stop_read.clone();\n\n self.handle.spawn(async move {\n\n {\n", "file_path": "couchbase-lite/src/repl_transport.rs", "rank": 21, "score": 46999.69916540172 }, { "content": "fn save_msg(\n\n db: &mut Database,\n\n data: &str,\n\n doc_id: Option<&str>,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let mut trans = db.transaction()?;\n\n let msg = Message { msg: data.into() };\n\n let mut doc = if let Some(doc_id) = doc_id {\n\n println!(\"save_msg: edit message\");\n\n Document::new_with_id(doc_id, &msg)?\n\n } else {\n\n Document::new(&msg)?\n\n };\n\n println!(\"save_msg: doc id {}\", doc.id());\n\n trans.save(&mut doc)?;\n\n trans.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "chat-demo/src/main.rs", "rank": 22, "score": 43886.1203216068 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let target_os = env::var(\"CARGO_CFG_TARGET_OS\").unwrap();\n\n let cross_to_windows = target_os == \"windows\"\n\n && !cfg!(target_os = \"windows\")\n\n && !env::var(\"ONLY_CARGO_CHECK\").is_ok();\n\n let cross_to_macos =\n\n target_os == \"macos\" && !cfg!(target_os = \"macos\") && !env::var(\"ONLY_CARGO_CHECK\").is_ok();\n\n let cross_to_android = target_os == \"android\" && !env::var(\"ONLY_CARGO_CHECK\").is_ok();\n\n\n\n if cross_to_windows || cross_to_macos {\n\n println!(\"cargo:rustc-link-lib=dylib=LiteCore\");\n\n } else {\n\n if cross_to_android {\n\n let dst = cmake::Config::new(Path::new(\"couchbase-lite-core\"))\n\n .define(\"CMAKE_CXX_FLAGS\", env::var(\"CMAKE_CXX_FLAGS\").unwrap())\n\n .define(\"CMAKE_C_FLAGS\", env::var(\"CMAKE_C_FLAGS\").unwrap())\n\n .define(\"ANDROID_ABI\", env::var(\"ANDROID_ABI\").unwrap())\n\n .define(\"ANDROID_NDK\", env::var(\"NDK_HOME\").unwrap())\n", "file_path": 
"couchbase-lite-core-sys/build.rs", "rank": 23, "score": 43886.1203216068 }, { "content": "#[test]\n\nfn test_indices() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n\n\n fn get_index_list(db: &Database) -> Vec<String> {\n\n let mut ret = vec![];\n\n let mut index_name_it = db.get_indexes().unwrap();\n\n while let Some(value) = index_name_it.next().unwrap() {\n\n println!(\"index name: {}\", value);\n\n ret.push(value.into());\n\n }\n\n ret\n\n }\n\n\n\n println!(\"before index creation:\");\n\n assert!(get_index_list(&db).is_empty());\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 24, "score": 42783.631943733155 }, { "content": "#[test]\n\nfn test_write_read() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n let mut ids_and_data = Vec::<(String, Foo)>::new();\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n {\n\n let mut trans = db.transaction().unwrap();\n\n for i in 17..=180 {\n\n let foo = Foo {\n\n i: i,\n\n s: format!(\"Hello {}\", i),\n\n };\n\n let mut doc = Document::new(&foo).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n ids_and_data.push((doc.id().into(), foo));\n\n }\n\n trans.commit().unwrap();\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 25, "score": 41769.57340100066 }, { "content": "#[test]\n\nfn test_observed_changes() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = 
tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n db.register_observer(|| println!(\"something changed\"))\n\n .unwrap();\n\n let changes: Vec<_> = db.observed_changes().collect();\n\n assert!(changes.is_empty());\n\n let doc_id: String = {\n\n let mut trans = db.transaction().unwrap();\n\n let foo = Foo {\n\n i: 17,\n\n s: \"hello\".into(),\n\n };\n\n let mut doc = Document::new(&foo).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n trans.commit().unwrap();\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 26, "score": 41769.57340100066 }, { "content": "#[test]\n\nfn test_like_performance() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\n #[serde(tag = \"type\")]\n\n struct Data {\n\n field1: String,\n\n field2: String,\n\n }\n\n\n\n const N: usize = 3_000;\n\n let mut trans = db.transaction().unwrap();\n\n for i in 0..N {\n\n let d = Data {\n\n field1: format!(\"_common_prefix_{}\", i),\n\n field2: format!(\"{}\", i + 1),\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 27, "score": 41769.57340100066 }, { "content": "#[test]\n\nfn test_save_float() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n let mut trans = db.transaction().unwrap();\n\n let s = S {\n\n f: 17.48,\n\n s: \"ABCD\".into(),\n\n };\n\n let mut doc = Document::new(&s).unwrap();\n\n trans.save(&mut 
doc).unwrap();\n\n trans.commit().unwrap();\n\n let doc_id: String = doc.id().into();\n\n drop(doc);\n\n\n\n let doc = db.get_existing(&doc_id).unwrap();\n\n let loaded_s: S = doc.decode_data().unwrap();\n\n assert_eq!(s, loaded_s);\n\n }\n\n tmp_dir.close().expect(\"Can not close tmp_dir\");\n\n}\n\n\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 28, "score": 41769.57340100066 }, { "content": "#[test]\n\nfn test_save_and_select_parent() {\n\n fn create_s(i: i32) -> S {\n\n S {\n\n f: f64::from(i) / 3.6,\n\n s: format!(\"Hello {}\", i),\n\n }\n\n }\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n let s = create_s(500);\n\n let mut trans = db.transaction().unwrap();\n\n let mut doc = Document::new(&s).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n trans.commit().unwrap();\n\n let prec_rev_id: String = doc.rev_id().into();\n\n let doc_id: String = doc.id().into();\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 29, "score": 40833.71558387313 }, { "content": "#[test]\n\nfn test_save_several_times() {\n\n fn create_s(i: i32) -> S {\n\n S {\n\n f: f64::from(i) / 3.6,\n\n s: format!(\"Hello {}\", i),\n\n }\n\n }\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n let s = create_s(500);\n\n let mut trans = db.transaction().unwrap();\n\n let mut doc = Document::new(&s).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n trans.commit().unwrap();\n\n let doc_id: String = doc.id().into();\n\n drop(doc);\n", "file_path": 
"couchbase-lite/tests/smoke_tests.rs", "rank": 30, "score": 40833.71558387313 }, { "content": "#[test]\n\nfn test_like_offset_limit() {\n\n let _ = env_logger::try_init();\n\n let tmp_dir = tempdir().expect(\"Can not create tmp directory\");\n\n println!(\"we create tempdir at {}\", tmp_dir.path().display());\n\n let db_path = tmp_dir.path().join(\"a.cblite2\");\n\n {\n\n let mut db = Database::open(&db_path, DatabaseConfig::default()).unwrap();\n\n let mut trans = db.transaction().unwrap();\n\n for i in 0..10_000 {\n\n let foo = Foo {\n\n i: i,\n\n s: format!(\"Hello {}\", i),\n\n };\n\n let mut doc = Document::new(&foo).unwrap();\n\n trans.save(&mut doc).unwrap();\n\n }\n\n trans.commit().unwrap();\n\n\n\n assert_eq!(\n\n vec![\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 31, "score": 40833.71558387313 }, { "content": "pub trait FromValueRef<'a>: Sized {\n\n fn column_result(value: ValueRef<'a>) -> Result<Self>;\n\n}\n\n\n\nimpl<'a> FromValueRef<'a> for &'a str {\n\n fn column_result(value: ValueRef<'a>) -> Result<Self> {\n\n if let ValueRef::String(x) = value {\n\n Ok(x)\n\n } else {\n\n Err(Error::LogicError(format!(\n\n \"Wrong ValueRef type, expect String, got {:?}\",\n\n value\n\n )))\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> FromValueRef<'a> for u16 {\n\n fn column_result(value: ValueRef<'a>) -> Result<Self> {\n\n match value {\n", "file_path": "couchbase-lite/src/value.rs", "rank": 32, "score": 39336.111707798285 }, { "content": "fn run_bindgen_for_c_headers<P: AsRef<Path>>(\n\n target: &str,\n\n include_dirs: &[P],\n\n framework_dirs: &[P],\n\n c_headers: &[&str],\n\n output_rust: &Path,\n\n) -> Result<(), String> {\n\n assert!(!c_headers.is_empty());\n\n let c_file_path = search_file_in_directory(include_dirs, c_headers[0])\n\n .map_err(|_| format!(\"Can not find {}\", c_headers[0]))?;\n\n\n\n let mut dependicies = Vec::with_capacity(c_headers.len());\n\n for header in c_headers.iter() {\n\n let c_file_path = 
search_file_in_directory(include_dirs, header)\n\n .map_err(|_| format!(\"Can not find {}\", header))?;\n\n dependicies.push(c_file_path);\n\n }\n\n\n\n if let Ok(out_meta) = output_rust.metadata() {\n\n let mut res_recent_enough = true;\n", "file_path": "couchbase-lite-core-sys/build.rs", "rank": 33, "score": 33814.46460571832 }, { "content": " /// fleece library errors\n\n FlError(u32),\n\n /// argument contains 0 character\n\n NulError(std::ffi::NulError),\n\n}\n\n\n\nimpl std::error::Error for Error {}\n\n\n\npub(crate) type Result<T> = std::result::Result<T, Error>;\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::DbError(err) => {\n\n let (msg, desc) = into_msg_desc(*err);\n\n write!(\n\n fmt,\n\n \"c4 error {}: {}\",\n\n desc.as_utf8_lossy(),\n\n msg.as_utf8_lossy()\n", "file_path": "couchbase-lite/src/error.rs", "rank": 34, "score": 29363.547148253034 }, { "content": "\n\nimpl From<json5::Error> for Error {\n\n fn from(error: json5::Error) -> Self {\n\n Error::Json5(error)\n\n }\n\n}\n\n\n\nimpl From<std::ffi::NulError> for Error {\n\n fn from(err: std::ffi::NulError) -> Self {\n\n Error::NulError(err)\n\n }\n\n}\n", "file_path": "couchbase-lite/src/error.rs", "rank": 35, "score": 29359.29642031822 }, { "content": "use crate::{\n\n ffi::{\n\n c4error_getDescription, c4error_getMessage, kC4MaxErrorDomainPlus1, C4Error, C4ErrorDomain,\n\n },\n\n fl_slice::FlSliceOwner,\n\n};\n\nuse std::fmt;\n\n\n\n/// Enum listing possible errors.\n\npub enum Error {\n\n /// couchbase-lite-core error\n\n DbError(C4Error),\n\n /// UTF-8 encoding problem\n\n Utf8,\n\n /// `serde_json::Error`\n\n SerdeJson(serde_json::Error),\n\n /// some invariant was broken\n\n LogicError(String),\n\n /// `json5::Error`\n\n Json5(json5::Error),\n", "file_path": "couchbase-lite/src/error.rs", "rank": 36, "score": 29358.360629993942 }, { "content": " )\n\n }\n\n Error::Utf8 => write!(fmt, \"Utf8 encoding/decoding 
error\"),\n\n Error::Json5(err) => write!(fmt, \"Json5: {}\", err),\n\n Error::LogicError(msg) => write!(fmt, \"LogicError: {}\", msg),\n\n Error::SerdeJson(err) => write!(fmt, \"SerdeJson: {}\", err),\n\n Error::FlError(err) => write!(fmt, \"FlError: {}\", err),\n\n Error::NulError(err) => write!(fmt, \"NulError: {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::DbError(err) => {\n\n let (msg, desc) = into_msg_desc(*err);\n\n write!(\n\n fmt,\n\n \"{:?} / {}: {}\",\n", "file_path": "couchbase-lite/src/error.rs", "rank": 37, "score": 29354.937601091933 }, { "content": " *err,\n\n desc.as_utf8_lossy(),\n\n msg.as_utf8_lossy()\n\n )\n\n }\n\n Error::Utf8 => write!(fmt, \"Utf8 encoding/decoding error\"),\n\n Error::Json5(err) => write!(fmt, \"Json5: {:?}\", err),\n\n Error::LogicError(msg) => write!(fmt, \"LogicError: {}\", msg),\n\n Error::SerdeJson(err) => write!(fmt, \"SerdeJson: {:?}\", err),\n\n Error::FlError(err) => write!(fmt, \"FlError: {}\", err),\n\n Error::NulError(err) => write!(fmt, \"NulError: {:?}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<C4Error> for Error {\n\n fn from(err: C4Error) -> Self {\n\n Error::DbError(err)\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/src/error.rs", "rank": 38, "score": 29353.813162351285 }, { "content": " unsafe { fl_slice_to_str_unchecked(self.inner.docID) }\n\n }\n\n #[inline]\n\n pub fn revision_id(&self) -> &str {\n\n unsafe { fl_slice_to_str_unchecked(self.inner.revID) }\n\n }\n\n #[inline]\n\n pub fn body_size(&self) -> u32 {\n\n self.inner.bodySize\n\n }\n\n}\n\n\n\nimpl Drop for DbChange {\n\n fn drop(&mut self) {\n\n unsafe { c4dbobs_releaseChanges(&mut self.inner, 1) };\n\n }\n\n}\n\n\n\nimpl<'obs> Iterator for DbChangesIter<'obs> {\n\n type Item = DbChange;\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 48, "score": 24.341825710858224 }, { "content": " fn drop(&mut self) {\n\n unsafe { 
c4enum_free(self.inner.as_ptr()) };\n\n }\n\n}\n\n\n\nimpl<'a> DocEnumerator<'a> {\n\n pub(crate) fn enumerate_all_docs(\n\n db: &'a Database,\n\n flags: DocEnumeratorFlags,\n\n ) -> Result<DocEnumerator<'a>> {\n\n let mut c4err = c4error_init();\n\n let opts = C4EnumeratorOptions { flags: flags.bits };\n\n let enum_ptr = unsafe { c4db_enumerateAllDocs(db.inner.0.as_ptr(), &opts, &mut c4err) };\n\n NonNull::new(enum_ptr)\n\n .map(|inner| DocEnumerator {\n\n _db: db,\n\n inner,\n\n reach_end: false,\n\n })\n\n .ok_or_else(|| c4err.into())\n", "file_path": "couchbase-lite/src/doc_enumerator.rs", "rank": 49, "score": 24.09421511639319 }, { "content": " }\n\n\n\n pub(crate) fn start(&mut self) {\n\n unsafe { c4repl_start(self.inner.as_ptr(), false) };\n\n }\n\n\n\n pub(crate) fn restart(self, db: &Database, url: &str, token: Option<&str>) -> Result<Self> {\n\n let Replicator {\n\n inner: prev_inner,\n\n free_callback_f,\n\n boxed_callback_f,\n\n c_callback_on_status_changed,\n\n } = self;\n\n mem::forget(self);\n\n unsafe {\n\n c4repl_stop(prev_inner.as_ptr());\n\n c4repl_free(prev_inner.as_ptr());\n\n }\n\n let mut repl = Replicator::do_new(\n\n db,\n", "file_path": "couchbase-lite/src/replicator.rs", "rank": 50, "score": 24.04511922504631 }, { "content": "use crate::{\n\n error::Error,\n\n ffi::{\n\n c4dbobs_create, c4dbobs_free, c4dbobs_getChanges, c4dbobs_releaseChanges, C4DatabaseChange,\n\n C4DatabaseObserver,\n\n },\n\n fl_slice::fl_slice_to_str_unchecked,\n\n Database, Result,\n\n};\n\nuse log::error;\n\nuse std::{mem::MaybeUninit, os::raw::c_void, panic::catch_unwind, process::abort, ptr::NonNull};\n\n\n\npub(crate) struct DatabaseObserver {\n\n inner: NonNull<C4DatabaseObserver>,\n\n free_callback_f: unsafe extern \"C\" fn(_: *mut c_void),\n\n boxed_callback_f: NonNull<c_void>,\n\n}\n\n\n\nimpl Drop for DatabaseObserver {\n\n fn drop(&mut self) {\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 51, "score": 23.954848394113313 }, { "content": "use 
crate::{\n\n document::{C4DocumentOwner, Document},\n\n error::{c4error_init, Error},\n\n ffi::{\n\n c4db_beginTransaction, c4db_endTransaction, c4doc_create, c4doc_update, kC4ErrorConflict,\n\n kC4ErrorNotFound, kRevDeleted, C4Document, C4RevisionFlags, LiteCoreDomain,\n\n },\n\n fl_slice::{AsFlSlice, FlSliceOwner},\n\n Database, Result,\n\n};\n\nuse std::{ops::Deref, ptr::NonNull};\n\n\n\npub struct Transaction<'db> {\n\n db: &'db Database,\n\n finished: bool,\n\n}\n\n\n\nimpl Transaction<'_> {\n\n pub(crate) fn new(db: &mut Database) -> Result<Transaction> {\n\n let mut c4err = c4error_init();\n", "file_path": "couchbase-lite/src/transaction.rs", "rank": 52, "score": 22.82103620234121 }, { "content": "\n\nimpl Database {\n\n pub fn open(path: &Path, cfg: DatabaseConfig) -> Result<Database> {\n\n Lazy::force(&DB_LOGGER);\n\n let mut error = c4error_init();\n\n let os_path_utf8 = path.to_str().ok_or(Error::Utf8)?;\n\n let os_path_utf8: C4String = os_path_utf8.as_flslice();\n\n let db_ptr = unsafe { c4db_open(os_path_utf8, &cfg.inner, &mut error) };\n\n NonNull::new(db_ptr)\n\n .map(|inner| Database {\n\n inner: DbInner(inner),\n\n db_events: Arc::new(Mutex::new(HashSet::new())),\n\n db_observers: vec![],\n\n db_replicator: None,\n\n replicator_params: None,\n\n })\n\n .ok_or_else(|| error.into())\n\n }\n\n\n\n pub(crate) fn internal_get(&self, doc_id: &str, must_exists: bool) -> Result<C4DocumentOwner> {\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 53, "score": 22.18575760924454 }, { "content": " if unsafe { c4db_beginTransaction(db.inner.0.as_ptr(), &mut c4err) } {\n\n Ok(Transaction {\n\n db,\n\n finished: false,\n\n })\n\n } else {\n\n Err(c4err.into())\n\n }\n\n }\n\n\n\n pub fn commit(mut self) -> Result<()> {\n\n self.end_transaction(true)\n\n }\n\n\n\n fn end_transaction(&mut self, commit: bool) -> Result<()> {\n\n self.finished = true;\n\n let mut c4err = c4error_init();\n\n if unsafe { c4db_endTransaction(self.db.inner.0.as_ptr(), commit, &mut 
c4err) } {\n\n Ok(())\n\n } else {\n", "file_path": "couchbase-lite/src/transaction.rs", "rank": 54, "score": 21.353903191196398 }, { "content": " Err(c4err.into())\n\n }\n\n }\n\n pub fn compact(&self) -> Result<bool> {\n\n let mut c4err = c4error_init();\n\n let ok = unsafe { c4db_compact(self.inner.0.as_ptr(), &mut c4err) };\n\n if !ok {\n\n return Err(c4err.into());\n\n }\n\n Ok(ok)\n\n }\n\n}\n\n\n\npub struct ObserverdChangesIter<'db> {\n\n db: &'db Database,\n\n obs_it: Option<DbChangesIter<'db>>,\n\n}\n\n\n\nimpl<'db> Iterator for ObserverdChangesIter<'db> {\n\n type Item = DbChange;\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 55, "score": 21.275414858570887 }, { "content": " pub(crate) fn rev_id(&self) -> &str {\n\n unsafe { fl_slice_to_str_unchecked(self.0.as_ref().selectedRev.revID) }\n\n }\n\n pub(crate) fn select_parent(&mut self) -> bool {\n\n unsafe { c4doc_selectParentRevision(self.0.as_mut()) }\n\n }\n\n}\n\n\n\nimpl Drop for C4DocumentOwner {\n\n fn drop(&mut self) {\n\n unsafe { c4doc_release(self.0.as_ptr()) };\n\n }\n\n}\n", "file_path": "couchbase-lite/src/document.rs", "rank": 56, "score": 21.221429560759532 }, { "content": "mod c4_header;\n\n\n\npub use c4_header::*;\n\n\n\nuse std::os::raw::c_void;\n\n\n\n//bindgen can not handle inline functions so\n\n\n\n#[inline]\n\npub unsafe fn c4db_release(db: *mut C4Database) {\n\n c4base_release(db as *mut c_void)\n\n}\n\n\n\n#[inline]\n\n#[allow(non_snake_case)]\n\npub unsafe fn FLSliceResult_Release(s: FLSliceResult) {\n\n _FLBuf_Release(s.buf);\n\n}\n\n\n\n#[inline]\n\npub unsafe fn c4query_release(r: *mut C4Query) {\n\n c4base_release(r as *mut c_void)\n\n}\n", "file_path": "couchbase-lite-core-sys/src/lib.rs", "rank": 57, "score": 20.959992822048726 }, { "content": "\n\n pub(crate) fn replace_c4doc(&mut self, doc: Option<C4DocumentOwner>) {\n\n self.inner = doc;\n\n }\n\n\n\n pub fn decode_data<T: DeserializeOwned>(self) -> Result<T> {\n\n if let Some(ref json) = 
self.unsaved_json5_body {\n\n let x: T = json5::from_str(&json)?;\n\n return Ok(x);\n\n }\n\n let inner: &C4DocumentOwner = self.inner.as_ref().ok_or_else(|| {\n\n Error::LogicError(format!(\n\n \"Document {} have no underlying C4Document\",\n\n self.id\n\n ))\n\n })?;\n\n load_body(inner.0)?;\n\n let mut c4err = c4error_init();\n\n let body = unsafe { c4doc_bodyAsJSON(inner.0.as_ptr(), true, &mut c4err) };\n\n if body.buf.is_null() {\n", "file_path": "couchbase-lite/src/document.rs", "rank": 58, "score": 20.788766623082026 }, { "content": "#[repr(transparent)]\n\npub(crate) struct FlSliceOwner(FLSliceResult);\n\n\n\nimpl FlSliceOwner {\n\n #[inline]\n\n pub fn as_bytes(&self) -> &[u8] {\n\n unsafe { slice::from_raw_parts(self.0.buf as *const u8, self.0.size) }\n\n }\n\n #[inline]\n\n pub fn as_utf8_lossy(&self) -> Cow<str> {\n\n String::from_utf8_lossy(self.as_bytes())\n\n }\n\n}\n\n\n\nimpl Drop for FlSliceOwner {\n\n fn drop(&mut self) {\n\n unsafe { FLSliceResult_Release(self.0) };\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/src/fl_slice.rs", "rank": 59, "score": 20.675230671332326 }, { "content": " drop(Box::from_raw(p as *mut T));\n\n}\n\n\n\npub(crate) struct DbChangesIter<'obs> {\n\n obs: &'obs DatabaseObserver,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DbChange {\n\n inner: C4DatabaseChange,\n\n external: bool,\n\n}\n\n\n\nimpl DbChange {\n\n #[inline]\n\n pub fn external(&self) -> bool {\n\n self.external\n\n }\n\n #[inline]\n\n pub fn doc_id(&self) -> &str {\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 60, "score": 20.41164883020633 }, { "content": " }\n\n\n\n pub fn get_doc(&self) -> Result<Document> {\n\n let mut c4err = c4error_init();\n\n let doc_ptr = unsafe { c4enum_getDocument(self.inner.as_ptr(), &mut c4err) };\n\n let c4doc: C4DocumentOwner =\n\n NonNull::new(doc_ptr).map(C4DocumentOwner).ok_or_else(|| {\n\n let err: Error = c4err.into();\n\n err\n\n })?;\n\n let id: String = c4doc.id().into();\n\n 
Ok(Document::new_internal(c4doc, id))\n\n }\n\n}\n\n\n\nimpl<'en> FallibleStreamingIterator for DocEnumerator<'en> {\n\n type Error = crate::error::Error;\n\n type Item = DocEnumerator<'en>;\n\n\n\n fn advance(&mut self) -> Result<()> {\n", "file_path": "couchbase-lite/src/doc_enumerator.rs", "rank": 61, "score": 20.24521463471984 }, { "content": "use crate::{\n\n document::{C4DocumentOwner, Document},\n\n error::{c4error_init, Error},\n\n ffi::{\n\n c4db_enumerateAllDocs, c4enum_free, c4enum_getDocument, c4enum_next, C4DocEnumerator,\n\n C4EnumeratorOptions,\n\n },\n\n Database, Result,\n\n};\n\nuse bitflags::bitflags;\n\nuse fallible_streaming_iterator::FallibleStreamingIterator;\n\nuse std::ptr::NonNull;\n\n\n\npub struct DocEnumerator<'a> {\n\n _db: &'a Database,\n\n reach_end: bool,\n\n inner: NonNull<C4DocEnumerator>,\n\n}\n\n\n\nimpl Drop for DocEnumerator<'_> {\n", "file_path": "couchbase-lite/src/doc_enumerator.rs", "rank": 62, "score": 20.043879872429613 }, { "content": "\n\npub use crate::{\n\n doc_enumerator::{DocEnumerator, DocEnumeratorFlags},\n\n document::Document,\n\n error::Error,\n\n query::Query,\n\n query::Enumerator,\n\n replicator::ReplicatorState,\n\n value::ValueRef,\n\n};\n\npub use couchbase_lite_core_sys as ffi;\n\npub use fallible_streaming_iterator;\n\n#[cfg(feature = \"replication\")]\n\npub use repl_transport::use_web_sockets;\n\n\n\nuse crate::{\n\n document::C4DocumentOwner,\n\n error::{c4error_init, Result},\n\n ffi::{\n\n c4db_createIndex, c4db_getDocumentCount, c4db_getIndexes, c4db_open, c4db_release,\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 63, "score": 19.69949967373655 }, { "content": "\n\n pub(crate) fn encode(&self, db: &Database) -> Result<FlSliceOwner> {\n\n if let Some(json5) = self.unsaved_json5_body.as_ref() {\n\n let mut c4err = c4error_init();\n\n let encoded = unsafe {\n\n c4db_encodeJSON(\n\n db.inner.0.as_ptr(),\n\n json5.as_bytes().as_flslice(),\n\n &mut c4err,\n\n )\n\n };\n\n if 
!encoded.buf.is_null() {\n\n Ok(encoded.into())\n\n } else {\n\n return Err(c4err.into());\n\n }\n\n } else {\n\n Ok(FlSliceOwner::default())\n\n }\n\n }\n", "file_path": "couchbase-lite/src/document.rs", "rank": 64, "score": 19.513479009915045 }, { "content": " unsafe {\n\n c4dbobs_free(self.inner.as_ptr());\n\n (self.free_callback_f)(self.boxed_callback_f.as_ptr());\n\n }\n\n }\n\n}\n\n\n\nimpl DatabaseObserver {\n\n pub(crate) fn new<F>(db: &Database, callback_f: F) -> Result<DatabaseObserver>\n\n where\n\n F: FnMut(*const C4DatabaseObserver) + Send + 'static,\n\n {\n\n unsafe extern \"C\" fn call_boxed_closure<F>(\n\n obs: *mut C4DatabaseObserver,\n\n context: *mut c_void,\n\n ) where\n\n F: FnMut(*const C4DatabaseObserver) + Send,\n\n {\n\n let r = catch_unwind(|| {\n\n let boxed_f = context as *mut F;\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 65, "score": 19.495169218068302 }, { "content": " /// Returns the names of all indexes in the database\n\n pub fn get_indexes(&self) -> Result<impl FallibleStreamingIterator<Item = str, Error = Error>> {\n\n let mut c4err = c4error_init();\n\n let enc_data = unsafe { c4db_getIndexes(self.inner.0.as_ptr(), &mut c4err) };\n\n if enc_data.buf.is_null() {\n\n return Err(c4err.into());\n\n }\n\n\n\n let enc_data: FlSliceOwner = enc_data.into();\n\n let indexes_list = DbIndexesListIterator::new(enc_data)?;\n\n Ok(indexes_list)\n\n }\n\n\n\n /// Creates a database index, of the values of specific expressions across\n\n /// all documents. The name is used to identify the index for later updating\n\n /// or deletion; if an index with the same name already exists, it will be\n\n /// replaced unless it has the exact same expressions.\n\n /// Note: If some documents are missing the values to be indexed,\n\n /// those documents will just be omitted from the index. 
It's not an error.\n\n pub fn create_index(\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 66, "score": 19.089603550166164 }, { "content": " pub(crate) fn new<F>(\n\n db: &Database,\n\n url: &str,\n\n token: Option<&str>,\n\n state_changed_callback: F,\n\n ) -> Result<Self>\n\n where\n\n F: FnMut(C4ReplicatorStatus) + Send + 'static,\n\n {\n\n unsafe extern \"C\" fn call_on_status_changed<F>(\n\n c4_repl: *mut C4Replicator,\n\n status: C4ReplicatorStatus,\n\n ctx: *mut c_void,\n\n ) where\n\n F: FnMut(C4ReplicatorStatus) + Send,\n\n {\n\n info!(\"on_status_changed: repl {:?}, status {:?}\", c4_repl, status);\n\n let r = catch_unwind(|| {\n\n let boxed_f = ctx as *mut F;\n\n assert!(\n", "file_path": "couchbase-lite/src/replicator.rs", "rank": 67, "score": 18.69743020328025 }, { "content": " .map(|inner| DatabaseObserver {\n\n inner,\n\n free_callback_f: free_boxed_value::<F>,\n\n boxed_callback_f: unsafe { NonNull::new_unchecked(boxed_f as *mut c_void) },\n\n })\n\n .ok_or_else(|| {\n\n unsafe { free_boxed_value::<F>(boxed_f as *mut c_void) };\n\n Error::LogicError(format!(\"c4dbobs_create return null\"))\n\n })\n\n }\n\n\n\n pub(crate) fn match_obs_ptr(&self, obs_ptr: usize) -> bool {\n\n self.inner.as_ptr() as usize == obs_ptr\n\n }\n\n pub(crate) fn changes_iter(&self) -> DbChangesIter {\n\n DbChangesIter { obs: self }\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn free_boxed_value<T>(p: *mut c_void) {\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 68, "score": 18.12895708522577 }, { "content": "impl Document {\n\n pub(crate) fn new_internal<S>(inner: C4DocumentOwner, doc_id: S) -> Self\n\n where\n\n S: Into<String>,\n\n {\n\n Self {\n\n inner: Some(inner),\n\n id: doc_id.into(),\n\n unsaved_json5_body: None,\n\n }\n\n }\n\n\n\n /// return the document's ID\n\n pub fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n pub(crate) fn is_empty(&self) -> bool {\n\n self.unsaved_json5_body.is_none()\n\n }\n", "file_path": "couchbase-lite/src/document.rs", 
"rank": 69, "score": 17.600178660086318 }, { "content": "use crate::ffi::{FLSlice, FLSliceResult, FLSliceResult_Release};\n\nuse std::{borrow::Cow, os::raw::c_void, ptr, slice, str};\n\n\n\npub(crate) trait AsFlSlice {\n\n fn as_flslice(&self) -> FLSlice;\n\n}\n\n\n\nimpl<'a> AsFlSlice for &'a [u8] {\n\n fn as_flslice(&self) -> FLSlice {\n\n FLSlice {\n\n buf: if !self.is_empty() {\n\n self.as_ptr() as *const c_void\n\n } else {\n\n ptr::null()\n\n },\n\n size: self.len(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/src/fl_slice.rs", "rank": 70, "score": 16.80647151927693 }, { "content": " pub fn query(&self, query_json: &str) -> Result<Query> {\n\n Query::new(self, query_json)\n\n }\n\n /// Compiles a query from an expression given as N1QL.\n\n pub fn n1ql_query(&self, query: &str) -> Result<Query> {\n\n Query::new_n1ql(self, query)\n\n }\n\n\n\n /// Creates an enumerator ordered by docID.\n\n pub fn enumerate_all_docs(&self, flags: DocEnumeratorFlags) -> Result<DocEnumerator> {\n\n DocEnumerator::enumerate_all_docs(self, flags)\n\n }\n\n\n\n /// Register a database observer, with a callback that will be invoked after the database\n\n /// changes. The callback will be called _once_, after the first change. After that it won't\n\n /// be called again until all of the changes have been read by calling `Database::observed_changes`.\n\n pub fn register_observer<F>(&mut self, mut callback_f: F) -> Result<()>\n\n where\n\n F: FnMut() + Send + 'static,\n\n {\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 71, "score": 16.800526858596037 }, { "content": " let mut c4err = c4error_init();\n\n let doc_ptr = unsafe {\n\n c4doc_get(\n\n self.inner.0.as_ptr(),\n\n doc_id.as_bytes().as_flslice(),\n\n must_exists,\n\n &mut c4err,\n\n )\n\n };\n\n NonNull::new(doc_ptr)\n\n .map(C4DocumentOwner)\n\n .ok_or_else(|| c4err.into())\n\n }\n\n\n\n /// Begin a new transaction, the transaction defaults to rolling back\n\n /// when it is dropped. 
If you want the transaction to commit,\n\n /// you must call `Transaction::commit`\n\n pub fn transaction(&mut self) -> Result<Transaction> {\n\n Transaction::new(self)\n\n }\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 72, "score": 16.150608821511153 }, { "content": " Ok(Self {\n\n _enc_data: enc_data,\n\n array,\n\n next_idx: 0,\n\n cur_val: None,\n\n })\n\n }\n\n}\n\n\n\nimpl FallibleStreamingIterator for DbIndexesListIterator {\n\n type Error = Error;\n\n type Item = str;\n\n\n\n fn advance(&mut self) -> Result<()> {\n\n if self.next_idx < self.array.len() {\n\n let val = unsafe { self.array.get_raw(self.next_idx) };\n\n let val_type = unsafe { FLValue_GetType(val) };\n\n if val_type != FLValueType::kFLString {\n\n return Err(Error::LogicError(format!(\n\n \"Wrong index type, expect String, got {:?}\",\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 73, "score": 16.128722559171937 }, { "content": "use crate::{\n\n error::{c4error_init, Error},\n\n ffi::{\n\n c4db_encodeJSON, c4doc_bodyAsJSON, c4doc_loadRevisionBody, c4doc_release, kDocExists,\n\n kRevDeleted, C4Document, C4DocumentFlags, C4RevisionFlags, c4doc_selectParentRevision\n\n },\n\n fl_slice::{fl_slice_to_str_unchecked, AsFlSlice, FlSliceOwner},\n\n Database, Result,\n\n};\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse std::{fmt, fmt::Debug, ptr::NonNull};\n\nuse uuid::Uuid;\n\n\n\n#[derive(Debug)]\n\npub struct Document {\n\n id: String,\n\n unsaved_json5_body: Option<String>,\n\n pub(crate) inner: Option<C4DocumentOwner>,\n\n}\n\n\n", "file_path": "couchbase-lite/src/document.rs", "rank": 74, "score": 15.994082273750625 }, { "content": " \"LIMIT\": 2, \"OFFSET\": 1\n\n}\n\n\"#,\n\n )\n\n .unwrap()\n\n );\n\n }\n\n tmp_dir.close().expect(\"Can not close tmp_dir\");\n\n\n\n fn query_data(db: &Database, query: &str) -> Result<Vec<String>, couchbase_lite::Error> {\n\n let query = db.query(query)?;\n\n let mut iter = query.run()?;\n\n let mut query_ret = Vec::with_capacity(10);\n\n 
while let Some(item) = iter.next()? {\n\n let val = item.get_raw_checked(0)?;\n\n let val = val.as_str()?;\n\n query_ret.push(val.to_string());\n\n }\n\n query_ret.sort();\n\n Ok(query_ret)\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/tests/smoke_tests.rs", "rank": 75, "score": 15.590645650831977 }, { "content": "}\n\n\n\nimpl Deref for Transaction<'_> {\n\n type Target = Database;\n\n\n\n fn deref(&self) -> &Database {\n\n self.db\n\n }\n\n}\n\n\n\nimpl Drop for Transaction<'_> {\n\n #[allow(unused_must_use)]\n\n fn drop(&mut self) {\n\n if !self.finished {\n\n self.end_transaction(false);\n\n }\n\n }\n\n}\n", "file_path": "couchbase-lite/src/transaction.rs", "rank": 76, "score": 15.345651073688632 }, { "content": "\n\n /// starts database replication\n\n pub fn start_replicator<F>(\n\n &mut self,\n\n url: &str,\n\n token: Option<&str>,\n\n mut repl_status_changed: F,\n\n ) -> Result<()>\n\n where\n\n F: FnMut(ReplicatorState) + Send + 'static,\n\n {\n\n let mut db_replicator =\n\n Replicator::new(\n\n self,\n\n url,\n\n token,\n\n move |status| match ReplicatorState::try_from(status) {\n\n Ok(state) => repl_status_changed(state),\n\n Err(err) => {\n\n error!(\"replicator status change: invalid status {}\", err);\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 77, "score": 15.311660449817603 }, { "content": " .map(|inner| Replicator {\n\n inner,\n\n free_callback_f,\n\n boxed_callback_f,\n\n c_callback_on_status_changed: call_on_status_changed,\n\n })\n\n .ok_or_else(|| {\n\n unsafe { free_callback_f(boxed_callback_f.as_ptr()) };\n\n c4err.into()\n\n })\n\n }\n\n\n\n pub(crate) fn stop(self) {\n\n unsafe { c4repl_stop(self.inner.as_ptr()) };\n\n }\n\n\n\n pub(crate) fn status(&self) -> C4ReplicatorStatus {\n\n unsafe { c4repl_getStatus(self.inner.as_ptr()) }\n\n }\n\n}\n", "file_path": "couchbase-lite/src/replicator.rs", "rank": 78, "score": 15.11434663873114 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl AsFlSlice for FLSliceResult {\n\n fn as_flslice(&self) 
-> FLSlice {\n\n FLSlice {\n\n buf: self.buf,\n\n size: self.size,\n\n }\n\n }\n\n}\n\n\n\n#[inline]\n\npub(crate) unsafe fn fl_slice_to_str_unchecked<'a>(s: FLSlice) -> &'a str {\n\n let bytes: &[u8] = slice::from_raw_parts(s.buf as *const u8, s.size);\n\n str::from_utf8_unchecked(bytes)\n\n}\n\n\n\n#[inline]\n\npub(crate) unsafe fn fl_slice_to_slice<'a>(s: FLSlice) -> &'a [u8] {\n\n slice::from_raw_parts(s.buf as *const u8, s.size)\n\n}\n", "file_path": "couchbase-lite/src/fl_slice.rs", "rank": 79, "score": 15.063015305572225 }, { "content": " Err(c4err.into())\n\n }\n\n }\n\n\n\n pub fn save(&mut self, doc: &mut Document) -> Result<()> {\n\n self.main_save(doc, false)\n\n }\n\n\n\n pub fn delete(&mut self, doc: &mut Document) -> Result<()> {\n\n self.main_save(doc, true)\n\n }\n\n\n\n fn main_save(&mut self, doc: &mut Document, deletion: bool) -> Result<()> {\n\n if deletion && !doc.exists() {\n\n return Err(Error::LogicError(format!(\n\n \"Cannot delete a document that has not yet been saved, doc_id {}\",\n\n doc.id()\n\n )));\n\n }\n\n let mut new_doc = match self.internal_save(doc, None, deletion) {\n", "file_path": "couchbase-lite/src/transaction.rs", "rank": 80, "score": 14.555596579855305 }, { "content": " index_type,\n\n &opts,\n\n &mut c4err,\n\n )\n\n }\n\n } else {\n\n unsafe {\n\n c4db_createIndex(\n\n self.inner.0.as_ptr(),\n\n index_name.as_flslice(),\n\n expression_json.as_flslice(),\n\n index_type,\n\n ptr::null(),\n\n &mut c4err,\n\n )\n\n }\n\n };\n\n if result {\n\n Ok(())\n\n } else {\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 81, "score": 14.26611650516998 }, { "content": "\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let mut item = MaybeUninit::<C4DatabaseChange>::uninit();\n\n let mut out_external = false;\n\n let n = unsafe {\n\n c4dbobs_getChanges(\n\n self.obs.inner.as_ptr(),\n\n item.as_mut_ptr(),\n\n 1,\n\n &mut out_external,\n\n )\n\n };\n\n if n > 0 {\n\n let item = unsafe { item.assume_init() };\n\n Some(DbChange 
{\n\n inner: item,\n\n external: out_external,\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "couchbase-lite/src/observer.rs", "rank": 82, "score": 13.950939590075851 }, { "content": " path::Path,\n\n ptr,\n\n ptr::NonNull,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\n/// Database configuration, used during open\n\npub struct DatabaseConfig {\n\n inner: C4DatabaseConfig,\n\n}\n\n\n\nimpl Default for DatabaseConfig {\n\n fn default() -> Self {\n\n Self {\n\n inner: C4DatabaseConfig {\n\n flags: kC4DB_Create as C4DatabaseFlags,\n\n storageEngine: unsafe { kC4SQLiteStorageEngine },\n\n versioning: kC4RevisionTrees as C4DocumentVersioning,\n\n encryptionKey: C4EncryptionKey {\n\n algorithm: kC4EncryptionNone as C4EncryptionAlgorithm,\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 83, "score": 13.711899008979598 }, { "content": " rev_flags,\n\n &mut c4err,\n\n )\n\n }\n\n } else {\n\n unsafe {\n\n c4doc_create(\n\n self.db.inner.0.as_ptr(),\n\n doc.id().as_bytes().as_flslice(),\n\n body.as_bytes().as_flslice(),\n\n rev_flags,\n\n &mut c4err,\n\n )\n\n }\n\n };\n\n\n\n NonNull::new(new_doc)\n\n .map(C4DocumentOwner)\n\n .ok_or_else(|| c4err.into())\n\n }\n", "file_path": "couchbase-lite/src/transaction.rs", "rank": 84, "score": 13.687636719082906 }, { "content": " }\n\n },\n\n )?;\n\n db_replicator.start();\n\n self.db_replicator = Some(db_replicator);\n\n self.replicator_params = Some(ReplicatorParams {\n\n url: url.into(),\n\n token: token.map(str::to_string),\n\n });\n\n Ok(())\n\n }\n\n /// restart database replicator, gives error if `Database::start_replicator`\n\n /// haven't called yet\n\n pub fn restart_replicator(&mut self) -> Result<()> {\n\n let replicator_params = self.replicator_params.as_ref().ok_or_else(|| {\n\n Error::LogicError(\n\n \"you call restart_replicator, but have not yet call start_replicator (params)\"\n\n .into(),\n\n )\n\n })?;\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 85, "score": 13.6812892464024 }, { "content": 
" pub fn update_data<T>(&mut self, data: &T) -> Result<()>\n\n where\n\n T: Serialize,\n\n {\n\n let body = json5::to_string(data)?;\n\n self.unsaved_json5_body = Some(body);\n\n Ok(())\n\n }\n\n\n\n pub fn exists(&self) -> bool {\n\n self.inner.as_ref().map(|x| x.exists()).unwrap_or(false)\n\n }\n\n pub fn is_deleted(&self) -> bool {\n\n self.inner.as_ref().map(|x| x.is_deleted()).unwrap_or(false)\n\n }\n\n pub fn rev_id(&self) -> &str {\n\n self.inner.as_ref().map(|x| x.rev_id()).unwrap_or(\"N/A\")\n\n }\n\n pub fn select_parent(&mut self) -> bool {\n\n self.inner.as_mut().map(|x| x.select_parent()).unwrap_or(false)\n\n }\n\n}\n\n\n", "file_path": "couchbase-lite/src/document.rs", "rank": 86, "score": 13.415274517377041 }, { "content": " c_callback_on_status_changed: C4ReplicatorStatusChangedCallback,\n\n free_callback_f: unsafe fn(_: *mut c_void),\n\n boxed_callback_f: NonNull<c_void>,\n\n}\n\n\n\n/// it should be safe to call replicator API from any thread\n\n/// according to https://github.com/couchbase/couchbase-lite-core/wiki/Thread-Safety\n\nunsafe impl Send for Replicator {}\n\n\n\nimpl Drop for Replicator {\n\n fn drop(&mut self) {\n\n unsafe {\n\n c4repl_free(self.inner.as_ptr());\n\n (self.free_callback_f)(self.boxed_callback_f.as_ptr());\n\n }\n\n }\n\n}\n\n\n\nimpl Replicator {\n\n /// For example: url \"ws://192.168.1.132:4984/demo/\"\n", "file_path": "couchbase-lite/src/replicator.rs", "rank": 87, "score": 13.207032688386546 }, { "content": " url,\n\n token,\n\n free_callback_f,\n\n boxed_callback_f,\n\n c_callback_on_status_changed,\n\n )?;\n\n repl.start();\n\n Ok(repl)\n\n }\n\n\n\n fn do_new(\n\n db: &Database,\n\n url: &str,\n\n token: Option<&str>,\n\n free_callback_f: unsafe fn(_: *mut c_void),\n\n boxed_callback_f: NonNull<c_void>,\n\n call_on_status_changed: C4ReplicatorStatusChangedCallback,\n\n ) -> Result<Self> {\n\n let mut remote_addr = C4Address {\n\n scheme: fl_slice_empty(),\n", "file_path": "couchbase-lite/src/replicator.rs", 
"rank": 88, "score": 13.119030251836104 }, { "content": " return Err(c4err.into());\n\n }\n\n let body: FlSliceOwner = body.into();\n\n let x: T = serde_json::from_slice(body.as_bytes())?;\n\n Ok(x)\n\n }\n\n\n\n pub fn new_with_id<S, T>(doc_id: S, data: &T) -> Result<Self>\n\n where\n\n S: Into<String>,\n\n T: Serialize,\n\n {\n\n Ok(Self {\n\n inner: None,\n\n id: doc_id.into(),\n\n unsaved_json5_body: Some(json5::to_string(data)?),\n\n })\n\n }\n\n\n\n pub fn new<T>(data: &T) -> Result<Self>\n", "file_path": "couchbase-lite/src/document.rs", "rank": 89, "score": 13.114229640408583 }, { "content": " bytes: [0; 32],\n\n },\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl DatabaseConfig {\n\n pub fn with_auto_compact() -> Self {\n\n Self {\n\n inner: C4DatabaseConfig {\n\n flags: kC4DB_Create as C4DatabaseFlags | kC4DB_AutoCompact as C4DatabaseFlags,\n\n storageEngine: unsafe { kC4SQLiteStorageEngine },\n\n versioning: kC4RevisionTrees as C4DocumentVersioning,\n\n encryptionKey: C4EncryptionKey {\n\n algorithm: kC4EncryptionNone as C4EncryptionAlgorithm,\n\n bytes: [0; 32],\n\n },\n\n },\n\n }\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 90, "score": 12.901620244435566 }, { "content": " /// Returns the number of (undeleted) documents in the database\n\n pub fn document_count(&self) -> u64 {\n\n unsafe { c4db_getDocumentCount(self.inner.0.as_ptr()) }\n\n }\n\n\n\n /// Return existing document from database\n\n pub fn get_existing(&self, doc_id: &str) -> Result<Document> {\n\n self.internal_get(doc_id, true)\n\n .map(|x| Document::new_internal(x, doc_id))\n\n }\n\n\n\n /// Return existing document from database or createa new one\n\n pub fn get_or_create(&self, doc_id: &str) -> Result<Document> {\n\n self.internal_get(doc_id, false)\n\n .map(|x| Document::new_internal(x, doc_id))\n\n }\n\n\n\n /// Compiles a query from an expression given as JSON.\n\n /// The expression is a predicate that describes which documents should be returned.\n\n /// A separate, optional sort 
expression describes the ordering of the results.\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 91, "score": 12.720204394002986 }, { "content": " where\n\n T: Serialize,\n\n {\n\n Ok(Self {\n\n inner: None,\n\n id: Uuid::new_v4().to_hyphenated().to_string(),\n\n unsaved_json5_body: Some(json5::to_string(data)?),\n\n })\n\n }\n\n\n\n pub fn new_with_id_json5<S: Into<String>>(doc_id: S, json5_str: String) -> Result<Self> {\n\n Ok(Self {\n\n inner: None,\n\n id: doc_id.into(),\n\n unsaved_json5_body: Some(json5_str),\n\n })\n\n }\n\n\n\n /// Update internal buffer with data, you need save document\n\n /// to database to make this change permanent\n", "file_path": "couchbase-lite/src/document.rs", "rank": 92, "score": 12.684432837287133 }, { "content": " pushFilter: None,\n\n validationFunc: None,\n\n onStatusChanged: call_on_status_changed,\n\n onDocumentsEnded: None,\n\n onBlobProgress: None,\n\n callbackContext: boxed_callback_f.as_ptr() as *mut c_void,\n\n socketFactory: ptr::null_mut(),\n\n };\n\n\n\n let mut c4err = c4error_init();\n\n let repl = unsafe {\n\n c4repl_new(\n\n db.inner.0.as_ptr(),\n\n remote_addr,\n\n db_name,\n\n repl_params,\n\n &mut c4err,\n\n )\n\n };\n\n NonNull::new(repl)\n", "file_path": "couchbase-lite/src/replicator.rs", "rank": 93, "score": 12.598846493569049 }, { "content": " UnsignedInt(u64),\n\n Double(f64),\n\n String(&'a str),\n\n Array(ValueRefArray),\n\n Dict(ValueRefDict),\n\n}\n\n\n\nimpl ValueRef<'_> {\n\n pub fn as_str(&self) -> Result<&str> {\n\n FromValueRef::column_result(*self)\n\n }\n\n pub fn as_u64(&self) -> Result<u64> {\n\n FromValueRef::column_result(*self)\n\n }\n\n pub fn is_null(&self) -> bool {\n\n match self {\n\n ValueRef::Null => true,\n\n _ => false,\n\n }\n\n }\n", "file_path": "couchbase-lite/src/value.rs", "rank": 94, "score": 12.48012755652321 }, { "content": "//! couchbase-lite is an ergonomic wrapper for using couchbase-lite-core from Rust.\n\n//! ```rust\n\n//! # #[macro_use]\n\n//! 
# extern crate serde;\n\n//! # use serde::{Serialize, Deserialize};\n\n//! use couchbase_lite::{\n\n//! Database, DatabaseConfig, Document,\n\n//! fallible_streaming_iterator::FallibleStreamingIterator\n\n//! };\n\n//! use std::path::Path;\n\n//!\n\n//! #[derive(Serialize, Deserialize, Debug)]\n\n//! #[serde(tag = \"type\")]\n\n//! struct Message {\n\n//! msg: String,\n\n//! }\n\n//!\n\n//! fn main() -> Result<(), couchbase_lite::Error> {\n\n//! let mut db = Database::open(Path::new(\"a.cblite2\"), DatabaseConfig::default())?;\n\n//! {\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 95, "score": 12.447303357926243 }, { "content": " pub(crate) unsafe fn get_raw(&self, idx: u32) -> FLValue {\n\n FLArray_Get(self.0, idx)\n\n }\n\n pub fn get<'a>(&'a self, idx: u32) -> ValueRef<'a> {\n\n unsafe { self.get_raw(idx) }.into()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n#[repr(transparent)]\n\npub struct ValueRefDict(FLDict);\n\n\n\nimpl ValueRefDict {\n\n pub fn len(&self) -> u32 {\n\n unsafe { FLDict_Count(self.0) }\n\n }\n\n pub fn is_empty(&self) -> bool {\n\n unsafe { FLDict_IsEmpty(self.0) }\n\n }\n\n pub fn decode<T>(self) -> Result<T> where T: DeserializeOwned {\n", "file_path": "couchbase-lite/src/value.rs", "rank": 96, "score": 12.402496186060084 }, { "content": "use crate::{\n\n error::Error,\n\n ffi::{\n\n FLArray, FLArray_Count, FLArray_Get, FLArray_IsEmpty, FLDict, FLDict_Count, FLDict_IsEmpty,\n\n FLValue, FLValueType, FLValue_AsArray, FLValue_AsBool, FLValue_AsDict, FLValue_AsDouble,\n\n FLValue_AsInt, FLValue_AsString, FLValue_AsUnsigned, FLValue_GetType, FLValue_IsDouble,\n\n FLValue_IsInteger, FLValue_IsUnsigned, FLValue_ToJSON\n\n },\n\n fl_slice::fl_slice_to_str_unchecked,\n\n Result,\n\n};\n\nuse std::convert::TryFrom;\n\nuse serde::de::DeserializeOwned;\n\nuse crate::fl_slice::FlSliceOwner;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum ValueRef<'a> {\n\n Null,\n\n Bool(bool),\n\n SignedInt(i64),\n", "file_path": 
"couchbase-lite/src/value.rs", "rank": 97, "score": 12.321325073383694 }, { "content": " let repl = self.db_replicator.take().ok_or_else(|| {\n\n Error::LogicError(\n\n \"you call restart_replicator, but have not yet call start_replicator (repl)\".into(),\n\n )\n\n })?;\n\n self.db_replicator = Some(repl.restart(\n\n self,\n\n &replicator_params.url,\n\n replicator_params.token.as_ref().map(String::as_str),\n\n )?);\n\n Ok(())\n\n }\n\n\n\n /// stop database replication\n\n pub fn stop_replicator(&mut self) {\n\n if let Some(repl) = self.db_replicator.take() {\n\n repl.stop();\n\n }\n\n }\n\n\n", "file_path": "couchbase-lite/src/lib.rs", "rank": 98, "score": 12.086320050180383 }, { "content": "#![allow(non_upper_case_globals)]\n\n\n\nuse crate::ffi::{\n\n c4log_setRustCallback, kC4DatabaseLog, kC4DefaultLog, kC4QueryLog, kC4SyncLog, kC4WebSocketLog,\n\n C4LogDomain, C4LogLevel,\n\n};\n\nuse once_cell::sync::Lazy;\n\nuse std::{ffi::CStr, os::raw::c_char};\n\n\n\nmacro_rules! define_log_level {\n\n ($const_name:ident) => {\n\n const $const_name: C4LogLevel = crate::ffi::$const_name as C4LogLevel;\n\n };\n\n}\n\n\n\ndefine_log_level!(kC4LogDebug);\n\ndefine_log_level!(kC4LogVerbose);\n\ndefine_log_level!(kC4LogInfo);\n\ndefine_log_level!(kC4LogWarning);\n\ndefine_log_level!(kC4LogError);\n", "file_path": "couchbase-lite/src/log_reroute.rs", "rank": 99, "score": 11.868205599424872 } ]
Rust
src/rust/iced-x86/src/formatter/fast/options.rs
darfink/iced
6371d812392a02bd9c37cbe4f19d2dcdf33aacd4
struct Flags1; impl Flags1 { const SPACE_AFTER_OPERAND_SEPARATOR: u32 = 0x0000_0001; const RIP_RELATIVE_ADDRESSES: u32 = 0x0000_0002; const USE_PSEUDO_OPS: u32 = 0x0000_0004; const SHOW_SYMBOL_ADDRESS: u32 = 0x0000_0008; const ALWAYS_SHOW_SEGMENT_REGISTER: u32 = 0x0000_0010; const ALWAYS_SHOW_MEMORY_SIZE: u32 = 0x0000_0020; const UPPERCASE_HEX: u32 = 0x0000_0040; const USE_HEX_PREFIX: u32 = 0x0000_0080; } #[derive(Debug, Clone, Eq, PartialEq, Hash)] #[allow(missing_copy_implementations)] pub struct FastFormatterOptions { options1: u32, } impl FastFormatterOptions { #[must_use] #[inline] pub(super) fn new() -> Self { Self { options1: Flags1::USE_PSEUDO_OPS | Flags1::UPPERCASE_HEX } } #[must_use] #[inline] pub fn space_after_operand_separator(&self) -> bool { (self.options1 & Flags1::SPACE_AFTER_OPERAND_SEPARATOR) != 0 } #[inline] pub fn set_space_after_operand_separator(&mut self, value: bool) { if value { self.options1 |= Flags1::SPACE_AFTER_OPERAND_SEPARATOR; } else { self.options1 &= !Flags1::SPACE_AFTER_OPERAND_SEPARATOR; } } #[must_use] #[inline] pub fn rip_relative_addresses(&self) -> bool { (self.options1 & Flags1::RIP_RELATIVE_ADDRESSES) != 0 } #[inline] pub fn set_rip_relative_addresses(&mut self, value: bool) { if value { self.options1 |= Flags1::RIP_RELATIVE_ADDRESSES; } else { self.options1 &= !Flags1::RIP_RELATIVE_ADDRESSES; } } #[must_use] #[inline] pub fn use_pseudo_ops(&self) -> bool { (self.options1 & Flags1::USE_PSEUDO_OPS) != 0 } #[inline] pub fn set_use_pseudo_ops(&mut self, value: bool) { if value { self.options1 |= Flags1::USE_PSEUDO_OPS; } else { self.options1 &= !Flags1::USE_PSEUDO_OPS; } } #[must_use] #[inline] pub fn show_symbol_address(&self) -> bool { (self.options1 & Flags1::SHOW_SYMBOL_ADDRESS) != 0 } #[inline] pub fn set_show_symbol_address(&mut self, value: bool) { if value { self.options1 |= Flags1::SHOW_SYMBOL_ADDRESS; } else { self.options1 &= !Flags1::SHOW_SYMBOL_ADDRESS; } } #[must_use] #[inline] pub fn 
always_show_segment_register(&self) -> bool { (self.options1 & Flags1::ALWAYS_SHOW_SEGMENT_REGISTER) != 0 } #[inline] pub fn set_always_show_segment_register(&mut self, value: bool) { if value { self.options1 |= Flags1::ALWAYS_SHOW_SEGMENT_REGISTER; } else { self.options1 &= !Flags1::ALWAYS_SHOW_SEGMENT_REGISTER; } } #[must_use] #[inline] pub fn always_show_memory_size(&self) -> bool { (self.options1 & Flags1::ALWAYS_SHOW_MEMORY_SIZE) != 0 } #[inline] pub fn set_always_show_memory_size(&mut self, value: bool) { if value { self.options1 |= Flags1::ALWAYS_SHOW_MEMORY_SIZE; } else { self.options1 &= !Flags1::ALWAYS_SHOW_MEMORY_SIZE; } } #[must_use] #[inline] pub fn uppercase_hex(&self) -> bool { (self.options1 & Flags1::UPPERCASE_HEX) != 0 } #[inline] pub fn set_uppercase_hex(&mut self, value: bool) { if value { self.options1 |= Flags1::UPPERCASE_HEX; } else { self.options1 &= !Flags1::UPPERCASE_HEX; } } #[must_use] #[inline] pub fn use_hex_prefix(&self) -> bool { (self.options1 & Flags1::USE_HEX_PREFIX) != 0 } #[inline] pub fn set_use_hex_prefix(&mut self, value: bool) { if value { self.options1 |= Flags1::USE_HEX_PREFIX; } else { self.options1 &= !Flags1::USE_HEX_PREFIX; } } }
struct Flags1; impl Flags1 { const SPACE_AFTER_OPERAND_SEPARATOR: u32 = 0x0000_0001; const RIP_RELATIVE_ADDRESSES: u32 = 0x0000_0002; const USE_PSEUDO_OPS: u32 = 0x0000_0004; const SHOW_SYMBOL_ADDRESS: u32 = 0x0000_0008; const ALWAYS_SHOW_SEGMENT_REGISTER: u32 = 0x0000_0010; const ALWAYS_SHOW_MEMORY_SIZE: u32 = 0x0000_0020; const UPPERCASE_HEX: u32 = 0x0000_0040; const USE_HEX_PREFIX: u32 = 0x0000_0080; } #[derive(Debug, Clone, Eq, PartialEq, Hash)] #[allow(missing_copy_implementations)] pub struct FastFormatterOptions { options1: u32, } impl FastFormatterOptions { #[must_use] #[inline] pub(super) fn new() -> Self { Self { options1: Flags1::USE_PSEUDO_OPS | Flags1::UPPERCASE_HEX } } #[must_use] #[inline] pub fn space_after_operand_separator(&self) -> bool { (self.options1 & Flags1::SPACE_AFTER_OPERAND_SEPARATOR) != 0 } #[inline] pub fn set_space_after_operand_separator(&mut self, value: bool) { if value { self.options1 |= Flags1::SPACE_AFTER_OPERAND_SEPARATOR; } else { self.options1 &= !Flags1::SPACE_AFTER_OPERAND_SEPARATOR; } } #[must_use] #[inline] pub fn rip_relative_addresses(&self) -> bool { (self.options1 & Flags1::RIP_RELATIVE_ADDRESSES) != 0 } #[inline] pub fn set_rip_relative_addresses(&mut self, value: bool) {
} #[must_use] #[inline] pub fn use_pseudo_ops(&self) -> bool { (self.options1 & Flags1::USE_PSEUDO_OPS) != 0 } #[inline] pub fn set_use_pseudo_ops(&mut self, value: bool) { if value { self.options1 |= Flags1::USE_PSEUDO_OPS; } else { self.options1 &= !Flags1::USE_PSEUDO_OPS; } } #[must_use] #[inline] pub fn show_symbol_address(&self) -> bool { (self.options1 & Flags1::SHOW_SYMBOL_ADDRESS) != 0 } #[inline] pub fn set_show_symbol_address(&mut self, value: bool) { if value { self.options1 |= Flags1::SHOW_SYMBOL_ADDRESS; } else { self.options1 &= !Flags1::SHOW_SYMBOL_ADDRESS; } } #[must_use] #[inline] pub fn always_show_segment_register(&self) -> bool { (self.options1 & Flags1::ALWAYS_SHOW_SEGMENT_REGISTER) != 0 } #[inline] pub fn set_always_show_segment_register(&mut self, value: bool) { if value { self.options1 |= Flags1::ALWAYS_SHOW_SEGMENT_REGISTER; } else { self.options1 &= !Flags1::ALWAYS_SHOW_SEGMENT_REGISTER; } } #[must_use] #[inline] pub fn always_show_memory_size(&self) -> bool { (self.options1 & Flags1::ALWAYS_SHOW_MEMORY_SIZE) != 0 } #[inline] pub fn set_always_show_memory_size(&mut self, value: bool) { if value { self.options1 |= Flags1::ALWAYS_SHOW_MEMORY_SIZE; } else { self.options1 &= !Flags1::ALWAYS_SHOW_MEMORY_SIZE; } } #[must_use] #[inline] pub fn uppercase_hex(&self) -> bool { (self.options1 & Flags1::UPPERCASE_HEX) != 0 } #[inline] pub fn set_uppercase_hex(&mut self, value: bool) { if value { self.options1 |= Flags1::UPPERCASE_HEX; } else { self.options1 &= !Flags1::UPPERCASE_HEX; } } #[must_use] #[inline] pub fn use_hex_prefix(&self) -> bool { (self.options1 & Flags1::USE_HEX_PREFIX) != 0 } #[inline] pub fn set_use_hex_prefix(&mut self, value: bool) { if value { self.options1 |= Flags1::USE_HEX_PREFIX; } else { self.options1 &= !Flags1::USE_HEX_PREFIX; } } }
if value { self.options1 |= Flags1::RIP_RELATIVE_ADDRESSES; } else { self.options1 &= !Flags1::RIP_RELATIVE_ADDRESSES; }
if_condition
[ { "content": "fn read_infos(bitness: u32, is_misc: bool) -> (Vec<InstructionInfo>, HashSet<u32>) {\n\n\tlet mut filename = get_formatter_unit_tests_dir();\n\n\tif is_misc {\n\n\t\tfilename.push(format!(\"InstructionInfos{}_Misc.txt\", bitness));\n\n\t} else {\n\n\t\tfilename.push(format!(\"InstructionInfos{}.txt\", bitness));\n\n\t}\n\n\n\n\tlet display_filename = filename.display().to_string();\n\n\tlet file = File::open(filename).unwrap_or_else(|_| panic!(\"Couldn't open file {}\", display_filename));\n\n\tlet mut infos: Vec<InstructionInfo> = Vec::new();\n\n\tlet mut line_number = 0;\n\n\tlet mut ignored: HashSet<u32> = HashSet::new();\n\n\tlet mut test_case_number = 0;\n\n\tfor info in BufReader::new(file).lines() {\n\n\t\tlet result = match info {\n\n\t\t\tOk(line) => {\n\n\t\t\t\tline_number += 1;\n\n\t\t\t\tif line.is_empty() || line.starts_with('#') {\n\n\t\t\t\t\tcontinue;\n", "file_path": "src/rust/iced-x86/src/formatter/tests/instr_infos.rs", "rank": 0, "score": 316127.1014354029 }, { "content": "#[wasm_bindgen(js_name = \"getIcedFeatures\")]\n\npub fn get_iced_features() -> u32 {\n\n\t#[allow(unused_mut)]\n\n\tlet mut flags = 0;\n\n\t#[cfg(not(feature = \"no_vex\"))]\n\n\t{\n\n\t\tflags |= 1;\n\n\t}\n\n\t#[cfg(not(feature = \"no_evex\"))]\n\n\t{\n\n\t\tflags |= 2;\n\n\t}\n\n\t#[cfg(not(feature = \"no_xop\"))]\n\n\t{\n\n\t\tflags |= 4;\n\n\t}\n\n\t#[cfg(not(feature = \"no_d3now\"))]\n\n\t{\n\n\t\tflags |= 8;\n\n\t}\n\n\tflags\n\n}\n", "file_path": "src/rust/iced-x86-js/src/lib.rs", "rank": 1, "score": 314156.576657676 }, { "content": "fn decoder_new_panics(bitness: u32) {\n\n\tlet _ = Decoder::new(bitness, b\"\\x90\", DecoderOptions::NONE);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/misc_tests.rs", "rank": 2, "score": 257652.1882443564 }, { "content": "fn decoder_try_new_fails(bitness: u32) {\n\n\tassert!(Decoder::try_new(bitness, b\"\\x90\", DecoderOptions::NONE).is_err());\n\n}\n\n\n", "file_path": 
"src/rust/iced-x86/src/decoder/tests/misc_tests.rs", "rank": 3, "score": 254088.48946271217 }, { "content": "struct Flags1;\n\nimpl Flags1 {\n\n\tconst UPPERCASE_PREFIXES: u32 = 0x0000_0001;\n\n\tconst UPPERCASE_MNEMONICS: u32 = 0x0000_0002;\n\n\tconst UPPERCASE_REGISTERS: u32 = 0x0000_0004;\n\n\tconst UPPERCASE_KEYWORDS: u32 = 0x0000_0008;\n\n\tconst UPPERCASE_DECORATORS: u32 = 0x0000_0010;\n\n\tconst UPPERCASE_ALL: u32 = 0x0000_0020;\n\n\tconst SPACE_AFTER_OPERAND_SEPARATOR: u32 = 0x0000_0040;\n\n\tconst SPACE_AFTER_MEMORY_BRACKET: u32 = 0x0000_0080;\n\n\tconst SPACE_BETWEEN_MEMORY_ADD_OPERATORS: u32 = 0x0000_0100;\n\n\tconst SPACE_BETWEEN_MEMORY_MUL_OPERATORS: u32 = 0x0000_0200;\n\n\tconst SCALE_BEFORE_INDEX: u32 = 0x0000_0400;\n\n\tconst ALWAYS_SHOW_SCALE: u32 = 0x0000_0800;\n\n\tconst ALWAYS_SHOW_SEGMENT_REGISTER: u32 = 0x0000_1000;\n\n\tconst SHOW_ZERO_DISPLACEMENTS: u32 = 0x0000_2000;\n\n\tconst LEADING_ZEROES: u32 = 0x0000_4000;\n\n\tconst UPPERCASE_HEX: u32 = 0x0000_8000;\n\n\tconst SMALL_HEX_NUMBERS_IN_DECIMAL: u32 = 0x0001_0000;\n\n\tconst ADD_LEADING_ZERO_TO_HEX_NUMBERS: u32 = 0x0002_0000;\n", "file_path": "src/rust/iced-x86/src/formatter/fmt_opts.rs", "rank": 4, "score": 238114.79467893657 }, { "content": "fn get_op_code(enc_flags2: u32) -> u32 {\n\n\t(enc_flags2 >> EncFlags2::OP_CODE_SHIFT) as u16 as u32\n\n}\n\n\n\n#[repr(C)]\n\npub(super) struct LegacyHandler {\n\n\tbase: OpCodeHandler,\n\n\ttable_byte1: u32,\n\n\ttable_byte2: u32,\n\n\tmandatory_prefix: u32,\n\n}\n\n\n\nimpl LegacyHandler {\n\n\tpub(super) fn new(enc_flags1: u32, enc_flags2: u32, enc_flags3: u32) -> Self {\n\n\t\tlet group_index = if (enc_flags2 & EncFlags2::HAS_GROUP_INDEX) == 0 { -1 } else { ((enc_flags2 >> EncFlags2::GROUP_INDEX_SHIFT) & 7) as i32 };\n\n\t\tlet rm_group_index =\n\n\t\t\tif (enc_flags2 & EncFlags2::HAS_RM_GROUP_INDEX) == 0 { -1 } else { ((enc_flags2 >> EncFlags2::GROUP_INDEX_SHIFT) & 7) as i32 };\n\n\t\t// SAFETY: generated data is valid\n\n\t\tlet table: 
LegacyOpCodeTable = unsafe { mem::transmute(((enc_flags2 >> EncFlags2::TABLE_SHIFT) & EncFlags2::TABLE_MASK) as u8) };\n\n\t\tlet (table_byte1, table_byte2) = match table {\n", "file_path": "src/rust/iced-x86/src/encoder/op_code_handler.rs", "rank": 6, "score": 226777.78885149522 }, { "content": "#[test]\n\n#[cfg(feature = \"encoder\")]\n\n#[cfg(not(feature = \"no_vex\"))]\n\nfn eq_and_hash_ignore_some_fields() {\n\n\tuse core::hash::{Hash, Hasher};\n\n\tuse std::collections::hash_map::DefaultHasher;\n\n\tlet mut instr1 = Instruction::try_with_reg_reg_mem_reg_u32(\n\n\t\tCode::VEX_Vpermil2ps_xmm_xmm_xmmm128_xmm_imm4,\n\n\t\tRegister::XMM1,\n\n\t\tRegister::XMM2,\n\n\t\tMemoryOperand::new(Register::RCX, Register::R14, 8, 0x1234_5678, 8, false, Register::FS),\n\n\t\tRegister::XMM10,\n\n\t\t0xA5,\n\n\t)\n\n\t.unwrap();\n\n\tlet mut instr2 = instr1;\n\n\tassert!(instr1.eq_all_bits(&instr2));\n\n\tinstr1.set_code_size(CodeSize::Code32);\n\n\tinstr2.set_code_size(CodeSize::Code64);\n\n\tassert!(!instr1.eq_all_bits(&instr2));\n\n\tinstr1.set_len(10);\n\n\tinstr2.set_len(5);\n\n\tinstr1.set_ip(0x9733_3795_FA7C_EAAB);\n\n\tinstr2.set_ip(0x9BE5_A3A0_7A66_FC05);\n\n\tassert_eq!(instr1, instr2);\n\n\tlet mut hasher1 = DefaultHasher::new();\n\n\tlet mut hasher2 = DefaultHasher::new();\n\n\tinstr1.hash(&mut hasher1);\n\n\tinstr2.hash(&mut hasher2);\n\n\tassert_eq!(hasher1.finish(), hasher2.finish());\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/test/instr_misc.rs", "rank": 7, "score": 226323.06575897645 }, { "content": "fn encode(bitness: u32) {\n\n\tfor info in &encoder_tests(true, false) {\n\n\t\tif info.bitness() == bitness {\n\n\t\t\tencode_test(info);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 8, "score": 212066.50677246758 }, { "content": "fn decode(bitness: u32) {\n\n\tfor info in test_cases::get_test_cases(bitness) {\n\n\t\tdecode_test(bitness, info);\n\n\t}\n\n}\n\n\n", "file_path": 
"src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 9, "score": 212066.50677246758 }, { "content": "fn create_panics_if_invalid_bitness_core(tests: Vec<fn(u32) -> Instruction>) {\n\n\tfor f in tests {\n\n\t\tlet result = panic::catch_unwind(|| f(128));\n\n\t\tassert!(result.is_err());\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/create.rs", "rank": 10, "score": 210633.55034232733 }, { "content": "fn with_test_core(tests: Vec<(u32, &str, u32, Instruction)>) {\n\n\tfor (bitness, hex_bytes, options, created_instr) in tests {\n\n\t\tlet bytes = to_vec_u8(hex_bytes).unwrap();\n\n\t\tlet mut decoder = create_decoder(bitness, bytes.as_slice(), get_default_ip(bitness), options).0;\n\n\t\tlet orig_rip = decoder.ip();\n\n\t\tlet mut decoded_instr = decoder.decode();\n\n\t\tdecoded_instr.set_code_size(CodeSize::default());\n\n\t\tdecoded_instr.set_len(0);\n\n\t\tdecoded_instr.set_next_ip(0);\n\n\n\n\t\tassert!(decoded_instr.eq_all_bits(&created_instr));\n\n\n\n\t\tlet mut encoder = Encoder::new(decoder.bitness());\n\n\t\tlet _ = encoder.encode(&created_instr, orig_rip).unwrap();\n\n\t\tassert_eq!(encoder.take_buffer(), bytes);\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/create.rs", "rank": 11, "score": 209876.2275478873 }, { "content": "fn decode_misc(bitness: u32) {\n\n\tfor info in test_cases::get_misc_test_cases(bitness) {\n\n\t\tdecode_test(bitness, info);\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 12, "score": 209334.7556271663 }, { "content": "fn test_info(bitness: u32) {\n\n\tlet mut factory = InstructionInfoFactory::new();\n\n\tfor tc in get_instr_info_test_cases(bitness) {\n\n\t\ttest_info_core(tc, &mut factory);\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/info/tests/mod.rs", "rank": 13, "score": 209334.7556271663 }, { "content": "fn decode_mem(bitness: u32) {\n\n\tfor info in test_cases::get_mem_test_cases(bitness) {\n\n\t\tdecode_mem_test(bitness, 
info);\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 14, "score": 209334.7556271663 }, { "content": "fn get_vsib(op_code: &OpCodeInfo) -> Option<(bool, bool)> {\n\n\tfor &op_kind in op_code.op_kinds() {\n\n\t\tmatch op_kind {\n\n\t\t\tOpCodeOperandKind::mem_vsib32x | OpCodeOperandKind::mem_vsib32y | OpCodeOperandKind::mem_vsib32z => return Some((true, false)),\n\n\t\t\tOpCodeOperandKind::mem_vsib64x | OpCodeOperandKind::mem_vsib64y | OpCodeOperandKind::mem_vsib64z => return Some((false, true)),\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t}\n\n\n\n\tNone\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 15, "score": 208783.60329955947 }, { "content": "fn skip_prefixes(bytes: &[u8], bitness: u32) -> (usize, u32) {\n\n\tlet mut rex = 0;\n\n\tfor (i, &b) in bytes.iter().enumerate() {\n\n\t\tmatch b {\n\n\t\t\t0x26 | 0x2E | 0x36 | 0x3E | 0x64 | 0x65 | 0x66 | 0x67 | 0xF0 | 0xF2 | 0xF3 => {\n\n\t\t\t\trex = 0;\n\n\t\t\t}\n\n\t\t\t_ => {\n\n\t\t\t\tif bitness == 64 && (b & 0xF0) == 0x40 {\n\n\t\t\t\t\trex = b as u32;\n\n\t\t\t\t} else {\n\n\t\t\t\t\treturn (i, rex);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t(bytes.len(), rex)\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 16, "score": 207772.24313511577 }, { "content": "fn get_vvvvv_info(op_code: &OpCodeInfo) -> (bool, bool, u8) {\n\n\tlet mut uses_vvvv = false;\n\n\tlet mut is_vsib = false;\n\n\tlet mut vvvv_mask = match op_code.encoding() {\n\n\t\tEncodingKind::EVEX => 0x1F,\n\n\t\tEncodingKind::VEX | EncodingKind::XOP => 0xF,\n\n\t\tEncodingKind::Legacy | EncodingKind::D3NOW => panic!(),\n\n\t};\n\n\tfor &op_kind in op_code.op_kinds() {\n\n\t\tmatch op_kind {\n\n\t\t\tOpCodeOperandKind::mem_vsib32x\n\n\t\t\t| OpCodeOperandKind::mem_vsib64x\n\n\t\t\t| OpCodeOperandKind::mem_vsib32y\n\n\t\t\t| OpCodeOperandKind::mem_vsib64y\n\n\t\t\t| OpCodeOperandKind::mem_vsib32z\n\n\t\t\t| OpCodeOperandKind::mem_vsib64z => is_vsib = 
true,\n\n\t\t\tOpCodeOperandKind::k_vvvv | OpCodeOperandKind::tmm_vvvv => {\n\n\t\t\t\tuses_vvvv = true;\n\n\t\t\t\tvvvv_mask = 0x7;\n\n\t\t\t}\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 17, "score": 206891.6942693823 }, { "content": "fn non_decode_encode(bitness: u32) {\n\n\tconst RIP: u64 = 0;\n\n\tfor tc in non_decoded_tests::get_tests() {\n\n\t\tif tc.0 != bitness {\n\n\t\t\tcontinue;\n\n\t\t}\n\n\t\tlet expected_bytes = to_vec_u8(tc.1).unwrap();\n\n\t\tlet mut encoder = Encoder::new(bitness);\n\n\t\tassert_eq!(encoder.bitness(), bitness);\n\n\t\tlet encoded_instr_len = encoder.encode(&tc.2, RIP).unwrap();\n\n\t\tlet encoded_bytes = encoder.take_buffer();\n\n\t\tassert_eq!(encoded_bytes, expected_bytes);\n\n\t\tassert_eq!(encoded_instr_len, encoded_bytes.len());\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 18, "score": 206730.23059269247 }, { "content": "fn filter_removed_code_tests(strings: Vec<String>, ignored: &HashSet<u32>) -> Vec<String> {\n\n\tif ignored.is_empty() {\n\n\t\tstrings\n\n\t} else {\n\n\t\tstrings.into_iter().enumerate().filter(|a| !ignored.contains(&(a.0 as u32))).map(|a| a.1).collect()\n\n\t}\n\n}\n", "file_path": "src/rust/iced-x86/src/formatter/tests/mod.rs", "rank": 19, "score": 202858.6485156059 }, { "content": "fn get_tests(include_other_tests: bool, include_invalid: bool, can_encode: Option<bool>) -> Vec<DecoderTestInfo> {\n\n\tlet mut v: Vec<DecoderTestInfo> = Vec::new();\n\n\tlet bitness_array = [16, 32, 64];\n\n\tfor bitness in &bitness_array {\n\n\t\tadd_tests(&mut v, get_test_cases(*bitness), include_invalid, can_encode);\n\n\t}\n\n\tif include_other_tests {\n\n\t\tfor bitness in &bitness_array {\n\n\t\t\tadd_tests(&mut v, get_misc_test_cases(*bitness), include_invalid, can_encode);\n\n\t\t}\n\n\t\tfor bitness in &bitness_array {\n\n\t\t\tadd_tests_mem(&mut v, get_mem_test_cases(*bitness), include_invalid, can_encode);\n\n\t\t}\n\n\t}\n\n\tv\n\n}\n\n\n", 
"file_path": "src/rust/iced-x86/src/decoder/tests/test_utils.rs", "rank": 20, "score": 201296.67348136677 }, { "content": "#[cfg(not(feature = \"instr_info\"))]\n\nfn reg_number(reg: Register) -> u32 {\n\n\tif Register::AL <= reg && reg <= Register::R15L {\n\n\t\treg as u32 - Register::AL as u32\n\n\t} else if Register::AX <= reg && reg <= Register::R15W {\n\n\t\treg as u32 - Register::AX as u32\n\n\t} else if Register::EAX <= reg && reg <= Register::R15D {\n\n\t\treg as u32 - Register::EAX as u32\n\n\t} else if Register::RAX <= reg && reg <= Register::R15 {\n\n\t\treg as u32 - Register::RAX as u32\n\n\t} else if Register::XMM0 <= reg && reg <= Register::XMM31 {\n\n\t\treg as u32 - Register::XMM0 as u32\n\n\t} else if Register::YMM0 <= reg && reg <= Register::YMM31 {\n\n\t\treg as u32 - Register::YMM0 as u32\n\n\t} else if Register::ZMM0 <= reg && reg <= Register::ZMM31 {\n\n\t\treg as u32 - Register::ZMM0 as u32\n\n\t} else if Register::K0 <= reg && reg <= Register::K7 {\n\n\t\treg as u32 - Register::K0 as u32\n\n\t} else if Register::BND0 <= reg && reg <= Register::BND3 {\n\n\t\treg as u32 - Register::BND0 as u32\n\n\t} else if Register::CR0 <= reg && reg <= Register::CR15 {\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 21, "score": 196652.2743222175 }, { "content": "#[cfg(not(feature = \"instr_info\"))]\n\nfn reg_size(reg: Register) -> u32 {\n\n\tif Register::AX <= reg && reg <= Register::R15W {\n\n\t\t2\n\n\t} else if Register::EAX <= reg && reg <= Register::R15D || reg == Register::EIP {\n\n\t\t4\n\n\t} else if Register::RAX <= reg && reg <= Register::R15 || reg == Register::RIP {\n\n\t\t8\n\n\t} else {\n\n\t\tpanic!()\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 22, "score": 196652.2743222175 }, { "content": "fn create_fails_if_invalid_bitness_core(tests: Vec<fn(u32) -> Result<Instruction, IcedError>>) {\n\n\tfor f in tests {\n\n\t\tassert!(f(128).is_err());\n\n\t}\n\n}\n\n\n", 
"file_path": "src/rust/iced-x86/src/encoder/tests/create.rs", "rank": 23, "score": 195500.3549965581 }, { "content": "fn get_bitness(code_size: CodeSize) -> u32 {\n\n\tmatch code_size {\n\n\t\tCodeSize::Code16 => 16,\n\n\t\tCodeSize::Code32 => 32,\n\n\t\tCodeSize::Code64 => 64,\n\n\t\t_ => 0,\n\n\t}\n\n}\n\n\n\npub(super) struct SimpleInstrInfo {\n\n\tmnemonic: FormatterString,\n\n\tflags: u32,\n\n}\n\n\n\nimpl SimpleInstrInfo {\n\n\tpub(super) fn with_mnemonic(mnemonic: String) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags: InstrOpInfoFlags::NONE }\n\n\t}\n\n\tpub(super) fn new(mnemonic: String, flags: u32) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags }\n", "file_path": "src/rust/iced-x86/src/formatter/nasm/info.rs", "rank": 24, "score": 194276.9174152391 }, { "content": "fn get_bitness(code_size: CodeSize) -> u32 {\n\n\tmatch code_size {\n\n\t\tCodeSize::Code16 => 16,\n\n\t\tCodeSize::Code32 => 32,\n\n\t\tCodeSize::Code64 => 64,\n\n\t\t_ => 0,\n\n\t}\n\n}\n\n\n\npub(super) struct SimpleInstrInfo {\n\n\tmnemonic: FormatterString,\n\n\tflags: u32,\n\n}\n\n\n\nimpl SimpleInstrInfo {\n\n\tpub(super) fn with_mnemonic(mnemonic: String) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags: InstrOpInfoFlags::NONE }\n\n\t}\n\n\tpub(super) fn new(mnemonic: String, flags: u32) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags }\n", "file_path": "src/rust/iced-x86/src/formatter/intel/info.rs", "rank": 25, "score": 194276.9174152391 }, { "content": "fn get_bitness(code_size: CodeSize) -> u32 {\n\n\tmatch code_size {\n\n\t\tCodeSize::Code16 => 16,\n\n\t\tCodeSize::Code32 => 32,\n\n\t\tCodeSize::Code64 => 64,\n\n\t\t_ => 0,\n\n\t}\n\n}\n\n\n\npub(super) struct SimpleInstrInfo {\n\n\tmnemonic: FormatterString,\n\n\tflags: u32,\n\n}\n\n\n\nimpl SimpleInstrInfo {\n\n\tpub(super) fn with_mnemonic(mnemonic: String) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags: 
InstrOpInfoFlags::NONE }\n\n\t}\n\n\tpub(super) fn new(mnemonic: String, flags: u32) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic), flags }\n", "file_path": "src/rust/iced-x86/src/formatter/masm/info.rs", "rank": 26, "score": 194276.9174152391 }, { "content": "fn get_bitness(code_size: CodeSize) -> u32 {\n\n\tmatch code_size {\n\n\t\tCodeSize::Code16 => 16,\n\n\t\tCodeSize::Code32 => 32,\n\n\t\tCodeSize::Code64 => 64,\n\n\t\t_ => 0,\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/formatter/gas/info.rs", "rank": 27, "score": 194276.9174152391 }, { "content": "fn check_equal(info1: &InstructionInfo, info2: &InstructionInfo, has_regs2: bool, has_mem2: bool) {\n\n\tif has_regs2 {\n\n\t\tassert_eq!(info2.used_registers(), info1.used_registers());\n\n\t} else {\n\n\t\tassert!(info2.used_registers().is_empty());\n\n\t}\n\n\tif has_mem2 {\n\n\t\tassert_eq!(info2.used_memory(), info1.used_memory());\n\n\t} else {\n\n\t\tassert!(info2.used_memory().is_empty());\n\n\t}\n\n\t#[allow(deprecated)]\n\n\t{\n\n\t\tassert_eq!(info2.is_privileged(), info1.is_privileged());\n\n\t\tassert_eq!(info2.is_stack_instruction(), info1.is_stack_instruction());\n\n\t\tassert_eq!(info2.is_save_restore_instruction(), info1.is_save_restore_instruction());\n\n\t\tassert_eq!(info2.encoding(), info1.encoding());\n\n\t\tassert_eq!(info2.cpuid_features(), info1.cpuid_features());\n\n\t\tassert_eq!(info2.flow_control(), info1.flow_control());\n\n\t}\n", "file_path": "src/rust/iced-x86/src/info/tests/mod.rs", "rank": 28, "score": 193399.5874537184 }, { "content": "fn is_vsib(op_code: &OpCodeInfo) -> bool {\n\n\tget_vsib(op_code).is_some()\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 29, "score": 192017.54602633795 }, { "content": "fn get_memory_register_size(instruction: &Instruction) -> u32 {\n\n\tfor op_kind in instruction.op_kinds() {\n\n\t\tmatch op_kind {\n\n\t\t\tOpKind::Register\n\n\t\t\t| OpKind::NearBranch16\n\n\t\t\t| 
OpKind::NearBranch32\n\n\t\t\t| OpKind::NearBranch64\n\n\t\t\t| OpKind::FarBranch16\n\n\t\t\t| OpKind::FarBranch32\n\n\t\t\t| OpKind::Immediate8\n\n\t\t\t| OpKind::Immediate8_2nd\n\n\t\t\t| OpKind::Immediate16\n\n\t\t\t| OpKind::Immediate32\n\n\t\t\t| OpKind::Immediate64\n\n\t\t\t| OpKind::Immediate8to16\n\n\t\t\t| OpKind::Immediate8to32\n\n\t\t\t| OpKind::Immediate8to64\n\n\t\t\t| OpKind::Immediate32to64 => {}\n\n\t\t\tOpKind::MemorySegSI | OpKind::MemorySegDI | OpKind::MemoryESDI => return 16,\n\n\t\t\tOpKind::MemorySegESI | OpKind::MemorySegEDI | OpKind::MemoryESEDI => return 32,\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 30, "score": 192004.9644714875 }, { "content": "fn read_code_values(name: &str) -> HashSet<Code> {\n\n\tlet mut filename = get_decoder_unit_tests_dir();\n\n\tfilename.push(name);\n\n\tlet display_filename = filename.display();\n\n\tlet file = File::open(filename.as_path()).unwrap_or_else(|_| panic!(\"Couldn't open file {}\", display_filename));\n\n\tlet mut h = HashSet::new();\n\n\tfor (info, line_number) in BufReader::new(file).lines().zip(1..) 
{\n\n\t\tlet err = match info {\n\n\t\t\tOk(line) => {\n\n\t\t\t\tif line.is_empty() || line.starts_with('#') || is_ignored_code(&line) {\n\n\t\t\t\t\tNone\n\n\t\t\t\t} else {\n\n\t\t\t\t\tmatch to_code(&line) {\n\n\t\t\t\t\t\tOk(code) => {\n\n\t\t\t\t\t\t\tlet _ = h.insert(code);\n\n\t\t\t\t\t\t\tNone\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tErr(err) => Some(err),\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n", "file_path": "src/rust/iced-x86/src/decoder/tests/test_utils.rs", "rank": 31, "score": 191531.84521447404 }, { "content": "fn get_address_size_flags(op_kind: OpKind) -> u32 {\n\n\tmatch op_kind {\n\n\t\tOpKind::MemorySegSI | OpKind::MemorySegDI | OpKind::MemoryESDI => InstrOpInfoFlags::ADDR_SIZE16,\n\n\t\tOpKind::MemorySegESI | OpKind::MemorySegEDI | OpKind::MemoryESEDI => InstrOpInfoFlags::ADDR_SIZE32,\n\n\t\tOpKind::MemorySegRSI | OpKind::MemorySegRDI | OpKind::MemoryESRDI => InstrOpInfoFlags::ADDR_SIZE64,\n\n\t\t_ => 0,\n\n\t}\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\npub(super) struct SimpleInstrInfo_String {\n\n\tmnemonic: FormatterString,\n\n}\n\n\n\nimpl SimpleInstrInfo_String {\n\n\tpub(super) fn new(mnemonic: String) -> Self {\n\n\t\tSelf { mnemonic: FormatterString::new(mnemonic) }\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/formatter/nasm/info.rs", "rank": 32, "score": 189829.80725846707 }, { "content": "fn has_is4_or_is5_operands(op_code: &OpCodeInfo) -> bool {\n\n\tfor &op_kind in op_code.op_kinds() {\n\n\t\tmatch op_kind {\n\n\t\t\tOpCodeOperandKind::xmm_is4 | OpCodeOperandKind::xmm_is5 | OpCodeOperandKind::ymm_is4 | OpCodeOperandKind::ymm_is5 => return true,\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t}\n\n\tfalse\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 33, "score": 187757.86068827994 }, { "content": "fn decode(bitness: u32, rip: u64, data: &[u8], options: u32) -> Vec<Instruction> {\n\n\tlet mut decoder = create_decoder(bitness, data, get_default_ip(bitness), 
options).0;\n\n\tdecoder.set_ip(rip);\n\n\tdecoder.into_iter().collect()\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/block_enc/tests/mod.rs", "rank": 34, "score": 186385.42459040755 }, { "content": "fn decode_test(bitness: u32, tc: &DecoderTestCase) {\n\n\tlet bytes = to_vec_u8(&tc.hex_bytes).unwrap();\n\n\tlet (mut decoder, len, can_read) = create_decoder(bitness, &bytes, tc.ip, tc.decoder_options);\n\n\tassert_eq!(decoder.position(), 0);\n\n\tassert_eq!(decoder.max_position(), bytes.len());\n\n\tlet rip = decoder.ip();\n\n\tlet instr = decoder.decode();\n\n\tassert_eq!(decoder.last_error(), tc.decoder_error);\n\n\tassert_eq!(decoder.position(), len);\n\n\tassert_eq!(decoder.can_decode(), can_read);\n\n\tassert_eq!(instr.code(), tc.code);\n\n\tassert_eq!(instr.is_invalid(), tc.code == Code::INVALID);\n\n\tassert_eq!(instr.mnemonic(), tc.mnemonic);\n\n\tassert_eq!(instr.code().mnemonic(), instr.mnemonic());\n\n\tassert_eq!(instr.len(), len);\n\n\tassert_eq!(instr.ip(), rip);\n\n\tassert_eq!(instr.next_ip(), decoder.ip());\n\n\tassert_eq!(instr.next_ip(), rip.wrapping_add(len as u64));\n\n\tmatch bitness {\n\n\t\t16 => assert_eq!(instr.code_size(), CodeSize::Code16),\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 35, "score": 185510.02466217364 }, { "content": "// All Vk_VSIB instructions, eg. 
EVEX_Vpgatherdd_xmm_k1_vm32x\n\nfn can_have_invalid_index_register_evex(op_code: &OpCodeInfo) -> bool {\n\n\tif op_code.encoding() != EncodingKind::EVEX {\n\n\t\treturn false;\n\n\t}\n\n\n\n\tmatch op_code.op0_kind() {\n\n\t\tOpCodeOperandKind::xmm_reg | OpCodeOperandKind::ymm_reg | OpCodeOperandKind::zmm_reg => {}\n\n\t\t_ => return false,\n\n\t}\n\n\top_code.requires_unique_reg_nums()\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 36, "score": 183839.33741729957 }, { "content": "fn decode_mem_test(bitness: u32, tc: &DecoderMemoryTestCase) {\n\n\tlet bytes = to_vec_u8(&tc.hex_bytes).unwrap();\n\n\tlet (mut decoder, len, can_read) = create_decoder(bitness, &bytes, tc.ip, tc.decoder_options);\n\n\tassert_eq!(decoder.position(), 0);\n\n\tassert_eq!(decoder.max_position(), bytes.len());\n\n\tlet instr = decoder.decode();\n\n\tassert_eq!(decoder.last_error(), DecoderError::None);\n\n\tassert_eq!(decoder.position(), len);\n\n\tassert_eq!(decoder.can_decode(), can_read);\n\n\n\n\tassert_eq!(instr.code(), tc.code);\n\n\tassert_eq!(instr.is_invalid(), tc.code == Code::INVALID);\n\n\tassert_eq!(instr.op_count(), 2);\n\n\tassert_eq!(instr.len(), len);\n\n\tassert!(!instr.has_rep_prefix());\n\n\tassert!(!instr.has_repe_prefix());\n\n\tassert!(!instr.has_repne_prefix());\n\n\tassert!(!instr.has_lock_prefix());\n\n\tassert_eq!(instr.segment_prefix(), tc.prefix_segment);\n\n\tif instr.segment_prefix() == Register::None {\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 37, "score": 181250.44907228727 }, { "content": "fn get_invalid_test_cases() -> Vec<(u32, Rc<DecoderTestInfo>)> {\n\n\tlet mut result: Vec<(u32, Rc<DecoderTestInfo>)> = Vec::new();\n\n\tfor tc in encoder_tests(false, false) {\n\n\t\tlet tc = Rc::new(tc);\n\n\t\tif code32_only().contains(&tc.code()) {\n\n\t\t\tresult.push((64, tc.clone()));\n\n\t\t}\n\n\t\tif code64_only().contains(&tc.code()) {\n\n\t\t\tresult.push((16, 
tc.clone()));\n\n\t\t\tresult.push((32, tc.clone()));\n\n\t\t}\n\n\t}\n\n\tresult\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 38, "score": 181250.44907228727 }, { "content": "// All VX_VSIB_HX instructions, eg. VEX_Vpgatherdd_xmm_vm32x_xmm\n\nfn can_have_invalid_index_mask_dest_register_vex(op_code: &OpCodeInfo) -> bool {\n\n\tif op_code.encoding() != EncodingKind::VEX && op_code.encoding() != EncodingKind::XOP {\n\n\t\treturn false;\n\n\t}\n\n\n\n\tmatch op_code.op0_kind() {\n\n\t\tOpCodeOperandKind::xmm_reg | OpCodeOperandKind::ymm_reg | OpCodeOperandKind::zmm_reg => {}\n\n\t\t_ => return false,\n\n\t}\n\n\n\n\top_code.requires_unique_reg_nums()\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/dec_enc.rs", "rank": 39, "score": 180222.5683495405 }, { "content": "// Read-only data which is needed a couple of times due to borrow checker\n\nstruct SelfData {\n\n\toptions: FastFormatterOptions,\n\n\tall_registers: &'static [FastStringRegister; IcedConstants::REGISTER_ENUM_COUNT],\n\n\tcode_mnemonics: &'static [FastStringMnemonic; IcedConstants::CODE_ENUM_COUNT],\n\n\tcode_flags: &'static [u8; IcedConstants::CODE_ENUM_COUNT],\n\n\tall_memory_sizes: &'static [FastStringMemorySize; IcedConstants::MEMORY_SIZE_ENUM_COUNT],\n\n}\n\n\n\nimpl<TraitOptions: SpecializedFormatterTraitOptions> SpecializedFormatter<TraitOptions> {\n\n\tconst SHOW_USELESS_PREFIXES: bool = true;\n\n\n\n\t/// Creates a new instance of this formatter\n\n\t#[must_use]\n\n\t#[inline]\n\n\t#[allow(clippy::unwrap_used)]\n\n\tpub fn new() -> Self {\n\n\t\t// This never panics\n\n\t\tSpecializedFormatter::<TraitOptions>::try_with_options(None).unwrap()\n\n\t}\n\n\n", "file_path": "src/rust/iced-x86/src/formatter/fast.rs", "rank": 40, "score": 179315.3544610583 }, { "content": "// Read-only data which is needed a couple of times due to borrow checker\n\nstruct SelfData {\n\n\toptions: FormatterOptions,\n\n\tall_registers: &'static [FormatterString; 
IcedConstants::REGISTER_ENUM_COUNT],\n\n\tinstr_infos: &'static [Box<dyn InstrInfo + Send + Sync>; IcedConstants::CODE_ENUM_COUNT],\n\n\tall_memory_sizes: &'static [Info; IcedConstants::MEMORY_SIZE_ENUM_COUNT],\n\n\tstr_: &'static FormatterConstants,\n\n\tvec_: &'static FormatterArrayConstants,\n\n}\n\n\n\nimpl IntelFormatter {\n\n\t/// Creates an Intel (XED) formatter\n\n\t#[must_use]\n\n\t#[inline]\n\n\tpub fn new() -> Self {\n\n\t\tIntelFormatter::with_options(None, None)\n\n\t}\n\n\n\n\t/// Creates an Intel (XED) formatter\n\n\t///\n\n\t/// # Arguments\n", "file_path": "src/rust/iced-x86/src/formatter/intel.rs", "rank": 41, "score": 179315.3544610583 }, { "content": "// Read-only data which is needed a couple of times due to borrow checker\n\nstruct SelfData {\n\n\toptions: FormatterOptions,\n\n\tall_registers: &'static [FormatterString; IcedConstants::REGISTER_ENUM_COUNT],\n\n\tall_registers_naked: &'static [FormatterString; IcedConstants::REGISTER_ENUM_COUNT],\n\n\tinstr_infos: &'static [Box<dyn InstrInfo + Send + Sync>; IcedConstants::CODE_ENUM_COUNT],\n\n\tall_memory_sizes: &'static [&'static FormatterString; IcedConstants::MEMORY_SIZE_ENUM_COUNT],\n\n\tstr_: &'static FormatterConstants,\n\n\tvec_: &'static FormatterArrayConstants,\n\n}\n\n\n\nimpl GasFormatter {\n\n\tconst IMMEDIATE_VALUE_PREFIX: &'static str = \"$\";\n\n\n\n\t/// Creates a gas (AT&T) formatter\n\n\t#[must_use]\n\n\t#[inline]\n\n\tpub fn new() -> Self {\n\n\t\tGasFormatter::with_options(None, None)\n\n\t}\n\n\n", "file_path": "src/rust/iced-x86/src/formatter/gas.rs", "rank": 42, "score": 179315.3544610583 }, { "content": "// Read-only data which is needed a couple of times due to borrow checker\n\nstruct SelfData {\n\n\toptions: FormatterOptions,\n\n\tall_registers: &'static [FormatterString; IcedConstants::REGISTER_ENUM_COUNT],\n\n\tinstr_infos: &'static [Box<dyn InstrInfo + Send + Sync>; IcedConstants::CODE_ENUM_COUNT],\n\n\tall_memory_sizes: &'static [Info; 
IcedConstants::MEMORY_SIZE_ENUM_COUNT],\n\n\tstr_: &'static FormatterConstants,\n\n\tvec_: &'static FormatterArrayConstants,\n\n}\n\n\n\nimpl NasmFormatter {\n\n\t/// Creates a nasm formatter\n\n\t#[must_use]\n\n\t#[inline]\n\n\tpub fn new() -> Self {\n\n\t\tNasmFormatter::with_options(None, None)\n\n\t}\n\n\n\n\t/// Creates a nasm formatter\n\n\t///\n\n\t/// # Arguments\n", "file_path": "src/rust/iced-x86/src/formatter/nasm.rs", "rank": 43, "score": 179315.3544610583 }, { "content": "// Read-only data which is needed a couple of times due to borrow checker\n\nstruct SelfData {\n\n\toptions: FormatterOptions,\n\n\tall_registers: &'static [FormatterString; IcedConstants::REGISTER_ENUM_COUNT],\n\n\tinstr_infos: &'static [Box<dyn InstrInfo + Send + Sync>; IcedConstants::CODE_ENUM_COUNT],\n\n\tall_memory_sizes: &'static [Info; IcedConstants::MEMORY_SIZE_ENUM_COUNT],\n\n\tstr_: &'static FormatterConstants,\n\n\tvec_: &'static FormatterArrayConstants,\n\n}\n\n\n\nimpl MasmFormatter {\n\n\t/// Creates a masm formatter\n\n\t#[must_use]\n\n\t#[inline]\n\n\tpub fn new() -> Self {\n\n\t\tMasmFormatter::with_options(None, None)\n\n\t}\n\n\n\n\t/// Creates a masm formatter\n\n\t///\n\n\t/// # Arguments\n", "file_path": "src/rust/iced-x86/src/formatter/masm.rs", "rank": 44, "score": 179315.3544610583 }, { "content": "fn read_decoder_test_cases(bitness: u32) -> Vec<DecoderTestCase> {\n\n\tread_decoder_test_cases_core(bitness, format!(\"DecoderTest{}.txt\", bitness))\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/test_cases.rs", "rank": 45, "score": 179251.2076507107 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_code_values() {\n\n\tlet mut iter = Code::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CODE_ENUM_COUNT, Some(IcedConstants::CODE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CODE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CODE_ENUM_COUNT - 1, 
Some(IcedConstants::CODE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CODE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<Code> = Code::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CODE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<Code> = Code::values().collect();\n\n\tlet mut values2: Vec<Code> = Code::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/code.rs", "rank": 46, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(feature = \"instr_info\")]\n\n#[rustfmt::skip]\n\nfn test_flowcontrol_values() {\n\n\tlet mut iter = FlowControl::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::FLOW_CONTROL_ENUM_COUNT, Some(IcedConstants::FLOW_CONTROL_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1, Some(IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<FlowControl> = FlowControl::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<FlowControl> = FlowControl::values().collect();\n\n\tlet mut values2: Vec<FlowControl> = FlowControl::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(feature = \"instr_info\")]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 47, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_register_values() {\n\n\tlet mut iter = Register::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::REGISTER_ENUM_COUNT, 
Some(IcedConstants::REGISTER_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::REGISTER_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::REGISTER_ENUM_COUNT - 1, Some(IcedConstants::REGISTER_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::REGISTER_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<Register> = Register::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::REGISTER_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<Register> = Register::values().collect();\n\n\tlet mut values2: Vec<Register> = Register::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/register.rs", "rank": 48, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(any(feature = \"decoder\", feature = \"encoder\"))]\n\n#[rustfmt::skip]\n\nfn test_tupletype_values() {\n\n\tlet mut iter = TupleType::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::TUPLE_TYPE_ENUM_COUNT, Some(IcedConstants::TUPLE_TYPE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1, Some(IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<TupleType> = TupleType::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<TupleType> = TupleType::values().collect();\n\n\tlet mut values2: Vec<TupleType> = TupleType::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(any(feature = \"decoder\", feature = 
\"encoder\"))]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 49, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_mnemonic_values() {\n\n\tlet mut iter = Mnemonic::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MNEMONIC_ENUM_COUNT, Some(IcedConstants::MNEMONIC_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::MNEMONIC_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MNEMONIC_ENUM_COUNT - 1, Some(IcedConstants::MNEMONIC_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::MNEMONIC_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<Mnemonic> = Mnemonic::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::MNEMONIC_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<Mnemonic> = Mnemonic::values().collect();\n\n\tlet mut values2: Vec<Mnemonic> = Mnemonic::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/mnemonic.rs", "rank": 50, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(any(feature = \"decoder\", feature = \"encoder\", feature = \"instr_info\"))]\n\n#[rustfmt::skip]\n\nfn test_encodingkind_values() {\n\n\tlet mut iter = EncodingKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::ENCODING_KIND_ENUM_COUNT, Some(IcedConstants::ENCODING_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::ENCODING_KIND_ENUM_COUNT - 1, Some(IcedConstants::ENCODING_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<EncodingKind> = EncodingKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT);\n\n\tfor (i, 
value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<EncodingKind> = EncodingKind::values().collect();\n\n\tlet mut values2: Vec<EncodingKind> = EncodingKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(any(feature = \"decoder\", feature = \"encoder\", feature = \"instr_info\"))]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 51, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(feature = \"instr_info\")]\n\n#[rustfmt::skip]\n\nfn test_conditioncode_values() {\n\n\tlet mut iter = ConditionCode::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CONDITION_CODE_ENUM_COUNT, Some(IcedConstants::CONDITION_CODE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CONDITION_CODE_ENUM_COUNT - 1, Some(IcedConstants::CONDITION_CODE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<ConditionCode> = ConditionCode::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<ConditionCode> = ConditionCode::values().collect();\n\n\tlet mut values2: Vec<ConditionCode> = ConditionCode::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(feature = \"instr_info\")]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 52, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(feature = \"instr_info\")]\n\n#[rustfmt::skip]\n\nfn test_cpuidfeature_values() {\n\n\tlet mut iter = CpuidFeature::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CPUID_FEATURE_ENUM_COUNT, 
Some(IcedConstants::CPUID_FEATURE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1, Some(IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CpuidFeature> = CpuidFeature::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CpuidFeature> = CpuidFeature::values().collect();\n\n\tlet mut values2: Vec<CpuidFeature> = CpuidFeature::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(feature = \"instr_info\")]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 53, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n\n#[rustfmt::skip]\n\nfn test_mandatoryprefix_values() {\n\n\tlet mut iter = MandatoryPrefix::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MANDATORY_PREFIX_ENUM_COUNT, Some(IcedConstants::MANDATORY_PREFIX_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1, Some(IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<MandatoryPrefix> = MandatoryPrefix::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<MandatoryPrefix> = MandatoryPrefix::values().collect();\n\n\tlet mut values2: Vec<MandatoryPrefix> = 
MandatoryPrefix::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 54, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_decodererror_values() {\n\n\tlet mut iter = DecoderError::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::DECODER_ERROR_ENUM_COUNT, Some(IcedConstants::DECODER_ERROR_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::DECODER_ERROR_ENUM_COUNT - 1, Some(IcedConstants::DECODER_ERROR_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<DecoderError> = DecoderError::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::DECODER_ERROR_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<DecoderError> = DecoderError::values().collect();\n\n\tlet mut values2: Vec<DecoderError> = DecoderError::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/decoder.rs", "rank": 55, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_codesize_values() {\n\n\tlet mut iter = CodeSize::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CODE_SIZE_ENUM_COUNT, Some(IcedConstants::CODE_SIZE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CODE_SIZE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CODE_SIZE_ENUM_COUNT - 1, Some(IcedConstants::CODE_SIZE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CODE_SIZE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CodeSize> = 
CodeSize::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CODE_SIZE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CodeSize> = CodeSize::values().collect();\n\n\tlet mut values2: Vec<CodeSize> = CodeSize::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 56, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_roundingcontrol_values() {\n\n\tlet mut iter = RoundingControl::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::ROUNDING_CONTROL_ENUM_COUNT, Some(IcedConstants::ROUNDING_CONTROL_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1, Some(IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<RoundingControl> = RoundingControl::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<RoundingControl> = RoundingControl::values().collect();\n\n\tlet mut values2: Vec<RoundingControl> = RoundingControl::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 57, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n\n#[rustfmt::skip]\n\nfn test_opcodeoperandkind_values() {\n\n\tlet mut iter = OpCodeOperandKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT, 
Some(IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<OpCodeOperandKind> = OpCodeOperandKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<OpCodeOperandKind> = OpCodeOperandKind::values().collect();\n\n\tlet mut values2: Vec<OpCodeOperandKind> = OpCodeOperandKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 58, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_opkind_values() {\n\n\tlet mut iter = OpKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_KIND_ENUM_COUNT, Some(IcedConstants::OP_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<OpKind> = OpKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::OP_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<OpKind> = OpKind::values().collect();\n\n\tlet mut values2: Vec<OpKind> = OpKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, 
values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 59, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n\n#[rustfmt::skip]\n\nfn test_opcodetablekind_values() {\n\n\tlet mut iter = OpCodeTableKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT, Some(IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<OpCodeTableKind> = OpCodeTableKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<OpCodeTableKind> = OpCodeTableKind::values().collect();\n\n\tlet mut values2: Vec<OpCodeTableKind> = OpCodeTableKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(all(feature = \"encoder\", feature = \"op_code_info\"))]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 60, "score": 178977.12937384253 }, { "content": "#[test]\n\n#[cfg(feature = \"instr_info\")]\n\n#[rustfmt::skip]\n\nfn test_opaccess_values() {\n\n\tlet mut iter = OpAccess::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_ACCESS_ENUM_COUNT, Some(IcedConstants::OP_ACCESS_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::OP_ACCESS_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::OP_ACCESS_ENUM_COUNT - 1, Some(IcedConstants::OP_ACCESS_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), 
IcedConstants::OP_ACCESS_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<OpAccess> = OpAccess::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::OP_ACCESS_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<OpAccess> = OpAccess::values().collect();\n\n\tlet mut values2: Vec<OpAccess> = OpAccess::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[cfg(feature = \"instr_info\")]\n", "file_path": "src/rust/iced-x86/src/enums.rs", "rank": 61, "score": 178977.12937384253 }, { "content": "fn read_instr_info_test_cases(bitness: u32) -> Vec<InstrInfoTestCase> {\n\n\tlet mut path = get_instr_info_unit_tests_dir();\n\n\tpath.push(format!(\"InstructionInfoTest_{}.txt\", bitness));\n\n\tInstrInfoTestParser::new(bitness, &path).into_iter().collect()\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/info/tests/mod.rs", "rank": 62, "score": 177332.02675964776 }, { "content": "fn read_decoder_misc_test_cases(bitness: u32) -> Vec<DecoderTestCase> {\n\n\tread_decoder_test_cases_core(bitness, format!(\"DecoderTestMisc{}.txt\", bitness))\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/test_cases.rs", "rank": 63, "score": 177332.02675964776 }, { "content": "#[test]\n\nfn decoder_new() {\n\n\tlet decoder = Decoder::new(64, b\"\", DecoderOptions::NONE);\n\n\tassert_eq!(decoder.ip(), 0);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 64, "score": 175967.91978876645 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_e_values() {\n\n\tlet mut iter = CC_e::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_E_ENUM_COUNT, Some(IcedConstants::CC_E_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_E_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_E_ENUM_COUNT - 1, Some(IcedConstants::CC_E_ENUM_COUNT - 
1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_E_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_e> = CC_e::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_E_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_e> = CC_e::values().collect();\n\n\tlet mut values2: Vec<CC_e> = CC_e::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 65, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_b_values() {\n\n\tlet mut iter = CC_b::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_B_ENUM_COUNT, Some(IcedConstants::CC_B_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_B_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_B_ENUM_COUNT - 1, Some(IcedConstants::CC_B_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_B_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_b> = CC_b::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_B_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_b> = CC_b::values().collect();\n\n\tlet mut values2: Vec<CC_b> = CC_b::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 66, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_p_values() {\n\n\tlet mut iter = CC_p::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_P_ENUM_COUNT, Some(IcedConstants::CC_P_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_P_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), 
(IcedConstants::CC_P_ENUM_COUNT - 1, Some(IcedConstants::CC_P_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_P_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_p> = CC_p::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_P_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_p> = CC_p::values().collect();\n\n\tlet mut values2: Vec<CC_p> = CC_p::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 67, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_repprefixkind_values() {\n\n\tlet mut iter = RepPrefixKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::REP_PREFIX_KIND_ENUM_COUNT, Some(IcedConstants::REP_PREFIX_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::REP_PREFIX_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::REP_PREFIX_KIND_ENUM_COUNT - 1, Some(IcedConstants::REP_PREFIX_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::REP_PREFIX_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<RepPrefixKind> = RepPrefixKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::REP_PREFIX_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<RepPrefixKind> = RepPrefixKind::values().collect();\n\n\tlet mut values2: Vec<RepPrefixKind> = RepPrefixKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/encoder/enums.rs", "rank": 68, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_decoratorkind_values() {\n\n\tlet mut iter = 
DecoratorKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::DECORATOR_KIND_ENUM_COUNT, Some(IcedConstants::DECORATOR_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::DECORATOR_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::DECORATOR_KIND_ENUM_COUNT - 1, Some(IcedConstants::DECORATOR_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::DECORATOR_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<DecoratorKind> = DecoratorKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::DECORATOR_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<DecoratorKind> = DecoratorKind::values().collect();\n\n\tlet mut values2: Vec<DecoratorKind> = DecoratorKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 69, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_prefixkind_values() {\n\n\tlet mut iter = PrefixKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::PREFIX_KIND_ENUM_COUNT, Some(IcedConstants::PREFIX_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::PREFIX_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::PREFIX_KIND_ENUM_COUNT - 1, Some(IcedConstants::PREFIX_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::PREFIX_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<PrefixKind> = PrefixKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::PREFIX_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<PrefixKind> = PrefixKind::values().collect();\n\n\tlet mut values2: Vec<PrefixKind> = 
PrefixKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 70, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_be_values() {\n\n\tlet mut iter = CC_be::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_BE_ENUM_COUNT, Some(IcedConstants::CC_BE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_BE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_BE_ENUM_COUNT - 1, Some(IcedConstants::CC_BE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_BE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_be> = CC_be::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_BE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_be> = CC_be::values().collect();\n\n\tlet mut values2: Vec<CC_be> = CC_be::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 71, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_numberkind_values() {\n\n\tlet mut iter = NumberKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::NUMBER_KIND_ENUM_COUNT, Some(IcedConstants::NUMBER_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::NUMBER_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::NUMBER_KIND_ENUM_COUNT - 1, Some(IcedConstants::NUMBER_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::NUMBER_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<NumberKind> = NumberKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::NUMBER_KIND_ENUM_COUNT);\n\n\tfor (i, value) in 
values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<NumberKind> = NumberKind::values().collect();\n\n\tlet mut values2: Vec<NumberKind> = NumberKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 72, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_l_values() {\n\n\tlet mut iter = CC_l::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_L_ENUM_COUNT, Some(IcedConstants::CC_L_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_L_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_L_ENUM_COUNT - 1, Some(IcedConstants::CC_L_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_L_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_l> = CC_l::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_L_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_l> = CC_l::values().collect();\n\n\tlet mut values2: Vec<CC_l> = CC_l::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 73, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_numberbase_values() {\n\n\tlet mut iter = NumberBase::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::NUMBER_BASE_ENUM_COUNT, Some(IcedConstants::NUMBER_BASE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::NUMBER_BASE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::NUMBER_BASE_ENUM_COUNT - 1, Some(IcedConstants::NUMBER_BASE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::NUMBER_BASE_ENUM_COUNT - 1);\n\n\n\n\tlet 
values: Vec<NumberBase> = NumberBase::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::NUMBER_BASE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<NumberBase> = NumberBase::values().collect();\n\n\tlet mut values2: Vec<NumberBase> = NumberBase::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 74, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_a_values() {\n\n\tlet mut iter = CC_a::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_A_ENUM_COUNT, Some(IcedConstants::CC_A_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_A_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_A_ENUM_COUNT - 1, Some(IcedConstants::CC_A_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_A_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_a> = CC_a::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_A_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_a> = CC_a::values().collect();\n\n\tlet mut values2: Vec<CC_a> = CC_a::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 75, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_memorysize_values() {\n\n\tlet mut iter = MemorySize::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MEMORY_SIZE_ENUM_COUNT, Some(IcedConstants::MEMORY_SIZE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::MEMORY_SIZE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), 
(IcedConstants::MEMORY_SIZE_ENUM_COUNT - 1, Some(IcedConstants::MEMORY_SIZE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::MEMORY_SIZE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<MemorySize> = MemorySize::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::MEMORY_SIZE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<MemorySize> = MemorySize::values().collect();\n\n\tlet mut values2: Vec<MemorySize> = MemorySize::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/memory_size.rs", "rank": 76, "score": 175961.38073374273 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_g_values() {\n\n\tlet mut iter = CC_g::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_G_ENUM_COUNT, Some(IcedConstants::CC_G_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_G_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_G_ENUM_COUNT - 1, Some(IcedConstants::CC_G_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_G_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_g> = CC_g::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_G_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_g> = CC_g::values().collect();\n\n\tlet mut values2: Vec<CC_g> = CC_g::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 77, "score": 175961.38073374273 }, { "content": "fn read_decoder_mem_test_cases(bitness: u32) -> Vec<DecoderMemoryTestCase> {\n\n\tlet mut filename = get_decoder_unit_tests_dir();\n\n\tfilename.push(format!(\"MemoryTest{}.txt\", 
bitness));\n\n\tDecoderMemoryTestParser::new(bitness, filename.as_path()).into_iter().collect()\n\n}\n\n\n\npub(crate) fn get_test_cases(bitness: u32) -> &'static Vec<DecoderTestCase> {\n\n\tmatch bitness {\n\n\t\t16 => &*TEST_CASES_16,\n\n\t\t32 => &*TEST_CASES_32,\n\n\t\t64 => &*TEST_CASES_64,\n\n\t\t_ => unreachable!(),\n\n\t}\n\n}\n\n\n\npub(crate) fn get_misc_test_cases(bitness: u32) -> &'static Vec<DecoderTestCase> {\n\n\tmatch bitness {\n\n\t\t16 => &*MISC_TEST_CASES_16,\n\n\t\t32 => &*MISC_TEST_CASES_32,\n\n\t\t64 => &*MISC_TEST_CASES_64,\n", "file_path": "src/rust/iced-x86/src/decoder/tests/test_cases.rs", "rank": 78, "score": 175488.1917095938 }, { "content": "fn encode_invalid_test(invalid_bitness: u32, tc: Rc<DecoderTestInfo>) {\n\n\tlet orig_bytes = to_vec_u8(tc.hex_bytes()).unwrap();\n\n\tlet mut decoder = create_decoder(tc.bitness(), orig_bytes.as_slice(), tc.ip(), tc.decoder_options()).0;\n\n\tlet orig_rip = decoder.ip();\n\n\tlet orig_instr = decoder.decode();\n\n\tassert_eq!(orig_instr.code(), tc.code());\n\n\tassert_eq!(orig_instr.len(), orig_bytes.len());\n\n\tassert!(orig_instr.len() <= IcedConstants::MAX_INSTRUCTION_LENGTH);\n\n\tassert_eq!(orig_instr.ip16(), orig_rip as u16);\n\n\tassert_eq!(orig_instr.ip32(), orig_rip as u32);\n\n\tassert_eq!(orig_instr.ip(), orig_rip);\n\n\tlet after_rip = decoder.ip();\n\n\tassert_eq!(orig_instr.next_ip16(), after_rip as u16);\n\n\tassert_eq!(orig_instr.next_ip32(), after_rip as u32);\n\n\tassert_eq!(orig_instr.next_ip(), after_rip);\n\n\n\n\tlet mut encoder = Encoder::new(invalid_bitness);\n\n\tmatch encoder.encode(&orig_instr, orig_rip) {\n\n\t\tOk(_) => unreachable!(),\n\n\t\tErr(err) => {\n\n\t\t\tlet expected_err = if invalid_bitness == 64 { Encoder::ERROR_ONLY_1632_BIT_MODE } else { Encoder::ERROR_ONLY_64_BIT_MODE };\n\n\t\t\tassert_eq!(format!(\"{}\", err), expected_err);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 79, "score": 173631.5416674912 }, 
{ "content": "#[test]\n\n#[should_panic]\n\nfn new_panics_if_bitness_128() {\n\n\tlet _ = Encoder::new(128);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 80, "score": 173099.32504289705 }, { "content": "#[test]\n\n#[should_panic]\n\nfn new_panics_if_bitness_0() {\n\n\tlet _ = Encoder::new(0);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 81, "score": 173099.32504289705 }, { "content": "#[test]\n\nfn decoder_try_new() {\n\n\tlet decoder = Decoder::try_new(64, b\"\", DecoderOptions::NONE).unwrap();\n\n\tassert_eq!(decoder.ip(), 0);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/mod.rs", "rank": 82, "score": 173099.32504289705 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_ne_values() {\n\n\tlet mut iter = CC_ne::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_NE_ENUM_COUNT, Some(IcedConstants::CC_NE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_NE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_NE_ENUM_COUNT - 1, Some(IcedConstants::CC_NE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_NE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_ne> = CC_ne::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_NE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_ne> = CC_ne::values().collect();\n\n\tlet mut values2: Vec<CC_ne> = CC_ne::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 83, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_relockind_values() {\n\n\tlet mut iter = RelocKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::RELOC_KIND_ENUM_COUNT, 
Some(IcedConstants::RELOC_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::RELOC_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::RELOC_KIND_ENUM_COUNT - 1, Some(IcedConstants::RELOC_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::RELOC_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<RelocKind> = RelocKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::RELOC_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<RelocKind> = RelocKind::values().collect();\n\n\tlet mut values2: Vec<RelocKind> = RelocKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/block_enc/enums.rs", "rank": 84, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_memorysizeoptions_values() {\n\n\tlet mut iter = MemorySizeOptions::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT, Some(IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT - 1, Some(IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<MemorySizeOptions> = MemorySizeOptions::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::MEMORY_SIZE_OPTIONS_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<MemorySizeOptions> = MemorySizeOptions::values().collect();\n\n\tlet mut values2: Vec<MemorySizeOptions> = 
MemorySizeOptions::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums_shared.rs", "rank": 85, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_ge_values() {\n\n\tlet mut iter = CC_ge::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_GE_ENUM_COUNT, Some(IcedConstants::CC_GE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_GE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_GE_ENUM_COUNT - 1, Some(IcedConstants::CC_GE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_GE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_ge> = CC_ge::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_GE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_ge> = CC_ge::values().collect();\n\n\tlet mut values2: Vec<CC_ge> = CC_ge::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 86, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_le_values() {\n\n\tlet mut iter = CC_le::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_LE_ENUM_COUNT, Some(IcedConstants::CC_LE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_LE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_LE_ENUM_COUNT - 1, Some(IcedConstants::CC_LE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_LE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_le> = CC_le::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_LE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as 
usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_le> = CC_le::values().collect();\n\n\tlet mut values2: Vec<CC_le> = CC_le::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 87, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_formattertextkind_values() {\n\n\tlet mut iter = FormatterTextKind::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT, Some(IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT - 1, Some(IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<FormatterTextKind> = FormatterTextKind::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::FORMATTER_TEXT_KIND_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<FormatterTextKind> = FormatterTextKind::values().collect();\n\n\tlet mut values2: Vec<FormatterTextKind> = FormatterTextKind::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums_shared.rs", "rank": 88, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_np_values() {\n\n\tlet mut iter = CC_np::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_NP_ENUM_COUNT, Some(IcedConstants::CC_NP_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_NP_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_NP_ENUM_COUNT - 1, Some(IcedConstants::CC_NP_ENUM_COUNT - 
1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_NP_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_np> = CC_np::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_NP_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_np> = CC_np::values().collect();\n\n\tlet mut values2: Vec<CC_np> = CC_np::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 89, "score": 173092.9456980946 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn test_cc_ae_values() {\n\n\tlet mut iter = CC_ae::values();\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_AE_ENUM_COUNT, Some(IcedConstants::CC_AE_ENUM_COUNT)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_AE_ENUM_COUNT);\n\n\tassert!(iter.next().is_some());\n\n\tassert_eq!(iter.size_hint(), (IcedConstants::CC_AE_ENUM_COUNT - 1, Some(IcedConstants::CC_AE_ENUM_COUNT - 1)));\n\n\tassert_eq!(iter.len(), IcedConstants::CC_AE_ENUM_COUNT - 1);\n\n\n\n\tlet values: Vec<CC_ae> = CC_ae::values().collect();\n\n\tassert_eq!(values.len(), IcedConstants::CC_AE_ENUM_COUNT);\n\n\tfor (i, value) in values.into_iter().enumerate() {\n\n\t\tassert_eq!(i, value as usize);\n\n\t}\n\n\n\n\tlet values1: Vec<CC_ae> = CC_ae::values().collect();\n\n\tlet mut values2: Vec<CC_ae> = CC_ae::values().rev().collect();\n\n\tvalues2.reverse();\n\n\tassert_eq!(values1, values2);\n\n}\n\n#[rustfmt::skip]\n", "file_path": "src/rust/iced-x86/src/formatter/enums.rs", "rank": 90, "score": 173092.9456980946 }, { "content": "fn add_tests(v: &mut Vec<DecoderTestInfo>, tests: &[DecoderTestCase], include_invalid: bool, can_encode: Option<bool>) {\n\n\tfor tc in tests {\n\n\t\tif !include_invalid && tc.code == Code::INVALID {\n\n\t\t\tcontinue;\n\n\t\t}\n\n\t\tif let Some(can_encode) = can_encode {\n\n\t\t\tlet tc_can_encode = (tc.test_options & 
DecoderTestOptions::NO_ENCODE) == 0;\n\n\t\t\tif tc_can_encode != can_encode {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\t\t}\n\n\t\tv.push(DecoderTestInfo {\n\n\t\t\tbitness: tc.bitness,\n\n\t\t\tcode: tc.code,\n\n\t\t\thex_bytes: tc.hex_bytes.clone(),\n\n\t\t\tip: tc.ip,\n\n\t\t\tencoded_hex_bytes: tc.encoded_hex_bytes.clone(),\n\n\t\t\tdecoder_options: tc.decoder_options,\n\n\t\t\tdecoder_test_options: tc.test_options,\n\n\t\t});\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/test_utils.rs", "rank": 91, "score": 171707.39989688474 }, { "content": "struct SymbolResolverImpl {\n\n\tflags: u32,\n\n}\n\n\n\nimpl SymbolResolver for SymbolResolverImpl {\n\n\tfn symbol(\n\n\t\t&mut self, _instruction: &Instruction, _operand: u32, instruction_operand: Option<u32>, address: u64, _address_size: u32,\n\n\t) -> Option<SymbolResult<'_>> {\n\n\t\tif instruction_operand == Some(1) && (self.flags & SymbolTestFlags::SYMBOL) != 0 {\n\n\t\t\tSome(SymbolResult::with_str_kind_flags(\n\n\t\t\t\taddress,\n\n\t\t\t\t\"symbol\",\n\n\t\t\t\tFormatterTextKind::Data,\n\n\t\t\t\tif (self.flags & SymbolTestFlags::SIGNED) != 0 { SymbolFlags::SIGNED } else { SymbolFlags::NONE },\n\n\t\t\t))\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/formatter/masm/tests/symres.rs", "rank": 92, "score": 170693.3289577114 }, { "content": "#[test]\n\n#[should_panic]\n\nfn decoder_new_panics_128() {\n\n\tdecoder_new_panics(128);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/misc_tests.rs", "rank": 93, "score": 170367.5148789201 }, { "content": "#[test]\n\nfn try_new_fails_if_bitness_0() {\n\n\tassert!(Encoder::try_new(0).is_err());\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 94, "score": 170367.5148789201 }, { "content": "#[test]\n\nfn try_new_fails_if_bitness_128() {\n\n\tassert!(Encoder::try_new(128).is_err());\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/encoder/tests/mod.rs", "rank": 
95, "score": 170367.5148789201 }, { "content": "#[test]\n\n#[should_panic]\n\nfn decoder_new_panics_0() {\n\n\tdecoder_new_panics(0);\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/decoder/tests/misc_tests.rs", "rank": 96, "score": 170367.5148789201 }, { "content": "#[test]\n\nfn invalid_code_value_is_zero() {\n\n\t// A 'default' Instruction should be an invalid instruction\n\n\tconst_assert_eq!(Code::INVALID as u32, 0);\n\n\tlet instr1 = Instruction::default();\n\n\tassert_eq!(instr1.code(), Code::INVALID);\n\n\tlet instr2 = Instruction::new();\n\n\tassert_eq!(instr2.code(), Code::INVALID);\n\n\tassert!(instr1.eq_all_bits(&instr2));\n\n}\n\n\n", "file_path": "src/rust/iced-x86/src/test/instr_misc.rs", "rank": 97, "score": 170361.28762879974 }, { "content": "\t#[inline]\n\n\tpub fn uppercase_mnemonics(&self) -> bool {\n\n\t\t(self.options1 & Flags1::UPPERCASE_MNEMONICS) != 0\n\n\t}\n\n\n\n\t/// Mnemonics are uppercased\n\n\t///\n\n\t/// Default | Value | Example\n\n\t/// --------|-------|--------\n\n\t/// &nbsp; | `true` | `MOV rcx,rax`\n\n\t/// ✔️ | `false` | `mov rcx,rax`\n\n\t///\n\n\t/// # Arguments\n\n\t///\n\n\t/// * `value`: New value\n\n\t#[inline]\n\n\tpub fn set_uppercase_mnemonics(&mut self, value: bool) {\n\n\t\tif value {\n\n\t\t\tself.options1 |= Flags1::UPPERCASE_MNEMONICS;\n\n\t\t} else {\n", "file_path": "src/rust/iced-x86/src/formatter/fmt_opts.rs", "rank": 98, "score": 50.339060723828446 }, { "content": "\t#[inline]\n\n\tpub fn masm_displ_in_brackets(&self) -> bool {\n\n\t\t(self.options1 & Flags1::MASM_DISPL_IN_BRACKETS) != 0\n\n\t}\n\n\n\n\t/// (masm only): Show displacements in brackets\n\n\t///\n\n\t/// Default | Value | Example\n\n\t/// --------|-------|--------\n\n\t/// ✔️ | `true` | `[ecx+1234h]`\n\n\t/// &nbsp; | `false` | `1234h[ecx]`\n\n\t///\n\n\t/// # Arguments\n\n\t///\n\n\t/// * `value`: New value\n\n\t#[inline]\n\n\tpub fn set_masm_displ_in_brackets(&mut self, value: bool) {\n\n\t\tif value {\n\n\t\t\tself.options1 |= 
Flags1::MASM_DISPL_IN_BRACKETS;\n\n\t\t} else {\n", "file_path": "src/rust/iced-x86/src/formatter/fmt_opts.rs", "rank": 99, "score": 49.764233802226315 } ]
Rust
src/num_traits.rs
icewind1991/bitbuffer-rs
96c37a0bc32cac5c44b77c5c0b05b053735c2942
use crate::Endianness; use num_traits::PrimInt; use std::array::TryFromSliceError; use std::convert::TryFrom; use std::fmt::Debug; use std::ops::{BitOrAssign, BitXor}; pub trait UncheckedPrimitiveFloat: Sized { type BYTES: AsRef<[u8]> + for<'a> TryFrom<&'a [u8], Error = TryFromSliceError>; type INT: PrimInt + BitOrAssign + IsSigned + UncheckedPrimitiveInt + BitXor + Debug + SplitFitUsize; fn from_f32_unchecked(n: f32) -> Self; fn from_f64_unchecked(n: f64) -> Self; fn to_bytes<E: Endianness>(self) -> Self::BYTES; fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self; fn to_int(self) -> Self::INT; fn from_int(int: Self::INT) -> Self; } impl UncheckedPrimitiveFloat for f32 { type BYTES = [u8; 4]; type INT = u32; #[inline(always)] fn from_f32_unchecked(n: f32) -> Self { n } #[inline(always)] fn from_f64_unchecked(n: f64) -> Self { n as f32 } fn to_bytes<E: Endianness>(self) -> Self::BYTES { if E::is_le() { self.to_le_bytes() } else { self.to_be_bytes() } } fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self { if E::is_le() { Self::from_le_bytes(bytes) } else { Self::from_be_bytes(bytes) } } fn to_int(self) -> Self::INT { Self::INT::from_le_bytes(self.to_le_bytes()) } fn from_int(int: Self::INT) -> Self { Self::from_le_bytes(int.to_le_bytes()) } } impl UncheckedPrimitiveFloat for f64 { type BYTES = [u8; 8]; type INT = u64; #[inline(always)] fn from_f32_unchecked(n: f32) -> Self { n as f64 } #[inline(always)] fn from_f64_unchecked(n: f64) -> Self { n } fn to_bytes<E: Endianness>(self) -> Self::BYTES { if E::is_le() { self.to_le_bytes() } else { self.to_be_bytes() } } fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self { if E::is_le() { Self::from_le_bytes(bytes) } else { Self::from_be_bytes(bytes) } } fn to_int(self) -> Self::INT { Self::INT::from_le_bytes(self.to_le_bytes()) } fn from_int(int: Self::INT) -> Self { Self::from_le_bytes(int.to_le_bytes()) } } pub trait UncheckedPrimitiveInt: Sized { fn from_u8_unchecked(n: u8) -> Self; fn from_i8_unchecked(n: 
i8) -> Self; fn from_u16_unchecked(n: u16) -> Self; fn from_i16_unchecked(n: i16) -> Self; fn from_u32_unchecked(n: u32) -> Self; fn from_i32_unchecked(n: i32) -> Self; fn from_u64_unchecked(n: u64) -> Self; fn from_i64_unchecked(n: i64) -> Self; fn from_u128_unchecked(n: u128) -> Self; fn from_i128_unchecked(n: i128) -> Self; fn from_usize_unchecked(n: usize) -> Self; fn from_isize_unchecked(n: isize) -> Self; fn into_u8_unchecked(self) -> u8; fn into_i8_unchecked(self) -> i8; fn into_u16_unchecked(self) -> u16; fn into_i16_unchecked(self) -> i16; fn into_u32_unchecked(self) -> u32; fn into_i32_unchecked(self) -> i32; fn into_u64_unchecked(self) -> u64; fn into_i64_unchecked(self) -> i64; fn into_u128_unchecked(self) -> u128; fn into_i128_unchecked(self) -> i128; fn into_usize_unchecked(self) -> usize; fn into_isize_unchecked(self) -> isize; fn from_unchecked<N: UncheckedPrimitiveInt>(n: N) -> Self; } macro_rules! impl_unchecked_int { ($type:ty, $conv:ident) => { impl UncheckedPrimitiveInt for $type { #[inline(always)] fn from_u8_unchecked(n: u8) -> Self { n as $type } #[inline(always)] fn from_i8_unchecked(n: i8) -> Self { n as $type } #[inline(always)] fn from_u16_unchecked(n: u16) -> Self { n as $type } #[inline(always)] fn from_i16_unchecked(n: i16) -> Self { n as $type } #[inline(always)] fn from_u32_unchecked(n: u32) -> Self { n as $type } #[inline(always)] fn from_i32_unchecked(n: i32) -> Self { n as $type } #[inline(always)] fn from_u64_unchecked(n: u64) -> Self { n as $type } #[inline(always)] fn from_i64_unchecked(n: i64) -> Self { n as $type } #[inline(always)] fn from_u128_unchecked(n: u128) -> Self { n as $type } #[inline(always)] fn from_i128_unchecked(n: i128) -> Self { n as $type } #[inline(always)] fn from_usize_unchecked(n: usize) -> Self { n as $type } #[inline(always)] fn from_isize_unchecked(n: isize) -> Self { n as $type } fn into_u8_unchecked(self) -> u8 { self as u8 } #[inline(always)] fn into_i8_unchecked(self) -> i8 { self as i8 } 
#[inline(always)] fn into_u16_unchecked(self) -> u16 { self as u16 } #[inline(always)] fn into_i16_unchecked(self) -> i16 { self as i16 } #[inline(always)] fn into_u32_unchecked(self) -> u32 { self as u32 } #[inline(always)] fn into_i32_unchecked(self) -> i32 { self as i32 } #[inline(always)] fn into_u64_unchecked(self) -> u64 { self as u64 } #[inline(always)] fn into_i64_unchecked(self) -> i64 { self as i64 } #[inline(always)] fn into_u128_unchecked(self) -> u128 { self as u128 } #[inline(always)] fn into_i128_unchecked(self) -> i128 { self as i128 } #[inline(always)] fn into_usize_unchecked(self) -> usize { self as usize } #[inline(always)] fn into_isize_unchecked(self) -> isize { self as isize } #[inline(always)] fn from_unchecked<N: UncheckedPrimitiveInt>(n: N) -> Self { n.$conv() } } }; } impl_unchecked_int!(u8, into_u8_unchecked); impl_unchecked_int!(i8, into_i8_unchecked); impl_unchecked_int!(u16, into_u16_unchecked); impl_unchecked_int!(i16, into_i16_unchecked); impl_unchecked_int!(u32, into_u32_unchecked); impl_unchecked_int!(i32, into_i32_unchecked); impl_unchecked_int!(u64, into_u64_unchecked); impl_unchecked_int!(i64, into_i64_unchecked); impl_unchecked_int!(u128, into_u128_unchecked); impl_unchecked_int!(i128, into_i128_unchecked); impl_unchecked_int!(usize, into_usize_unchecked); impl_unchecked_int!(isize, into_isize_unchecked); pub trait IsSigned { fn is_signed() -> bool; } macro_rules! 
impl_is_signed { ($type:ty, $signed:expr) => { impl IsSigned for $type { #[inline(always)] fn is_signed() -> bool { $signed } } }; } impl_is_signed!(u8, false); impl_is_signed!(u16, false); impl_is_signed!(u32, false); impl_is_signed!(u64, false); impl_is_signed!(u128, false); impl_is_signed!(usize, false); impl_is_signed!(i8, true); impl_is_signed!(i16, true); impl_is_signed!(i32, true); impl_is_signed!(i64, true); impl_is_signed!(i128, true); impl_is_signed!(isize, true); pub trait SplitFitUsize { type Iter: Iterator<Item = (usize, u8)> + ExactSizeIterator + DoubleEndedIterator; fn split_fit_usize<E: Endianness>(self) -> Self::Iter; } use std::array; use std::mem::size_of; macro_rules! impl_split_fit { ($type:ty) => { impl SplitFitUsize for $type { type Iter = array::IntoIter<(usize, u8), 1>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { assert!(size_of::<Self>() < size_of::<usize>()); [(self as usize, size_of::<Self>() as u8 * 8)].into_iter() } } }; } macro_rules! impl_split_fit_signed { ($signed_type:ty, $unsigned_type:ty) => { impl SplitFitUsize for $signed_type { type Iter = <$unsigned_type as SplitFitUsize>::Iter; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { let unsigned = <$unsigned_type>::from_ne_bytes(self.to_ne_bytes()); unsigned.split_fit_usize::<E>() } } }; } impl_split_fit!(u8); impl_split_fit!(u16); impl_split_fit!(i8); impl_split_fit!(i16); #[cfg(target_pointer_width = "64")] impl_split_fit!(u32); #[cfg(target_pointer_width = "32")] impl SplitFitUsize for u32 { type Iter = array::IntoIter<(usize, u8), 2>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { Self::Iter::new(if E::is_le() { [ ((self & (Self::MAX >> 8)) as usize, 24), ((self >> 24) as usize, 8), ] } else { [ ((self >> 24) as usize, 8), ((self & (Self::MAX >> 8)) as usize, 24), ] }) } } impl_split_fit_signed!(i32, u32); impl SplitFitUsize for u64 { type Iter = array::IntoIter<(usize, u8), 3>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() 
{ [ ((self & (Self::MAX >> 40)) as usize, 24), ((self >> 24 & (Self::MAX >> 16)) as usize, 24), ((self >> 48) as usize, 16), ] } else { [ ((self >> 48) as usize, 16), ((self >> 24 & (Self::MAX >> 16)) as usize, 24), ((self & (Self::MAX >> 40)) as usize, 24), ] }) .into_iter() } } impl_split_fit_signed!(i64, u64); impl SplitFitUsize for u128 { type Iter = array::IntoIter<(usize, u8), 6>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() { [ ((self & (Self::MAX >> 104)) as usize, 24), ((self >> 24 & (Self::MAX >> 80)) as usize, 24), ((self >> 48 & (Self::MAX >> 56)) as usize, 24), ((self >> 72 & (Self::MAX >> 32)) as usize, 24), ((self >> 96 & (Self::MAX >> 8)) as usize, 24), ((self >> 120) as usize, 8), ] } else { [ ((self >> 120) as usize, 8), ((self >> 96 & (Self::MAX >> 8)) as usize, 24), ((self >> 72 & (Self::MAX >> 32)) as usize, 24), ((self >> 48 & (Self::MAX >> 56)) as usize, 24), ((self >> 24 & (Self::MAX >> 80)) as usize, 24), ((self & (Self::MAX >> 104)) as usize, 24), ] }) .into_iter() } } impl_split_fit_signed!(i128, u128); impl SplitFitUsize for usize { type Iter = array::IntoIter<(usize, u8), 2>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() { [ ( (self & (Self::MAX >> (usize::BITS - 8))) as usize, usize::BITS as u8 - 8, ), ((self >> (usize::BITS - 8)) as usize, 8), ] } else { [ ((self >> (usize::BITS - 8)) as usize, 8), ( (self & (Self::MAX >> (usize::BITS - 8))) as usize, usize::BITS as u8 - 8, ), ] }) .into_iter() } } impl_split_fit_signed!(isize, usize);
use crate::Endianness; use num_traits::PrimInt; use std::array::TryFromSliceError; use std::convert::TryFrom; use std::fmt::Debug; use std::ops::{BitOrAssign, BitXor}; pub trait UncheckedPrimitiveFloat: Sized { type BYTES: AsRef<[u8]> + for<'a> TryFrom<&'a [u8], Error = TryFromSliceError>; type INT: PrimInt + BitOrAssign + IsSigned + UncheckedPrimitiveInt + BitXor + Debug + SplitFitUsize; fn from_f32_unchecked(n: f32) -> Self; fn from_f64_unchecked(n: f64) -> Self; fn to_bytes<E: Endianness>(self) -> Self::BYTES; fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self; fn to_int(self) -> Self::INT; fn from_int(int: Self::INT) -> Self; } impl UncheckedPrimitiveFloat for f32 { type BYTES = [u8; 4]; type INT = u32; #[inline(always)] fn from_f32_unchecked(n: f32) -> Self { n } #[inline(always)] fn from_f64_unchecked(n: f64) -> Self { n as f32 } fn to_bytes<E: Endianness>(self) -> Self::BYTES { if E::is_le() { self.to_le_bytes() } else { self.to_be_bytes() } } fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self { if E::is_le() { Self::from_le_bytes(bytes) } else { Self::from_be_bytes(bytes) } } fn to_int(self) -> Self::INT { Self::INT::from_le_bytes(self.to_le_bytes()) } fn from_int(int: Self::INT) -> Self { Self::from_le_bytes(int.to_le_bytes()) } } impl UncheckedPrimitiveFloat for f64 { type BYTES = [u8; 8]; type INT = u64; #[inline(always)] fn from_f32_unchecked(n: f32) -> Self { n as f64 } #[inline(always)] fn from_f64_unchecked(n: f64) -> Self { n } fn to_bytes<E: Endianness>(self) -> Self::BYTES { if E::is_le() { self.to_le_bytes() } else { self.to_be_bytes() } } fn from_bytes<E: Endianness>(bytes: Self::BYTES) -> Self { if E::is_le() { Self::from_le_bytes(bytes) } else { Self::from_be_bytes(bytes) } } fn to_int(self) -> Self::INT { Self::INT::from_le_bytes(self.to_le_bytes()) } fn from_int(int: Self::INT) -> Self { Self::from_le_bytes(int.to_le_bytes()) } } pub trait UncheckedPrimitiveInt: Sized { fn from_u8_unchecked(n: u8) -> Self; fn from_i8_unchecked(n: 
i8) -> Self; fn from_u16_unchecked(n: u16) -> Self; fn from_i16_unchecked(n: i16) -> Self; fn from_u32_unchecked(n: u32) -> Self; fn from_i32_unchecked(n: i32) -> Self; fn from_u64_unchecked(n: u64) -> Self; fn from_i64_unchecked(n: i64) -> Self; fn from_u128_unchecked(n: u128) -> Self; fn from_i128_unchecked(n: i128) -> Self; fn from_usize_unchecked(n: usize) -> Self; fn from_isize_unchecked(n: isize) -> Self; fn into_u8_unchecked(self) -> u8; fn into_i8_unchecked(self) -> i8; fn into_u16_unchecked(self) -> u16; fn into_i16_unchecked(self) -> i16; fn into_u32_unchecked(self) -> u32; fn into_i32_unchecked(self) -> i32; fn into_u64_unchecked(self) -> u64; fn into_i64_unchecked(self) -> i64; fn into_u128_unchecked(self) -> u128; fn into_i128_unchecked(self) -> i128; fn into_usize_unchecked(self) -> usize; fn into_isize_unchecked(self) -> isize; fn from_unchecked<N: UncheckedPrimitiveInt>(n: N) -> Self; } macro_rules! impl_unchecked_int { ($type:ty, $conv:ident) => { impl UncheckedPrimitiveInt for $type { #[inline(always)] fn from_u8_unchecked(n: u8) -> Self { n as $type } #[inline(always)] fn from_i8_unchecked(n: i8) -> Self { n as $type } #[inline(always)] fn from_u16_unchecked(n: u16) -> Self { n as $type } #[inline(always)] fn from_i16_unchecked(n: i16) -> Self { n as $type } #[inline(always)] fn from_u32_unchecked(n: u32) -> Self { n as $type } #[inline(always)] fn from_i32_unchecked(n: i32) -> Self { n as $type } #[inline(always)] fn from_u64_unchecked(n: u64) -> Self { n as $type } #[inline(always)] fn from_i64_unchecked(n: i64) -> Self { n as $type } #[inline(always)] fn from_u128_unchecked(n: u128) -> Self { n as $type } #[inline(always)] fn from_i128_unchecked(n: i128) -> Self { n as $type } #[inline(always)] fn from_usize_unchecked(n: usize) -> Self { n as $type } #[inline(always)] fn from_isize_unchecked(n: isize) -> Self { n as $type } fn into_u8_unchecked(self) -> u8 { self as u8 } #[inline(always)] fn into_i8_unchecked(self) -> i8 { self as i8 } 
#[inline(always)] fn into_u16_unchecked(self) -> u16 { self as u16 } #[inline(always)] fn into_i16_unchecked(self) -> i16 { self as i16 } #[inline(always)] fn into_u32_unchecked(self) -> u32 { self as u32 } #[inline(always)] fn into_i32_unchecked(self) -> i32 { self as i32 } #[inline(always)] fn into_u64_unchecked(self) -> u64 { self as u64 } #[inline(always)] fn into_i64_unchecked(self) -> i64 { self as i64 } #[inline(always)] fn into_u128_unchecked(self) -> u128 { self as u128 } #[inline(always)] fn into_i128_unchecked(self) -> i128 { self as i128 } #[inline(always)] fn into_usize_unchecked(self) -> usize { self as usize } #[inline(always)] fn into_isize_unchecked(self) -> isize { self as isize } #[inline(always)] fn from_unchecked<N: UncheckedPrimitiveInt>(n: N) -> Self { n.$conv() } } }; } impl_unchecked_int!(u8, into_u8_unchecked); impl_unchecked_int!(i8, into_i8_unchecked); impl_unchecked_int!(u16, into_u16_unchecked); impl_unchecked_int!(i16, into_i16_unchecked); impl_unchecked_int!(u32, into_u32_unchecked); impl_unchecked_int!(i32, into_i32_unchecked); impl_unchecked_int!(u64, into_u64_unchecked); impl_unchecked_int!(i64, into_i64_unchecked); impl_unchecked_int!(u128, into_u128_unchecked); impl_unchecked_int!(i128, into_i128_unchecked); impl_unchecked_int!(usize, into_usize_unchecked); impl_unchecked_int!(isize, into_isize_unchecked); pub trait IsSigned { fn is_signed() -> bool; } macro_rules! 
impl_is_signed { ($type:ty, $signed:expr) => { impl IsSigned for $type { #[inline(always)] fn is_signed() -> bool { $signed } } }; } impl_is_signed!(u8, false); impl_is_signed!(u16, false); impl_is_signed!(u32, false); impl_is_signed!(u64, false); impl_is_signed!(u128, false); impl_is_signed!(usize, false); impl_is_signed!(i8, true); impl_is_signed!(i16, true); impl_is_signed!(i32, true); impl_is_signed!(i64, true); impl_is_signed!(i128, true); impl_is_signed!(isize, true); pub trait SplitFitUsize { type Iter: Iterator<Item = (usize, u8)> + ExactSizeIterator + DoubleEndedIterator; fn split_fit_usize<E: Endianness>(self) -> Self::Iter; } use std::array; use std::mem::size_of; macro_rules! impl_split_fit { ($type:ty) => { impl SplitFitUsize for $type { type Iter = array::IntoIter<(usize, u8), 1>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { assert!(size_of::<Self>() < size_of::<usize>()); [(self as usize, size_of::<Self>() as u8 * 8)].into_iter() } } }; } macro_rules! impl_split_fit_signed { ($signed_type:ty, $unsigned_type:ty) => { impl SplitFitUsize for $signed_type { type Iter = <$unsigned_type as SplitFitUsize>::Iter; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { let unsigned = <$unsigned_type>::from_ne_bytes(self.to_ne_bytes()); unsigned.split_fit_usize::<E>() } } }; } impl_split_fit!(u8); impl_split_fit!(u16); impl_split_fit!(i8); impl_split_fit!(i16); #[cfg(target_pointer_width = "64")] impl_split_fit!(u32); #[cfg(target_pointer_width = "32")] impl SplitFitUsize for u32 { type Iter = array::IntoIter<(usize, u8), 2>;
} impl_split_fit_signed!(i32, u32); impl SplitFitUsize for u64 { type Iter = array::IntoIter<(usize, u8), 3>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() { [ ((self & (Self::MAX >> 40)) as usize, 24), ((self >> 24 & (Self::MAX >> 16)) as usize, 24), ((self >> 48) as usize, 16), ] } else { [ ((self >> 48) as usize, 16), ((self >> 24 & (Self::MAX >> 16)) as usize, 24), ((self & (Self::MAX >> 40)) as usize, 24), ] }) .into_iter() } } impl_split_fit_signed!(i64, u64); impl SplitFitUsize for u128 { type Iter = array::IntoIter<(usize, u8), 6>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() { [ ((self & (Self::MAX >> 104)) as usize, 24), ((self >> 24 & (Self::MAX >> 80)) as usize, 24), ((self >> 48 & (Self::MAX >> 56)) as usize, 24), ((self >> 72 & (Self::MAX >> 32)) as usize, 24), ((self >> 96 & (Self::MAX >> 8)) as usize, 24), ((self >> 120) as usize, 8), ] } else { [ ((self >> 120) as usize, 8), ((self >> 96 & (Self::MAX >> 8)) as usize, 24), ((self >> 72 & (Self::MAX >> 32)) as usize, 24), ((self >> 48 & (Self::MAX >> 56)) as usize, 24), ((self >> 24 & (Self::MAX >> 80)) as usize, 24), ((self & (Self::MAX >> 104)) as usize, 24), ] }) .into_iter() } } impl_split_fit_signed!(i128, u128); impl SplitFitUsize for usize { type Iter = array::IntoIter<(usize, u8), 2>; fn split_fit_usize<E: Endianness>(self) -> Self::Iter { (if E::is_le() { [ ( (self & (Self::MAX >> (usize::BITS - 8))) as usize, usize::BITS as u8 - 8, ), ((self >> (usize::BITS - 8)) as usize, 8), ] } else { [ ((self >> (usize::BITS - 8)) as usize, 8), ( (self & (Self::MAX >> (usize::BITS - 8))) as usize, usize::BITS as u8 - 8, ), ] }) .into_iter() } } impl_split_fit_signed!(isize, usize);
fn split_fit_usize<E: Endianness>(self) -> Self::Iter { Self::Iter::new(if E::is_le() { [ ((self & (Self::MAX >> 8)) as usize, 24), ((self >> 24) as usize, 8), ] } else { [ ((self >> 24) as usize, 8), ((self & (Self::MAX >> 8)) as usize, 24), ] }) }
function_block-full_function
[ { "content": "fn type_is_int(ty: &Type) -> bool {\n\n if let Type::Path(path) = ty {\n\n if let Some(ident) = path.path.get_ident() {\n\n let name = ident.to_string();\n\n matches!(\n\n name.as_str(),\n\n \"u8\" | \"u16\" | \"u32\" | \"u64\" | \"usize\" | \"i8\" | \"i16\" | \"i32\" | \"i64\" | \"isize\"\n\n )\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "bitbuffer_derive/src/write.rs", "rank": 1, "score": 169021.86473668562 }, { "content": "#[inline(always)]\n\npub fn bit_size_of_sized<'a, T: BitReadSized<'a, LittleEndian>>(size: usize) -> Option<usize> {\n\n T::bit_size_sized(size)\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 143632.23945624896 }, { "content": "#[inline(always)]\n\nfn contains_zero_byte_non_top(x: usize) -> bool {\n\n #[cfg(target_pointer_width = \"64\")]\n\n const LO_USIZE: usize = 0x0001_0101_0101_0101;\n\n #[cfg(target_pointer_width = \"64\")]\n\n const HI_USIZE: usize = 0x0080_8080_8080_8080;\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n const LO_USIZE: usize = 0x000_10101;\n\n #[cfg(target_pointer_width = \"32\")]\n\n const HI_USIZE: usize = 0x0080_8080;\n\n\n\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\nuse serde::{de, ser::SerializeStruct, Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'a, E: Endianness> Serialize for BitReadBuffer<'a, E> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n", "file_path": "src/readbuffer.rs", "rank": 6, "score": 138290.64638118155 }, { "content": "#[derive(BitWrite, PartialEq, Debug)]\n\nstruct UnnamedSize(u8, #[size = 5] String, bool);\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 7, "score": 136386.9401435574 }, { "content": "#[derive(BitRead, PartialEq, Debug)]\n\nstruct UnnamedSize(u8, #[size = 5] String, bool);\n\n\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 8, "score": 136386.9401435574 }, { "content": 
"#[derive(BitRead)]\n\nstruct UnnamedSizeStruct(u8, #[size = 6] String, bool);\n\n\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 9, "score": 133665.448202985 }, { "content": "fn is_const_size(attrs: &[Attribute], has_input_size: bool) -> bool {\n\n if get_attribute_value::<Lit>(attrs, &[\"size_bits\"]).is_some() {\n\n return false;\n\n }\n\n get_attribute_value(attrs, &[\"size\"])\n\n .map(|size_lit| match size_lit {\n\n Lit::Int(_) => true,\n\n Lit::Str(size_field) => &size_field.value() == \"input_size\" && has_input_size,\n\n _ => panic!(\"Unsupported value for size attribute\"),\n\n })\n\n .unwrap_or(true)\n\n}\n\n\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 10, "score": 126795.1596095457 }, { "content": "pub fn derive_bitwrite_trait(\n\n input: proc_macro::TokenStream,\n\n trait_name: String,\n\n write_method_name: String,\n\n extra_param: Option<TokenStream>,\n\n) -> proc_macro::TokenStream {\n\n let input: DeriveInput = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = &input.ident;\n\n\n\n let endianness = get_attribute_value(&input.attrs, &[\"endianness\"]);\n\n let mut trait_generics = input.generics.clone();\n\n // we need these separate generics to only add out Endianness param to the 'impl'\n\n let (_, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let lifetime: Option<&GenericParam> = trait_generics\n\n .params\n\n .iter()\n\n .find(|param| matches!(param, GenericParam::Lifetime(_)));\n\n let _lifetime = match lifetime {\n\n Some(GenericParam::Lifetime(lifetime)) => lifetime.lifetime.clone(),\n", "file_path": "bitbuffer_derive/src/write.rs", "rank": 11, "score": 122703.36575190542 }, { "content": "/// Trait for types that can be read from a stream, requiring the size to be configured\n\n///\n\n/// The meaning of the set sized depends on the type being read (e.g, number of bits for integers,\n\n/// number of bytes for strings, number of items for Vec's, etc)\n\n///\n\n/// The `BitReadSized` trait 
can be used with `#[derive]` on structs\n\n///\n\n/// The implementation can be derived for a struct as long as every field in the struct implements [`BitRead`] or `BitReadSized`\n\n///\n\n/// The struct is read field by field in the order they are defined in, if the size for a field is set [`stream.read_sized()`][read_sized]\n\n/// will be used, otherwise [`stream_read()`][read] will be used.\n\n///\n\n/// The size for a field can be set using 4 different methods\n\n/// - set the size as an integer using the `size` attribute,\n\n/// - use a previously defined field as the size using the `size` attribute\n\n/// - based on the input size by setting `size` attribute to `\"input_size\"`\n\n/// - read a set number of bits as an integer, using the resulting value as size using the `size_bits` attribute\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitReadSized;\n\n/// #\n\n/// #[derive(BitReadSized, PartialEq, Debug)]\n\n/// struct TestStructSized {\n\n/// foo: u8,\n\n/// #[size = \"input_size\"]\n\n/// string: String,\n\n/// #[size = \"input_size\"]\n\n/// int: u8,\n\n/// }\n\n/// ```\n\n///\n\n/// # Enums\n\n///\n\n/// The implementation can be derived for an enum as long as every variant of the enum either has no field, or an unnamed field that implements [`BitRead`] or `BitReadSized`\n\n///\n\n/// The enum is read by first reading a set number of bits as the discriminant of the enum, then the variant for the read discriminant is read.\n\n///\n\n/// For details about setting the input size for fields implementing `BitReadSized` see the block about size in the `Structs` section above.\n\n///\n\n/// The discriminant for the variants defaults to incrementing by one for every field, starting with `0`.\n\n/// You can overwrite the discriminant for a field, which will also change the discriminant for every following field.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitReadSized;\n\n/// #\n\n/// #[derive(BitReadSized)]\n\n/// 
#[discriminant_bits = 2]\n\n/// enum TestUnnamedFieldEnum {\n\n/// #[size = 5]\n\n/// Foo(i8),\n\n/// Bar(bool),\n\n/// #[discriminant = 3] // since rust only allows setting the discriminant on field-less enums, you can use an attribute instead\n\n/// #[size = \"input_size\"]\n\n/// Asd(u8),\n\n/// }\n\n/// ```\n\n///\n\n/// [read_sized]: BitReadStream::read_sized\n\n/// [read]: BitReadStream::read\n\npub trait BitReadSized<'a, E: Endianness>: Sized {\n\n /// Read the type from stream\n\n fn read(stream: &mut BitReadStream<'a, E>, size: usize) -> Result<Self>;\n\n\n\n #[doc(hidden)]\n\n #[inline]\n\n unsafe fn read_unchecked(\n\n stream: &mut BitReadStream<'a, E>,\n\n size: usize,\n\n _end: bool,\n\n ) -> Result<Self> {\n\n Self::read(stream, size)\n\n }\n\n\n\n /// Skip the type\n\n ///\n\n /// This might be faster than reading it if the size is known beforehand\n\n #[inline]\n\n fn skip(stream: &mut BitReadStream<'a, E>, size: usize) -> Result<()> {\n\n match Self::bit_size_sized(size) {\n", "file_path": "src/read.rs", "rank": 12, "score": 121368.11480387583 }, { "content": "/// Trait for types that can be written to a stream, requiring the size to be configured\n\n///\n\n/// The meaning of the set sized depends on the type being written (e.g, number of bits for integers,\n\n/// number of bytes for strings, number of items for Vec's, etc)\n\n///\n\n/// The `BitReadSized` trait can be used with `#[derive]` on structs\n\n///\n\n/// The implementation can be derived for a struct as long as every field in the struct implements [`BitWrite`] or `BitWriteSized`\n\n///\n\n/// The struct is written field by field in the order they are defined in, if the size for a field is set [`stream.write_sized()`][write_sized]\n\n/// will be used, otherwise [`stream.write()`][write] will be used.\n\n///\n\n/// The size for a field can be set using 4 different methods\n\n/// - set the size as an integer using the `size` attribute,\n\n/// - use a previously defined field as the size 
using the `size` attribute\n\n/// - based on the input size by setting `size` attribute to `\"input_size\"`\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitWriteSized;\n\n/// #\n\n/// #[derive(BitWriteSized, PartialEq, Debug)]\n\n/// struct TestStructSized {\n\n/// foo: u8,\n\n/// #[size = \"input_size\"]\n\n/// string: String,\n\n/// #[size = \"input_size\"]\n\n/// int: u8,\n\n/// }\n\n/// ```\n\n///\n\n/// # Enums\n\n///\n\n/// The implementation can be derived for an enum as long as every variant of the enum either has no field, or an unnamed field that implements [`BitWrite`] or `BitWriteSized`\n\n///\n\n/// The enum is written by first writing a set number of bits as the discriminant of the enum, then the variant is written.\n\n///\n\n/// For details about setting the input size for fields implementing `BitWriteSized` see the block about size in the `Structs` section above.\n\n///\n\n/// The discriminant for the variants defaults to incrementing by one for every field, starting with `0`.\n\n/// You can overwrite the discriminant for a field, which will also change the discriminant for every following field.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitWriteSized;\n\n/// #\n\n/// #[derive(BitWriteSized)]\n\n/// #[discriminant_bits = 2]\n\n/// enum TestUnnamedFieldEnum {\n\n/// #[size = 5]\n\n/// Foo(i8),\n\n/// Bar(bool),\n\n/// #[discriminant = 3] // since rust only allows setting the discriminant on field-less enums, you can use an attribute instead\n\n/// #[size = \"input_size\"]\n\n/// Asd(u8),\n\n/// }\n\n/// ```\n\n///\n\n/// [write_sized]: BitReadStream::write_sized\n\n/// [write]: BitReadStream::write\n\npub trait BitWriteSized<E: Endianness> {\n\n /// Write the type to stream\n\n fn write_sized(&self, stream: &mut BitWriteStream<E>, len: usize) -> Result<()>;\n\n}\n\n\n\nimpl<E: Endianness> BitWriteSized<E> for str {\n\n #[inline]\n\n fn write_sized(&self, stream: &mut BitWriteStream<E>, len: usize) -> 
Result<()> {\n\n stream.write_string(self, Some(len))\n\n }\n\n}\n\n\n\nimpl<E: Endianness> BitWriteSized<E> for String {\n\n #[inline]\n\n fn write_sized(&self, stream: &mut BitWriteStream<E>, len: usize) -> Result<()> {\n\n stream.write_string(self, Some(len))\n\n }\n\n}\n\n\n\nmacro_rules! impl_write_sized_int {\n", "file_path": "src/write.rs", "rank": 13, "score": 120767.49256894758 }, { "content": "#[inline(always)]\n\npub fn bit_size_of<'a, T: BitRead<'a, LittleEndian>>() -> Option<usize> {\n\n T::bit_size()\n\n}\n\n\n\n/// Get the number of bits required to read a type from stream given an input size\n\n///\n\n/// If the number of bits needed can not be determined beforehand `None` is returned\n", "file_path": "src/lib.rs", "rank": 14, "score": 115712.3055984431 }, { "content": "/// Trait for types that can be read from a stream without requiring the size to be configured\n\n///\n\n/// The `BitRead` trait can be used with `#[derive]` on structs and enums\n\n///\n\n/// # Structs\n\n///\n\n/// The implementation can be derived for a struct as long as every field in the struct implements `BitRead` or [`BitReadSized`]\n\n///\n\n/// The struct is read field by field in the order they are defined in, if the size for a field is set [`stream.read_sized()`][read_sized]\n\n/// will be used, otherwise [`stream_read()`][read] will be used.\n\n///\n\n/// The size for a field can be set using 3 different methods\n\n/// - set the size as an integer using the `size` attribute,\n\n/// - use a previously defined field as the size using the `size` attribute\n\n/// - read a set number of bits as an integer, using the resulting value as size using the `size_bits` attribute\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitRead;\n\n/// #\n\n/// #[derive(BitRead)]\n\n/// struct TestStruct {\n\n/// foo: u8,\n\n/// str: String,\n\n/// #[size = 2] // when `size` is set, the attributed will be read using `read_sized`\n\n/// truncated: String,\n\n/// bar: 
u16,\n\n/// float: f32,\n\n/// #[size = 3]\n\n/// asd: u8,\n\n/// #[size_bits = 2] // first read 2 bits as unsigned integer, then use the resulting value as size for the read\n\n/// dynamic_length: u8,\n\n/// #[size = \"asd\"] // use a previously defined field as size\n\n/// previous_field: u8,\n\n/// }\n\n/// ```\n\n///\n\n/// # Enums\n\n///\n\n/// The implementation can be derived for an enum as long as every variant of the enum either has no field, or an unnamed field that implements `BitRead` or [`BitReadSized`]\n\n///\n\n/// The enum is read by first reading a set number of bits as the discriminant of the enum, then the variant for the read discriminant is read.\n\n///\n\n/// For details about setting the input size for fields implementing [`BitReadSized`] see the block about size in the `Structs` section above.\n\n///\n\n/// The discriminant for the variants defaults to incrementing by one for every field, starting with `0`.\n\n/// You can overwrite the discriminant for a field, which will also change the discriminant for every following field.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitRead;\n\n/// #\n\n/// #[derive(BitRead)]\n\n/// #[discriminant_bits = 2]\n\n/// enum TestBareEnum {\n\n/// Foo,\n\n/// Bar,\n\n/// Asd = 3, // manually set the discriminant value for a field\n\n/// }\n\n/// ```\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitRead;\n\n/// #\n\n/// #[derive(BitRead)]\n\n/// #[discriminant_bits = 2]\n\n/// enum TestUnnamedFieldEnum {\n\n/// #[size = 5]\n\n/// Foo(i8),\n\n/// Bar(bool),\n\n/// #[discriminant = 3] // since rust only allows setting the discriminant on field-less enums, you can use an attribute instead\n\n/// Asd(u8),\n\n/// }\n\n/// ```\n\n///\n\n/// [read_sized]: BitReadStream::read_sized\n\n/// [read]: BitReadStream::read\n\npub trait BitRead<'a, E: Endianness>: Sized {\n\n /// Read the type from stream\n\n fn read(stream: &mut BitReadStream<'a, E>) -> Result<Self>;\n\n\n\n /// Note: only the bounds are 
unchecked\n\n ///\n\n /// any other validations (e.g. checking for valid utf8) still needs to be done\n\n #[doc(hidden)]\n\n #[inline]\n\n unsafe fn read_unchecked(stream: &mut BitReadStream<'a, E>, _end: bool) -> Result<Self> {\n\n Self::read(stream)\n\n }\n\n\n\n /// Skip the type\n\n ///\n\n /// This might be faster than reading it if the size is known beforehand\n\n #[inline]\n\n fn skip(stream: &mut BitReadStream<'a, E>) -> Result<()> {\n\n match Self::bit_size() {\n\n Some(size) => stream.skip_bits(size),\n", "file_path": "src/read.rs", "rank": 15, "score": 114459.45228806548 }, { "content": " pub trait Sealed {}\n\n\n\n // Implement for those same types, but no others.\n\n impl Sealed for super::BigEndian {}\n\n\n\n impl Sealed for super::LittleEndian {}\n\n}\n", "file_path": "src/endianness.rs", "rank": 16, "score": 113608.7488053659 }, { "content": "#[test]\n\nfn read_sized_trait() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n let mut stream = BitReadStream::new(buffer);\n\n let a: u8 = stream.read_sized(4).unwrap();\n\n assert_eq!(0b1011, a);\n\n stream.set_pos(0).unwrap();\n\n let vec: Vec<u16> = stream.read_sized(3).unwrap();\n\n assert_eq!(\n\n vec![\n\n 0b1011_0101_0110_1010,\n\n 0b1010_1100_1001_1001,\n\n 0b1001_1001_1001_1001\n\n ],\n\n vec\n\n );\n\n stream.set_pos(0).unwrap();\n\n let vec: Vec<u8> = stream.read_sized(3).unwrap();\n\n assert_eq!(vec![0b1011_0101, 0b0110_1010, 0b1010_1100], vec);\n\n stream.set_pos(0).unwrap();\n\n let result: HashMap<u8, u8> = stream.read_sized(2).unwrap();\n\n assert_eq!(\n\n hashmap!(0b1011_0101 => 0b0110_1010, 0b1010_1100 => 0b1001_1001),\n\n result\n\n );\n\n stream.set_pos(0).unwrap();\n\n let mut result: BitReadStream<BigEndian> = stream.read_sized(4).unwrap();\n\n assert_eq!(0b10u8, result.read_int::<u8>(2).unwrap());\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 17, "score": 107408.37709198947 }, { "content": "fn size(data: Data, struct_name: &Ident, attrs: &[Attribute], 
has_input_size: bool) -> TokenStream {\n\n let span = struct_name.span();\n\n\n\n match data {\n\n Data::Struct(DataStruct { fields, .. }) => {\n\n let sizes = fields.iter().map(|f| {\n\n // Get attributes `#[..]` on each field\n\n if is_const_size(&f.attrs, has_input_size) {\n\n let size = get_field_size(&f.attrs, f.span());\n\n let field_type = &f.ty;\n\n let span = f.span();\n\n match size {\n\n Some(size) => {\n\n quote_spanned! { span =>\n\n <#field_type as ::bitbuffer::BitReadSized<'_, ::bitbuffer::LittleEndian>>::bit_size_sized(#size)\n\n }\n\n }\n\n None => {\n\n quote_spanned! { span =>\n\n <#field_type as ::bitbuffer::BitRead<'_, ::bitbuffer::LittleEndian>>::bit_size()\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 18, "score": 103809.60725482632 }, { "content": "#[test]\n\nfn read_sized_trait_unchecked() {\n\n unsafe {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n let mut stream = BitReadStream::new(buffer);\n\n let a: u8 = stream.read_sized_unchecked(4, true).unwrap();\n\n assert_eq!(0b1011, a);\n\n stream.set_pos(0).unwrap();\n\n let vec: Vec<u16> = stream.read_sized_unchecked(3, true).unwrap();\n\n assert_eq!(\n\n vec![\n\n 0b1011_0101_0110_1010,\n\n 0b1010_1100_1001_1001,\n\n 0b1001_1001_1001_1001\n\n ],\n\n vec\n\n );\n\n stream.set_pos(0).unwrap();\n\n let vec: Vec<u8> = stream.read_sized_unchecked(3, true).unwrap();\n\n assert_eq!(vec![0b1011_0101, 0b0110_1010, 0b1010_1100], vec);\n\n stream.set_pos(0).unwrap();\n", "file_path": "tests/read_tests.rs", "rank": 19, "score": 103741.98633693007 }, { "content": "/// Trait for specifying endianness of bit buffer\n\npub trait Endianness: private::Sealed {\n\n /// Get the endianness as string, either LittleEndian or BigEndian\n\n fn as_string() -> &'static str {\n\n if Self::is_le() {\n\n \"LittleEndian\"\n\n } else {\n\n \"BigEndian\"\n\n }\n\n }\n\n\n\n /// Input is little endian\n\n fn is_le() -> bool;\n\n /// Input is big endian\n\n fn is_be() -> bool;\n\n /// Get an instance of 
the endianness\n\n fn endianness() -> Self;\n\n}\n\n\n\n/// Marks the buffer or stream as big endian\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n", "file_path": "src/endianness.rs", "rank": 20, "score": 98721.82095213664 }, { "content": "/// Trait for types that can be written to a stream without requiring the size to be configured\n\n///\n\n/// The `BitWrite` trait can be used with `#[derive]` on structs and enums\n\n///\n\n/// # Structs\n\n///\n\n/// The implementation can be derived for a struct as long as every field in the struct implements `BitWrite` or [`BitWriteSized`]\n\n///\n\n/// The struct is written field by field in the order they are defined in, if the size for a field is set [`stream.write_sized()`][write_sized]\n\n/// will be used, otherwise [`write_read()`][write] will be used.\n\n///\n\n/// The size for a field can be set using 3 different methods\n\n/// - set the size as an integer using the `size` attribute,\n\n/// - use a previously defined field as the size using the `size` attribute\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitWrite;\n\n/// #\n\n/// #[derive(BitWrite)]\n\n/// struct TestStruct {\n\n/// foo: u8,\n\n/// str: String,\n\n/// #[size = 2] // when `size` is set, the attributed will be read using `read_sized`\n\n/// truncated: String,\n\n/// bar: u16,\n\n/// float: f32,\n\n/// #[size = 3]\n\n/// asd: u8,\n\n/// #[size = \"asd\"] // use a previously defined field as size\n\n/// previous_field: u8,\n\n/// }\n\n/// ```\n\n///\n\n/// # Enums\n\n///\n\n/// The implementation can be derived for an enum as long as every variant of the enum either has no field, or an unnamed field that implements `BitWrite` or [`BitWriteSized`]\n\n///\n\n/// The enum is written by first writing a set number of bits as the discriminant of the enum, then the variant written.\n\n///\n\n/// For details about setting the input size for fields implementing [`BitWriteSized`] see the block about size in the `Structs` section 
above.\n\n///\n\n/// The discriminant for the variants defaults to incrementing by one for every field, starting with `0`.\n\n/// You can overwrite the discriminant for a field, which will also change the discriminant for every following field.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitWrite;\n\n/// #\n\n/// #[derive(BitWrite)]\n\n/// #[discriminant_bits = 2]\n\n/// enum TestBareEnum {\n\n/// Foo,\n\n/// Bar,\n\n/// Asd = 3, // manually set the discriminant value for a field\n\n/// }\n\n/// ```\n\n///\n\n/// ```\n\n/// # use bitbuffer::BitWrite;\n\n/// #\n\n/// #[derive(BitWrite)]\n\n/// #[discriminant_bits = 2]\n\n/// enum TestUnnamedFieldEnum {\n\n/// #[size = 5]\n\n/// Foo(i8),\n\n/// Bar(bool),\n\n/// #[discriminant = 3] // since rust only allows setting the discriminant on field-less enums, you can use an attribute instead\n\n/// Asd(u8),\n\n/// }\n\n/// ```\n\n///\n\n/// [write_sized]: BitWriteStream::write_sized\n\n/// [write]: BitWriteStream::write\n\npub trait BitWrite<E: Endianness> {\n\n /// Write the type to stream\n\n fn write(&self, stream: &mut BitWriteStream<E>) -> Result<()>;\n\n}\n\n\n\nmacro_rules! 
impl_write_int {\n\n ($type:ty) => {\n\n impl<E: Endianness> BitWrite<E> for $type {\n\n #[inline]\n\n fn write(&self, stream: &mut BitWriteStream<E>) -> Result<()> {\n\n stream.write_int::<$type>(*self, <$type>::BITS as usize)\n\n }\n\n }\n\n };\n\n}\n\n\n\nimpl_write_int!(u8);\n\nimpl_write_int!(u16);\n\nimpl_write_int!(u32);\n\nimpl_write_int!(u64);\n", "file_path": "src/write.rs", "rank": 21, "score": 96247.95230264854 }, { "content": "pub fn derive_bitwrite_sized(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let extra_param = parse_str::<TokenStream>(\", input_size: usize\").unwrap();\n\n derive_bitwrite_trait(\n\n input,\n\n \"BitWriteSized\".into(),\n\n \"write_sized\".into(),\n\n Some(extra_param),\n\n )\n\n}\n\n\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 22, "score": 85668.00856643911 }, { "content": "pub fn derive_bitread_sized(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let extra_param = parse_str::<TokenStream>(\", input_size: usize\").unwrap();\n\n derive_bitread_trait(input, \"BitReadSized\".to_owned(), Some(extra_param))\n\n}\n\n\n\n/// See the [crate documentation](index.html) for details\n\n#[proc_macro_derive(\n\n BitWrite,\n\n attributes(size, size_bits, discriminant_bits, discriminant, endianness)\n\n)]\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 23, "score": 85668.00856643911 }, { "content": "fn perf_f64() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n let mut result: f64 = 0.0;\n\n loop {\n\n if pos + 64 > len {\n\n break;\n\n }\n\n let num = buffer.read_float::<f64>(pos).unwrap();\n\n result += num;\n\n pos += 64;\n\n }\n\n assert_eq!(result, F64_RESULT);\n\n black_box(result);\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 24, "score": 84252.15635583308 }, { "content": "fn perf_f32_be() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let mut pos = 0;\n\n let len = 
buffer.bit_len();\n\n let mut result: f32 = 0.0;\n\n loop {\n\n if pos + 32 > len {\n\n break;\n\n }\n\n let num = buffer.read_float::<f32>(pos).unwrap();\n\n result += num;\n\n pos += 32;\n\n }\n\n assert_eq!(result, 0.00000000000000000000000000000006170106);\n\n black_box(result);\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 25, "score": 84237.26435437196 }, { "content": "fn perf_bytes_be() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), BigEndian);\n\n\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_bytes(pos, 128).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 26, "score": 84208.2417732341 }, { "content": "fn perf_bool() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n loop {\n\n if pos >= len {\n\n break;\n\n }\n\n let num = buffer.read_bool(pos).unwrap();\n\n black_box(num);\n\n pos += 1;\n\n }\n\n}\n\n\n\nconst fn build_string_data<const N: usize>(inputs: &[&str]) -> [u8; N] {\n\n let mut data = [0; N];\n\n let mut i = 0;\n\n loop {\n\n let mut y = 0;\n\n while y < inputs.len() {\n", "file_path": "benches/bench.rs", "rank": 27, "score": 84092.80997073291 }, { "content": "#[test]\n\nfn test_bit_size_sized() {\n\n assert_eq!(bit_size_of_sized::<SizeStructSized>(6), Some(8 + 8 * 6 + 1));\n\n assert_eq!(\n\n bit_size_of_sized::<SizeStructSized>(16),\n\n Some(8 + 8 * 16 + 1)\n\n );\n\n}\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 28, "score": 83599.96618539421 }, { "content": "fn repr_for_bits(discriminant_bits: u64) -> TokenStream {\n\n if discriminant_bits <= 8 {\n\n quote!(u8)\n\n } else if discriminant_bits <= 16 {\n\n quote!(u16)\n\n } else if discriminant_bits <= 32 {\n\n quote!(u32)\n\n } else {\n\n quote!(usize)\n\n }\n\n}\n", "file_path": "bitbuffer_derive/src/lib.rs", 
"rank": 29, "score": 82771.31342614238 }, { "content": "#[test]\n\nfn read_u32_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(\n\n buffer.read_int::<u32>(6, 24).unwrap(),\n\n 0b01_0110_1010_1010_1100_1001_10\n\n );\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 30, "score": 81319.82317413663 }, { "content": "#[test]\n\nfn read_i8_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<i8>(1, 2).unwrap(), 0b1);\n\n assert_eq!(buffer.read_int::<i8>(0, 3).unwrap(), -0b11);\n\n assert_eq!(buffer.read_int::<i8>(0, 8).unwrap(), -0b100_1011);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 31, "score": 81319.82317413663 }, { "content": "#[test]\n\nfn read_i16_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<i16>(6, 12).unwrap(), 0b1_0110_1010_10);\n\n assert_eq!(buffer.read_int::<i16>(7, 12).unwrap(), -0b1001_0101_011);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 32, "score": 81319.82317413663 }, { "content": "#[test]\n\nfn read_i64_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<i64>(7, 34).unwrap(), -5010541773);\n\n assert_eq!(buffer.read_int::<i64>(7, 60).unwrap(), -336251766397153476);\n\n assert_eq!(buffer.read_int::<i64>(7, 64).unwrap(), -5380028262354455604);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 33, "score": 81319.82317413663 }, { "content": "#[test]\n\nfn read_i32_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<i32>(7, 24).unwrap(), -4893108);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 34, "score": 81319.82317413663 }, { "content": "#[test]\n\nfn read_u16_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<u16>(6, 12).unwrap(), 0b01_0110_1010_10);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 35, "score": 
81305.62202036663 }, { "content": "fn write_int_le() {\n\n let mut out = Vec::with_capacity(128);\n\n {\n\n let mut write = BitWriteStream::new(&mut out, LittleEndian);\n\n for i in 0..128 {\n\n write.write_sized(&black_box(i), 7).unwrap();\n\n }\n\n }\n\n black_box(out);\n\n}\n\n\n\niai::main!(write_int_le);\n", "file_path": "benches/write.rs", "rank": 36, "score": 81305.62202036663 }, { "content": "#[test]\n\nfn read_u64_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 34).unwrap(),\n\n 0b01_0110_1010_1010_1100_1001_1001_1001_1001\n\n );\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 60).unwrap(),\n\n 0b01_0110_1010_1010_1100_1001_1001_1001_1001_1001_1001_1001_1001_1110_0111_10\n\n );\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 64).unwrap(),\n\n 0b01_0110_1010_1010_1100_1001_1001_1001_1001_1001_1001_1001_1001_1110_0111_1001_10\n\n );\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 37, "score": 81305.62202036663 }, { "content": "fn perf_f32_le() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n let mut result: f32 = 0.0;\n\n loop {\n\n if pos + 32 > len {\n\n break;\n\n }\n\n let num = buffer.read_float::<f32>(pos).unwrap();\n\n result += num;\n\n pos += 32;\n\n }\n\n assert_eq!(result, 0.00000000000000000000000000000006170106);\n\n black_box(result);\n\n}\n\n\n\nconst F64_RESULT: f64 = 0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010156250477904244;\n\n\n", "file_path": "benches/bench.rs", "rank": 38, "score": 81305.62202036663 }, { "content": "fn perf_bytes_le() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), LittleEndian);\n\n\n\n let mut pos = 0;\n\n 
let len = buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_bytes(pos, 128).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 39, "score": 81277.94581185855 }, { "content": "#[test]\n\nfn read_u8_be() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n\n\n assert_eq!(buffer.read_int::<u8>(0, 1).unwrap(), 0b1);\n\n assert_eq!(buffer.read_int::<u8>(1, 1).unwrap(), 0b0);\n\n assert_eq!(buffer.read_int::<u8>(2, 2).unwrap(), 0b11);\n\n assert_eq!(buffer.read_int::<u8>(0, 3).unwrap(), 0b101);\n\n assert_eq!(buffer.read_int::<u8>(7, 5).unwrap(), 0b1011_0);\n\n assert_eq!(buffer.read_int::<u8>(6, 5).unwrap(), 0b01_011);\n\n\n\n assert_eq!(buffer.read_bool(0).unwrap(), true);\n\n assert_eq!(buffer.read_bool(8).unwrap(), false);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 40, "score": 81277.94581185855 }, { "content": "fn perf_bytes_be_unaligned() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), BigEndian);\n\n\n\n let mut pos = 3;\n\n let len = buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_bytes(pos, 128).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 41, "score": 81277.94581185855 }, { "content": "#[test]\n\nfn read_trait() {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n let mut stream = BitReadStream::new(buffer);\n\n let a: u8 = stream.read().unwrap();\n\n assert_eq!(0b1011_0101, a);\n\n let b: i8 = stream.read().unwrap();\n\n assert_eq!(0b110_1010, b);\n\n let c: i16 = stream.read().unwrap();\n\n assert_eq!(-0b101_0011_0110_0111, c);\n\n let d: bool = stream.read().unwrap();\n\n assert_eq!(true, d);\n\n let e: Option<u8> = stream.read().unwrap();\n\n assert_eq!(None, e);\n\n stream.set_pos(0).unwrap();\n\n let f: Option<u8> = 
stream.read().unwrap();\n\n assert_eq!(Some(0b011_0101_0), f);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 42, "score": 80883.65980049496 }, { "content": "#[test]\n\nfn read_u32_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(\n\n buffer.read_int::<u32>(6, 24).unwrap(),\n\n 0b01_1001_1010_1100_0110_1010_10\n\n );\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 43, "score": 78647.49322912819 }, { "content": "#[test]\n\nfn read_i64_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<i64>(6, 34).unwrap(), -6871928406);\n\n assert_eq!(buffer.read_int::<i64>(6, 59).unwrap(), -27471957726940758);\n\n assert_eq!(buffer.read_int::<i64>(1, 64).unwrap(), -879102647262104230);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 44, "score": 78647.49322912819 }, { "content": "#[test]\n\nfn read_f64_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_float::<f64>(6).unwrap(), 135447455835963910000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 45, "score": 78647.49322912819 }, { "content": "#[test]\n\nfn read_i8_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<i8>(0, 3).unwrap(), -0b11);\n\n assert_eq!(buffer.read_int::<i8>(0, 8).unwrap(), -0b100_1011);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 46, "score": 78647.49322912819 }, { "content": "#[test]\n\nfn read_i32_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(\n\n buffer.read_int::<i32>(6, 24).unwrap(),\n\n 0b1_1001_1010_1100_0110_1010_10\n\n );\n\n assert_eq!(buffer.read_int::<i32>(6, 26).unwrap(), -26824278);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 47, "score": 
78647.49322912819 }, { "content": "#[test]\n\nfn read_i16_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<i16>(6, 12).unwrap(), 0b0_0110_1010_10);\n\n assert_eq!(buffer.read_int::<i16>(6, 13).unwrap(), -0b11_1001_0101_10);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 48, "score": 78647.49322912819 }, { "content": "#[test]\n\nfn read_u16_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<u16>(6, 12).unwrap(), 0b00_0110_1010_10);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 49, "score": 78633.92166717403 }, { "content": "#[test]\n\nfn read_read_signed() {\n\n let bytes = vec![255, 255, 255, 255, 255, 255, 255, 255];\n\n let buffer = BitReadBuffer::new(&bytes, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<i32>(0, 32).unwrap(), -1);\n\n\n\n let bytes = (-10i32).to_le_bytes();\n\n let mut byte_vec = Vec::with_capacity(4);\n\n byte_vec.extend_from_slice(&bytes);\n\n let buffer = BitReadBuffer::new(&byte_vec, LittleEndian);\n\n assert_eq!(buffer.read_int::<i32>(0, 32).unwrap(), -10);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 50, "score": 78633.92166717403 }, { "content": "#[test]\n\nfn read_f32_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_float::<f64>(6).unwrap(), 135447455835963910000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 51, "score": 78633.92166717403 }, { "content": "#[test]\n\nfn test_write_signed() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_int(-17i32, 32).unwrap();\n\n stream.write_int(-9i32, 8).unwrap();\n\n }\n\n\n\n let mut read = 
BitReadStream::from(BitReadBuffer::new(&data, LittleEndian));\n\n\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(-17i32, read.read_int::<i32>(32).unwrap());\n\n assert_eq!(-9i32, read.read_int::<i32>(8).unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 52, "score": 78633.92166717403 }, { "content": "#[test]\n\nfn read_u64_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 34).unwrap(),\n\n 0b1001_1001_1001_1001_1010_1100_0110_1010_10\n\n );\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 60).unwrap(),\n\n 0b01_1110_0111_1001_1001_1001_1001_1001_1001_1001_1001_1010_1100_0110_1010_10\n\n );\n\n assert_eq!(\n\n buffer.read_int::<u64>(6, 64).unwrap(),\n\n 0b01_1001_1110_0111_1001_1001_1001_1001_1001_1001_1001_1001_1010_1100_0110_1010_10\n\n );\n\n assert_eq!(\n\n buffer.read_int::<u64>(8, 62).unwrap(),\n\n 0b01_1001_1110_0111_1001_1001_1001_1001_1001_1001_1001_1001_1010_1100_0110_1010\n\n );\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 53, "score": 78633.92166717403 }, { "content": "#[test]\n\nfn read_u8_le() {\n\n let buffer = BitReadBuffer::new(BYTES, LittleEndian);\n\n\n\n assert_eq!(buffer.read_int::<u8>(0, 1).unwrap(), 0b1);\n\n assert_eq!(buffer.read_bool(0).unwrap(), true);\n\n assert_eq!(buffer.read_int::<u8>(1, 1).unwrap(), 0b0);\n\n assert_eq!(buffer.read_bool(1).unwrap(), false);\n\n assert_eq!(buffer.read_int::<u8>(2, 2).unwrap(), 0b01);\n\n assert_eq!(buffer.read_int::<u8>(0, 3).unwrap(), 0b101);\n\n assert_eq!(buffer.read_int::<u8>(7, 5).unwrap(), 0b1010_1);\n\n assert_eq!(buffer.read_int::<u8>(6, 5).unwrap(), 0b010_10);\n\n assert_eq!(buffer.read_int::<u8>(12, 5).unwrap(), 0b0_0110);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 54, "score": 78607.47245153226 }, { "content": "fn perf_bytes_le_unaligned() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), LittleEndian);\n\n\n\n let mut pos = 3;\n\n let len = 
buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_bytes(pos, 128).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 55, "score": 78607.47245153226 }, { "content": "#[test]\n\nfn test_write_bool_be() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_bool(true).unwrap();\n\n stream.write_bool(false).unwrap();\n\n stream.write_bool(true).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, BigEndian));\n\n\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(false, read.read_bool().unwrap());\n\n assert_eq!(true, read.read_bool().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 56, "score": 78502.27572138043 }, { "content": "#[test]\n\nfn read_trait_unchecked() {\n\n unsafe {\n\n let buffer = BitReadBuffer::new(BYTES, BigEndian);\n\n let mut stream = BitReadStream::new(buffer);\n\n let a: u8 = stream.read_unchecked(true).unwrap();\n\n assert_eq!(0b1011_0101, a);\n\n let b: i8 = stream.read_unchecked(true).unwrap();\n\n assert_eq!(0b110_1010, b);\n\n let c: i16 = stream.read_unchecked(true).unwrap();\n\n assert_eq!(-0b101_0011_0110_0111, c);\n\n let d: bool = stream.read_unchecked(true).unwrap();\n\n assert_eq!(true, d);\n\n let e: Option<u8> = stream.read_unchecked(true).unwrap();\n\n assert_eq!(None, e);\n\n stream.set_pos(0).unwrap();\n\n let f: Option<u8> = stream.read_unchecked(true).unwrap();\n\n assert_eq!(Some(0b011_0101_0), f);\n\n }\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 57, "score": 78230.66665690791 }, { "content": "#[test]\n\nfn test_write_bool_number_be() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = 
BitWriteStream::new(&mut data, BigEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_int(3253u16, 16).unwrap();\n\n stream.write_int(13253u64, 64).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, BigEndian));\n\n\n\n assert_eq!(1u8, read.read_int::<u8>(1).unwrap());\n\n assert_eq!(3253u16, read.read::<u16>().unwrap());\n\n assert_eq!(13253u64, read.read::<u64>().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 58, "score": 76063.0009427144 }, { "content": "#[test]\n\nfn test_write_bool_le() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_bool(true).unwrap();\n\n stream.write_bool(false).unwrap();\n\n stream.write_bool(true).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, LittleEndian));\n\n\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(false, read.read_bool().unwrap());\n\n assert_eq!(true, read.read_bool().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 59, "score": 76063.0009427144 }, { "content": "#[test]\n\nfn test_bit_size() {\n\n assert_eq!(bit_size_of::<SizeStruct>(), Some(8 + 8 * 6 + 1));\n\n assert_eq!(bit_size_of::<UnnamedSizeStruct>(), Some(8 + 8 * 6 + 1));\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 60, "score": 75816.26108011804 }, { "content": "fn derive_bitread_trait(\n\n input: proc_macro::TokenStream,\n\n trait_name: String,\n\n extra_param: Option<TokenStream>,\n\n) -> proc_macro::TokenStream {\n\n let input: DeriveInput = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = &input.ident;\n\n\n\n let endianness = get_attribute_value(&input.attrs, &[\"endianness\"]);\n\n 
let mut trait_generics = input.generics.clone();\n\n // we need these separate generics to only add out Endianness param to the 'impl'\n\n let (_, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let lifetime: Option<&GenericParam> = trait_generics\n\n .params\n\n .iter()\n\n .find(|param| matches!(param, GenericParam::Lifetime(_)));\n\n let lifetime = match lifetime {\n\n Some(GenericParam::Lifetime(lifetime)) => lifetime.lifetime.clone(),\n\n _ => {\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 61, "score": 75802.92217138223 }, { "content": "#[test]\n\nfn test_write_bool_number_le() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_int(3253u16, 16).unwrap();\n\n stream.write_int(13253u64, 64).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, LittleEndian));\n\n\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(3253u16, read.read::<u16>().unwrap());\n\n assert_eq!(13253u64, read.read::<u64>().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 62, "score": 73822.39549412654 }, { "content": "#[test]\n\nfn test_read_struct_sized() {\n\n let bytes = vec![\n\n 12, 'h' as u8, 'e' as u8, 'l' as u8, 'l' as u8, 'o' as u8, 0, 0, 0, 0, 0, 0,\n\n ];\n\n let buffer = BitReadBuffer::new(&bytes, LittleEndian);\n\n let mut stream = BitReadStream::from(buffer);\n\n assert_eq!(\n\n TestStructSized {\n\n foo: 12,\n\n string: \"hel\".to_owned(),\n\n int: 4,\n\n },\n\n stream.read_sized(3).unwrap()\n\n );\n\n assert_eq!(Some(8 + 2 * 8 + 2), bit_size_of_sized::<TestStructSized>(2));\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 63, "score": 73585.70362686765 }, { "content": "#[test]\n\nfn test_read_struct_sized() {\n\n let bytes = vec![12, 'h' as u8, 'e' as u8, 'l' as u8, 0b1000_0000];\n\n 
let mut data = Vec::new();\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n let val = TestStructSized {\n\n foo: 12,\n\n string: \"hel\".to_owned(),\n\n int: 4,\n\n };\n\n stream.write_sized(&val, 3).unwrap();\n\n let mut read = BitReadStream::<BigEndian>::from(data.as_slice());\n\n\n\n assert_eq!(val, read.read_sized(3).unwrap());\n\n\n\n assert_eq!(bytes, data);\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 64, "score": 73585.70362686765 }, { "content": "#[test]\n\nfn test_read_size_expression() {\n\n let bytes = vec![0b0000_0011, b'a', b'b', b'c', b'd', b'e'];\n\n let mut data = Vec::new();\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n\n\n let val = TestSizeExpression {\n\n size: 3,\n\n str: String::from(\"abcde\"),\n\n };\n\n stream.write(&val).unwrap();\n\n assert_eq!(bytes, data);\n\n}\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 65, "score": 73585.70362686765 }, { "content": "fn read_perf<E: Endianness>(buffer: &BitReadBuffer<E>) -> u16 {\n\n let size = 5;\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n let mut result: u16 = 0;\n\n loop {\n\n if pos + size > len {\n\n return result;\n\n }\n\n let data = buffer.read_int::<u64>(pos, size).unwrap() as u16;\n\n result = result.wrapping_add(data);\n\n pos += size;\n\n }\n\n}\n\n\n\nconst ONES: [u8; 1024 * 1024 * 10] = [1u8; 1024 * 1024 * 10];\n\n\n", "file_path": "benches/bench.rs", "rank": 66, "score": 73339.89926306711 }, { "content": "#[test]\n\nfn test_read_unnamed_field_enum_sized() {\n\n let bytes = vec![0b1100_0110];\n\n let mut data = Vec::new();\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n stream\n\n .write_sized(&TestUnnamedFieldEnumSized::Asd(0b_00_0110), 6)\n\n .unwrap();\n\n assert_eq!(bytes, data);\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 67, "score": 69628.55317526474 }, { "content": "#[test]\n\nfn test_read_unnamed_field_enum_sized() {\n\n let bytes = vec![\n\n 
0b1100_0110,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n 0b1000_0100,\n\n ];\n\n let buffer = BitReadBuffer::new(&bytes, BigEndian);\n\n let mut stream = BitReadStream::from(buffer);\n\n assert_eq!(\n\n TestUnnamedFieldEnumSized::Asd(0b_00_0110),\n\n stream.read_sized(6).unwrap()\n\n );\n\n assert_eq!(8, stream.pos());\n\n assert_eq!(None, bit_size_of_sized::<TestUnnamedFieldEnumSized>(6));\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/read.rs", "rank": 68, "score": 69628.55317526474 }, { "content": "pub fn derive_bitwrite(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n derive_bitwrite_trait(input, \"BitWrite\".into(), \"write\".into(), None)\n\n}\n\n\n\n//\n\n/// See the [crate documentation](index.html) for details\n\n#[proc_macro_derive(\n\n BitWriteSized,\n\n attributes(size, size_bits, discriminant_bits, discriminant, endianness)\n\n)]\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 69, "score": 67240.29716446919 }, { "content": "pub fn derive_bitread(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n derive_bitread_trait(input, \"BitRead\".to_owned(), None)\n\n}\n\n\n\n//\n\n/// See the [crate documentation](index.html) for details\n\n#[proc_macro_derive(\n\n BitReadSized,\n\n attributes(size, size_bits, discriminant_bits, discriminant, endianness)\n\n)]\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 70, "score": 67240.29716446919 }, { "content": "fn parse(data: Data, struct_name: &Ident, attrs: &[Attribute], unchecked: bool) -> TokenStream {\n\n let span = struct_name.span();\n\n\n\n match data {\n\n Data::Struct(DataStruct { fields, .. }) => {\n\n let values = fields.iter().map(|f| {\n\n // Get attributes `#[..]` on each field\n\n let size = get_field_size(&f.attrs, f.span());\n\n let field_type = &f.ty;\n\n let span = f.span();\n\n if unchecked {\n\n match size {\n\n Some(size) => {\n\n quote_spanned! 
{ span =>\n\n {\n\n let _size: usize = #size;\n\n stream.read_sized_unchecked::<#field_type>(_size, end)?\n\n }\n\n }\n\n }\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 71, "score": 62532.57075254097 }, { "content": "fn get_field_size(attrs: &[Attribute], span: Span) -> Option<TokenStream> {\n\n get_attribute_value(attrs, &[\"size\"])\n\n .map(|size_lit| match size_lit {\n\n Lit::Int(size) => {\n\n quote_spanned! {span =>\n\n #size\n\n }\n\n }\n\n Lit::Str(size_field) => {\n\n let size = parse_str::<Expr>(&size_field.value()).expect(\"size\");\n\n quote_spanned! {span =>\n\n (#size) as usize\n\n }\n\n }\n\n _ => panic!(\"Unsupported value for size attribute\"),\n\n })\n\n .or_else(|| {\n\n get_attribute_value::<Lit>(attrs, &[\"size_bits\"]).map(|_| {\n\n quote_spanned! {span =>\n\n compile_error!(\"#[size_bits] is not supported when deriving BitWrite or BitWriteSized\")\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "bitbuffer_derive/src/write.rs", "rank": 72, "score": 54078.096567094886 }, { "content": "fn get_field_size(attrs: &[Attribute], span: Span) -> Option<TokenStream> {\n\n get_attribute_value(attrs, &[\"size\"])\n\n .map(|size_lit| match size_lit {\n\n Lit::Int(size) => {\n\n quote_spanned! {span =>\n\n #size\n\n }\n\n }\n\n Lit::Str(size_field) => {\n\n let size = parse_str::<Expr>(&size_field.value()).unwrap();\n\n quote_spanned! {span =>\n\n (#size) as usize\n\n }\n\n }\n\n _ => panic!(\"Unsupported value for size attribute\"),\n\n })\n\n .or_else(|| {\n\n get_attribute_value::<Lit>(attrs, &[\"size_bits\"]).map(|size_bits_lit| {\n\n quote_spanned! 
{span =>\n\n stream.read_int::<usize> (#size_bits_lit) ?\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "bitbuffer_derive/src/lib.rs", "rank": 73, "score": 54078.096567094886 }, { "content": "fn perf_be() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let data = read_perf(&buffer);\n\n assert_eq!(data, 0);\n\n black_box(data);\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 74, "score": 53365.97064718866 }, { "content": "#[track_caller]\n\nfn roundtrip<\n\n T: BitRead<'static, BigEndian>\n\n + BitWrite<BigEndian>\n\n + BitRead<'static, LittleEndian>\n\n + BitWrite<LittleEndian>\n\n + Debug\n\n + PartialEq,\n\n>(\n\n val: T,\n\n) {\n\n {\n\n let mut data = Vec::new();\n\n let size = {\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n stream.write(&val).unwrap();\n\n stream.bit_len()\n\n };\n\n let mut read = BitReadStream::new(BitReadBuffer::new_owned(data, LittleEndian));\n\n assert_eq!(val, read.read().unwrap());\n\n assert_eq!(size, read.pos());\n", "file_path": "tests/roundtrip.rs", "rank": 75, "score": 53365.97064718866 }, { "content": "fn perf_string_be() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), BigEndian);\n\n\n\n let mut pos = 0;\n\n let len = buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_string(pos, None).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 76, "score": 51793.51672525235 }, { "content": "#[test]\n\nfn test_array() {\n\n roundtrip([1, 2, 3, 4, 5]);\n\n roundtrip([String::from(\"asd\"), String::from(\"foobar\")]);\n\n}\n\n\n", "file_path": "tests/roundtrip.rs", "rank": 77, "score": 51793.51672525235 }, { "content": "fn perf_le() {\n\n let buffer = BitReadBuffer::new(black_box(&ONES), BigEndian);\n\n let data = read_perf(&buffer);\n\n assert_eq!(data, 0);\n\n black_box(data);\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 78, 
"score": 51793.51672525235 }, { "content": "fn perf_struct() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), LittleEndian);\n\n\n\n let mut stream: BitReadStream<LittleEndian> = buffer.clone().into();\n\n while stream.bits_left() > 40 {\n\n let result = stream.read::<BasicStruct>().unwrap();\n\n black_box(result);\n\n }\n\n}\n\n\n\niai::main!(\n\n perf_be,\n\n perf_bool,\n\n perf_bytes_be,\n\n perf_bytes_be_unaligned,\n\n perf_bytes_le,\n\n perf_bytes_le_unaligned,\n\n perf_f32_be,\n\n perf_f32_le,\n\n perf_f64,\n\n perf_le,\n\n perf_string_be,\n\n perf_string_le,\n\n perf_struct\n\n);\n", "file_path": "benches/bench.rs", "rank": 79, "score": 51793.51672525235 }, { "content": "#[test]\n\nfn test_from() {\n\n let buffer: BitReadBuffer<LittleEndian> = BitReadBuffer::from(BYTES);\n\n let _: BitReadStream<LittleEndian> = BitReadStream::from(buffer);\n\n let _: BitReadStream<LittleEndian> = BitReadStream::from(BYTES);\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 80, "score": 51793.51672525235 }, { "content": "#[test]\n\nfn test_tuple() {\n\n roundtrip((1, false));\n\n roundtrip((1, 10.12, String::from(\"asd\")));\n\n}\n", "file_path": "tests/roundtrip.rs", "rank": 81, "score": 51793.51672525235 }, { "content": "#[test]\n\nfn test_field_enum() {\n\n #[derive(Debug, PartialEq, BitRead, BitWrite)]\n\n struct CompoundVariant(#[size = 15] u16, bool);\n\n\n\n #[derive(Debug, PartialEq, BitRead, BitWrite)]\n\n #[discriminant_bits = 4]\n\n enum Enum {\n\n A,\n\n B(String),\n\n C(f32),\n\n D(#[size = 15] i64),\n\n E(CompoundVariant),\n\n }\n\n roundtrip(Enum::A);\n\n roundtrip(Enum::B(\"foobar\".into()));\n\n roundtrip(Enum::C(12.0));\n\n roundtrip(Enum::D(-12345));\n\n roundtrip(Enum::E(CompoundVariant(6789, true)));\n\n}\n\n\n", "file_path": "tests/roundtrip.rs", "rank": 82, "score": 50366.95671344175 }, { "content": "fn perf_string_le() {\n\n let buffer = BitReadBuffer::new(black_box(&STRING_DATA), LittleEndian);\n\n\n\n let mut pos = 0;\n\n let len 
= buffer.bit_len();\n\n loop {\n\n if pos + (128 * 8) > len {\n\n break;\n\n }\n\n let result = buffer.read_string(pos, None).unwrap();\n\n pos += (result.len() + 1) * 8;\n\n black_box(result);\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 83, "score": 50366.95671344175 }, { "content": "#[test]\n\nfn test_bare_enum() {\n\n #[derive(Debug, PartialEq, BitRead, BitWrite)]\n\n #[discriminant_bits = 4]\n\n enum Enum {\n\n A,\n\n B,\n\n C,\n\n D,\n\n }\n\n roundtrip(Enum::A);\n\n roundtrip(Enum::B);\n\n roundtrip(Enum::C);\n\n roundtrip(Enum::D);\n\n}\n\n\n", "file_path": "tests/roundtrip.rs", "rank": 84, "score": 50366.95671344175 }, { "content": "#[test]\n\nfn test_basic_struct() {\n\n #[derive(Debug, PartialEq, BitRead, BitWrite)]\n\n struct Foo {\n\n int: u32,\n\n float: f64,\n\n #[size = 2]\n\n smaller_int: u8,\n\n signed: i32,\n\n #[size = 3]\n\n smaller_signed: i32,\n\n dynamic_string: String,\n\n #[size = 3]\n\n fixed_string: String,\n\n }\n\n roundtrip(Foo {\n\n int: 1234,\n\n float: 10.2,\n\n smaller_int: 3,\n\n signed: -3,\n\n smaller_signed: -1,\n\n dynamic_string: \"Foobar\".to_string(),\n\n fixed_string: \"asd\".to_string(),\n\n });\n\n}\n\n\n", "file_path": "tests/roundtrip.rs", "rank": 85, "score": 50366.95671344175 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn test_serde_roundtrip() {\n\n use crate::LittleEndian;\n\n\n\n let mut buffer = BitReadBuffer::new_owned(vec![55; 8], LittleEndian);\n\n buffer.truncate(61).unwrap();\n\n\n\n let json = serde_json::to_string(&buffer).unwrap();\n\n\n\n let result: BitReadBuffer<LittleEndian> = serde_json::from_str(&json).unwrap();\n\n\n\n assert_eq!(result, buffer);\n\n}\n", "file_path": "src/readbuffer.rs", "rank": 86, "score": 50366.95671344175 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn test_serde_roundtrip() {\n\n use crate::LittleEndian;\n\n\n\n let mut buffer = BitReadBuffer::new_owned(vec![55; 8], LittleEndian);\n\n buffer.truncate(61).unwrap();\n\n let stream = 
BitReadStream::new(buffer);\n\n assert_eq!(61, stream.bit_len());\n\n\n\n let json = serde_json::to_string(&stream).unwrap();\n\n\n\n let result: BitReadStream<LittleEndian> = serde_json::from_str(&json).unwrap();\n\n\n\n assert_eq!(result, stream);\n\n}\n\n\n\n#[cfg(feature = \"schemars\")]\n\nimpl<'a, E: Endianness> schemars::JsonSchema for BitReadStream<'a, E> {\n\n fn schema_name() -> String {\n\n \"BitReadStream\".into()\n\n }\n", "file_path": "src/readstream.rs", "rank": 87, "score": 50366.95671344175 }, { "content": "#[test]\n\nfn test_write_float_be() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_float(3253.12f32).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, BigEndian));\n\n\n\n assert_eq!(1u8, read.read_int::<u8>(1).unwrap());\n\n assert_eq!(3253.12f32, read.read::<f32>().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 88, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_read_nonzero() {\n\n let bytes = vec![12, 0, 0, 0];\n\n let buffer = BitReadBuffer::new(&bytes, LittleEndian);\n\n let mut stream = BitReadStream::from(buffer);\n\n assert_eq!(NonZeroU16::new(12), stream.read().unwrap());\n\n assert_eq!(None, stream.read::<Option<NonZeroU16>>().unwrap());\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 89, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_invalid_utf8() {\n\n let bytes = vec![b'b', b'a', 129, b'c', 0, 0, 0];\n\n let buffer = BitReadBuffer::new(&bytes, LittleEndian);\n\n let mut stream = BitReadStream::new(buffer.clone());\n\n\n\n assert!(matches!(\n\n stream.read_string(None),\n\n Err(BitError::Utf8Error(_, 4))\n\n ));\n\n\n\n assert_eq!(stream.pos(), 5 * 8);\n\n\n\n let mut stream = BitReadStream::new(buffer);\n\n\n\n assert!(matches!(\n\n 
stream.read_string(Some(6)),\n\n Err(BitError::Utf8Error(_, 6))\n\n ));\n\n\n\n assert_eq!(stream.pos(), 6 * 8);\n\n}\n", "file_path": "tests/read_tests.rs", "rank": 90, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_to_owned_stream() {\n\n let bytes = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let buffer = BitReadBuffer::new(&bytes, LittleEndian);\n\n let mut stream = BitReadStream::new(buffer);\n\n let mut stream = stream.read_bits(15 * 7).unwrap();\n\n stream.skip_bits(25).unwrap();\n\n\n\n let mut owned = stream.to_owned();\n\n\n\n assert_eq!(stream.read::<u8>().unwrap(), owned.read::<u8>().unwrap());\n\n assert_eq!(stream.read::<u16>().unwrap(), owned.read::<u16>().unwrap());\n\n assert_eq!(stream.read::<u8>().unwrap(), owned.read::<u8>().unwrap());\n\n\n\n assert_eq!(stream.bit_len(), owned.bit_len());\n\n assert_eq!(stream.bits_left(), owned.bits_left());\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 91, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_write_to_slice() {\n\n let mut data = [0; 32];\n\n {\n\n let mut stream = BitWriteStream::from_slice(&mut data[..], LittleEndian);\n\n\n\n stream.write_bool(true).unwrap();\n\n stream.write_int(3253u16, 16).unwrap();\n\n stream.write_int(13253u64, 64).unwrap();\n\n }\n\n\n\n dbg!(&data);\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data[..], LittleEndian));\n\n\n\n assert_eq!(true, read.read_bool().unwrap());\n\n assert_eq!(3253u16, read.read::<u16>().unwrap());\n\n assert_eq!(13253u64, read.read::<u64>().unwrap());\n\n\n\n // 0 padded\n\n assert_eq!(false, read.read_bool().unwrap());\n\n}\n", "file_path": "tests/write_tests.rs", "rank": 92, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_read_struct() {\n\n let float: [u8; 4] = 12.5f32.to_bits().to_le_bytes();\n\n let bytes = vec![\n\n 12,\n\n 'h' as u8,\n\n 'e' as u8,\n\n 'l' as u8,\n\n 'l' as u8,\n\n 'o' as u8,\n\n 0,\n\n 'f' as u8,\n\n 'o' as u8,\n\n 'o' as u8,\n\n 
0,\n\n float[0],\n\n float[1],\n\n float[2],\n\n float[3],\n\n 0b0101_0101,\n\n 0b1010_1010,\n", "file_path": "tests/read_tests.rs", "rank": 93, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_write_container() {\n\n let mut data = Vec::new();\n\n {\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n\n\n stream.write(&Box::new(true)).unwrap();\n\n stream.write(&Rc::new(true)).unwrap();\n\n stream.write(&Arc::new(true)).unwrap();\n\n }\n\n\n\n let mut read = BitReadStream::from(BitReadBuffer::new(&data, LittleEndian));\n\n\n\n assert_eq!(Box::new(true), read.read().unwrap());\n\n assert_eq!(Rc::new(true), read.read().unwrap());\n\n assert_eq!(Arc::new(true), read.read().unwrap());\n\n}\n\n\n", "file_path": "tests/write_tests.rs", "rank": 94, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_read_str_be() {\n\n let bytes = vec![\n\n 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, 0, 0, 0, 0, 0,\n\n ];\n\n let buffer = BitReadBuffer::new(&bytes, BigEndian);\n\n assert_eq!(\n\n buffer.read_string(0, Some(13)).unwrap(),\n\n \"Hello world\".to_owned()\n\n );\n\n assert_eq!(\n\n buffer.read_string(0, Some(16)).unwrap(),\n\n \"Hello world\".to_owned()\n\n );\n\n assert_eq!(\n\n buffer.read_string(0, None).unwrap(),\n\n \"Hello world\".to_owned()\n\n );\n\n}\n\n\n", "file_path": "tests/read_tests.rs", "rank": 95, "score": 49066.8864957363 }, { "content": "#[test]\n\nfn test_read_struct() {\n\n let float: [u8; 4] = 12.5f32.to_bits().to_le_bytes();\n\n let bytes = vec![\n\n 12,\n\n 'h' as u8,\n\n 'e' as u8,\n\n 'l' as u8,\n\n 'l' as u8,\n\n 'o' as u8,\n\n 0,\n\n 'f' as u8,\n\n 'o' as u8,\n\n 'o' as u8,\n\n 0,\n\n float[0],\n\n float[1],\n\n float[2],\n\n float[3],\n\n 0b1010_0101,\n\n ];\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 96, "score": 47877.1969145702 }, { "content": "#[test]\n\nfn test_read_struct3() {\n\n let bytes = vec![0b0000_0101, 0b1010_1000];\n\n let mut data = Vec::new();\n\n let 
mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n let mut inner = BitReadStream::from(BitReadBuffer::new(&[0b1010_1010], BigEndian));\n\n\n\n let inner = inner.read_bits(5).unwrap();\n\n\n\n let val: TestStruct3<BigEndian> = TestStruct3 {\n\n size: 5,\n\n stream: inner,\n\n };\n\n stream.write(&val).unwrap();\n\n assert_eq!(bytes, data);\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 97, "score": 47877.1969145702 }, { "content": "#[test]\n\nfn test_read_struct2() {\n\n let bytes = vec![\n\n 0b0000_0101,\n\n 'h' as u8,\n\n 'e' as u8,\n\n 'l' as u8,\n\n 'l' as u8,\n\n 'o' as u8,\n\n ' ' as u8,\n\n 'w' as u8,\n\n 'o' as u8,\n\n 'r' as u8,\n\n 'l' as u8,\n\n ];\n\n let mut data = Vec::new();\n\n let mut stream = BitWriteStream::new(&mut data, BigEndian);\n\n stream\n\n .write(&TestStruct2 {\n\n size: 5,\n\n str: \"hello worl\".to_owned(),\n\n })\n\n .unwrap();\n\n assert_eq!(bytes, data);\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 98, "score": 47877.1969145702 }, { "content": "fn test_unnamed_struct() {\n\n let bytes = vec![\n\n 12, 'h' as u8, 'e' as u8, 'l' as u8, 'l' as u8, 'o' as u8, 0, 0, 0, 0, 0, 0,\n\n ];\n\n let mut data = Vec::new();\n\n let mut stream = BitWriteStream::new(&mut data, LittleEndian);\n\n stream\n\n .write(&UnnamedSize(12, \"hello\".to_string(), false))\n\n .unwrap();\n\n\n\n assert_eq!(bytes, data);\n\n}\n\n\n", "file_path": "bitbuffer_derive/tests/write.rs", "rank": 99, "score": 47877.1969145702 } ]
Rust
quibitous/src/blockchain/bootstrap.rs
The-Blockchain-Company/quibitous
a93cedb5c9d833f6e82429286faaf4f15e9e15a0
use super::tip::TipUpdater; use crate::blockcfg::{Block, HeaderHash}; use crate::blockchain::{ chain::{CheckHeaderProof, StreamInfo, StreamReporter}, Blockchain, Ref, Tip, }; use crate::metrics::Metrics; use chain_core::property::Deserialize; use chain_network::data as net_data; use chain_network::error::Error as NetworkError; use futures::prelude::*; use tokio_util::sync::CancellationToken; use std::sync::Arc; #[derive(Debug, thiserror::Error)] pub enum Error { #[error(transparent)] BlockchainError(Box<super::Error>), #[error("received block {0} is not connected to the block chain")] BlockMissingParent(HeaderHash), #[error("bootstrap pull stream failed")] PullStreamFailed(#[source] NetworkError), #[error("failures while deserializing block from stream")] BlockDeserialize(#[from] std::io::Error), #[error("the bootstrap process was interrupted")] Interrupted, } pub async fn bootstrap_from_stream<S>( blockchain: Blockchain, branch: Tip, stream: S, cancellation_token: CancellationToken, ) -> Result<Option<Arc<Ref>>, Error> where S: Stream<Item = Result<net_data::Block, NetworkError>> + Unpin, { let block0 = *blockchain.block0(); let mut tip_updater = TipUpdater::new( branch, blockchain.clone(), None, None, Metrics::builder().build(), ); let mut bootstrap_info = StreamReporter::new(report); let mut maybe_parent_tip = None; let cancel = cancellation_token.cancelled(); tokio::pin!(cancel); let mut stream = stream .map_err(Error::PullStreamFailed) .map(|maybe_block| maybe_block.and_then(|b| Ok(Block::deserialize(b.as_bytes())?))) .take_until(cancel); while let Some(block_result) = stream.next().await { let maybe_tip = match block_result { Ok(block) => { if block.header().hash() == block0 { continue; } bootstrap_info.append_block(&block); blockchain .handle_bootstrap_block(block, CheckHeaderProof::Enabled) .await .map_err(|e| Error::BlockchainError(Box::new(e))) } Err(err) => Err(err), }; match maybe_tip { Ok(parent_tip) => { maybe_parent_tip = Some(parent_tip); } Err(err) 
=> { if let Some(bootstrap_tip) = maybe_parent_tip { tip_updater .process_new_ref(bootstrap_tip) .await .map_err(|e| Error::BlockchainError(Box::new(e)))?; } return Err(err); } } } if let Some(ref bootstrap_tip) = maybe_parent_tip { tip_updater .process_new_ref(bootstrap_tip.clone()) .await .map_err(|e| Error::BlockchainError(Box::new(e)))?; } else { tracing::info!("no new blocks received from the network"); } if stream.take_result().is_some() { return Err(Error::Interrupted); } Ok(maybe_parent_tip) } fn report(stream_info: &StreamInfo) { fn print_sz(n: f64) -> String { if n > 1_000_000.0 { format!("{:.2}mb", n / (1024 * 1024) as f64) } else if n > 1_000.0 { format!("{:.2}kb", n / 1024_f64) } else { format!("{:.2}b", n) } } let current = std::time::SystemTime::now(); let time_diff = current.duration_since(stream_info.last_reported); let bytes_diff = stream_info.bytes_received - stream_info.last_bytes_received; let bytes = print_sz(bytes_diff as f64); let kbs = time_diff .map(|td| { let v = (bytes_diff as f64) / td.as_secs_f64(); print_sz(v) }) .unwrap_or_else(|_| "N/A".to_string()); tracing::info!( "receiving from network bytes={} {}/s, blockchain {}", bytes, kbs, stream_info .last_block_description .as_ref() .map(|lbd| lbd.to_string()) .expect("append_block should always be called before report") ) }
use super::tip::TipUpdater; use crate::blockcfg::{Block, HeaderHash}; use crate::blockchain::{ chain::{CheckHeaderProof, StreamInfo, StreamReporter}, Blockchain, Ref, Tip, }; use crate::metrics::Metrics; use chain_core::property::Deserialize; use chain_network::data as net_data; use chain_network::error::Error as NetworkError; use futures::prelude::*; use tokio_util::sync::CancellationToken; use std::sync::Arc; #[derive(Debug, thiserror::Error)] pub enum Error { #[error(transparent)] BlockchainError(Box<super::Error>), #[error("received block {0} is not connected to the block chain")] BlockMissingParent(HeaderHash),
map_err(|e| Error::BlockchainError(Box::new(e))) } Err(err) => Err(err), }; match maybe_tip { Ok(parent_tip) => { maybe_parent_tip = Some(parent_tip); } Err(err) => { if let Some(bootstrap_tip) = maybe_parent_tip { tip_updater .process_new_ref(bootstrap_tip) .await .map_err(|e| Error::BlockchainError(Box::new(e)))?; } return Err(err); } } } if let Some(ref bootstrap_tip) = maybe_parent_tip { tip_updater .process_new_ref(bootstrap_tip.clone()) .await .map_err(|e| Error::BlockchainError(Box::new(e)))?; } else { tracing::info!("no new blocks received from the network"); } if stream.take_result().is_some() { return Err(Error::Interrupted); } Ok(maybe_parent_tip) } fn report(stream_info: &StreamInfo) { fn print_sz(n: f64) -> String { if n > 1_000_000.0 { format!("{:.2}mb", n / (1024 * 1024) as f64) } else if n > 1_000.0 { format!("{:.2}kb", n / 1024_f64) } else { format!("{:.2}b", n) } } let current = std::time::SystemTime::now(); let time_diff = current.duration_since(stream_info.last_reported); let bytes_diff = stream_info.bytes_received - stream_info.last_bytes_received; let bytes = print_sz(bytes_diff as f64); let kbs = time_diff .map(|td| { let v = (bytes_diff as f64) / td.as_secs_f64(); print_sz(v) }) .unwrap_or_else(|_| "N/A".to_string()); tracing::info!( "receiving from network bytes={} {}/s, blockchain {}", bytes, kbs, stream_info .last_block_description .as_ref() .map(|lbd| lbd.to_string()) .expect("append_block should always be called before report") ) }
#[error("bootstrap pull stream failed")] PullStreamFailed(#[source] NetworkError), #[error("failures while deserializing block from stream")] BlockDeserialize(#[from] std::io::Error), #[error("the bootstrap process was interrupted")] Interrupted, } pub async fn bootstrap_from_stream<S>( blockchain: Blockchain, branch: Tip, stream: S, cancellation_token: CancellationToken, ) -> Result<Option<Arc<Ref>>, Error> where S: Stream<Item = Result<net_data::Block, NetworkError>> + Unpin, { let block0 = *blockchain.block0(); let mut tip_updater = TipUpdater::new( branch, blockchain.clone(), None, None, Metrics::builder().build(), ); let mut bootstrap_info = StreamReporter::new(report); let mut maybe_parent_tip = None; let cancel = cancellation_token.cancelled(); tokio::pin!(cancel); let mut stream = stream .map_err(Error::PullStreamFailed) .map(|maybe_block| maybe_block.and_then(|b| Ok(Block::deserialize(b.as_bytes())?))) .take_until(cancel); while let Some(block_result) = stream.next().await { let maybe_tip = match block_result { Ok(block) => { if block.header().hash() == block0 { continue; } bootstrap_info.append_block(&block); blockchain .handle_bootstrap_block(block, CheckHeaderProof::Enabled) .await .
random
[ { "content": "fn network_block_error_into_reply(err: chain::Error) -> intercom::Error {\n\n use super::chain::Error::*;\n\n\n\n match err {\n\n Storage(e) => intercom::Error::failed(e),\n\n Ledger(e) => intercom::Error::failed_precondition(e),\n\n Block0(e) => intercom::Error::failed(e),\n\n MissingParentBlock(_) => intercom::Error::failed_precondition(err.to_string()),\n\n BlockHeaderVerificationFailed(_) => intercom::Error::invalid_argument(err.to_string()),\n\n _ => intercom::Error::failed(err.to_string()),\n\n }\n\n}\n\n\n", "file_path": "quibitous/src/blockchain/process.rs", "rank": 0, "score": 270021.0607784168 }, { "content": "/// chose which of the two Ref is the most interesting to keep as a branch\n\n///\n\n/// i.e. if the two Ref points to the same block date: this allows to make a choice\n\n/// as to which Ref ought to be our preferred choice for a tip.\n\npub fn compare_against(storage: &Storage, current: &Ref, candidate: &Ref) -> ComparisonResult {\n\n let epoch_stability_depth = current.epoch_ledger_parameters().epoch_stability_depth;\n\n\n\n let rollback_possible =\n\n check_rollback_up_to(epoch_stability_depth, storage, current, candidate);\n\n\n\n // returns `true` if the candidate is set in what appears to be in the future\n\n // relative to this node, with a little buffer to accomodate for small inconsistencies\n\n // in time\n\n let in_future = match candidate.elapsed() {\n\n Err(duration) if duration.duration() > ALLOWED_TIME_DISCREPANCY => {\n\n tracing::debug!(\n\n \"candidate block {} appear to be in the future by {}s, will not consider it for updating our current tip\",\n\n candidate.header().description(),\n\n duration.duration().as_secs()\n\n );\n\n true\n\n }\n\n _ => false,\n\n };\n\n\n\n if rollback_possible && !in_future && current.chain_length() < candidate.chain_length() {\n\n ComparisonResult::PreferCandidate\n\n } else {\n\n ComparisonResult::PreferCurrent\n\n }\n\n}\n\n\n", "file_path": 
"quibitous/src/blockchain/chain_selection.rs", "rank": 1, "score": 244267.0128679359 }, { "content": "pub fn slot_duration(block0: &Block) -> Result<Duration, Block0Error> {\n\n for config in initial(block0)?.iter() {\n\n if let ConfigParam::SlotDuration(duration) = config {\n\n return Ok(Duration::from_secs(*duration as u64));\n\n }\n\n }\n\n Err(Block0Malformed::NoSlotDuration.into())\n\n}\n\n\n", "file_path": "modules/blockchain/src/block0.rs", "rank": 2, "score": 230955.89187644672 }, { "content": "pub fn load_block(block_reader: impl BufRead) -> Result<Block, Error> {\n\n Block::deserialize(block_reader).map_err(Error::BlockFileCorrupted)\n\n}\n\n\n\n#[derive(StructOpt)]\n\npub struct Common {\n\n #[structopt(flatten)]\n\n pub input: Input,\n\n\n\n /// the file path to the block to create\n\n ///\n\n /// If not available the command will expect to write the block to\n\n /// to the standard output\n\n #[structopt(long = \"output\", parse(from_os_str), name = \"FILE_OUTPUT\")]\n\n pub output_file: Option<std::path::PathBuf>,\n\n}\n\n\n\nimpl Common {\n\n pub fn open_output(&self) -> Result<impl Write, Error> {\n\n io::open_file_write(&self.output_file).map_err(|source| Error::OutputInvalid {\n\n source,\n\n path: self.output_file.clone().unwrap_or_default(),\n\n })\n\n }\n\n}\n", "file_path": "qcli/src/qcli_lib/block/mod.rs", "rank": 3, "score": 229261.9480687612 }, { "content": "pub fn start_time(block0: &Block) -> Result<SystemTime, Block0Error> {\n\n for config in initial(block0)?.iter() {\n\n if let ConfigParam::Block0Date(date) = config {\n\n return Ok(SystemTime::UNIX_EPOCH + Duration::from_secs(date.0));\n\n }\n\n }\n\n Err(Block0Malformed::NoStartTime.into())\n\n}\n\n\n", "file_path": "modules/blockchain/src/block0.rs", "rank": 4, "score": 227230.8774115587 }, { "content": "fn get_current_block_date(tip: &Ref) -> BlockDate {\n\n let time = std::time::SystemTime::now();\n\n let era = tip.epoch_leadership_schedule().era();\n\n let epoch_position = tip\n\n 
.time_frame()\n\n .slot_at(&time)\n\n .and_then(|slot| era.from_slot_to_era(slot))\n\n .expect(\"the current time and blockchain state should produce a valid blockchain date\");\n\n let block_date: BlockDate = epoch_position.into();\n\n BlockDate {\n\n slot_id: block_date.slot_id + 1,\n\n ..block_date\n\n }\n\n}\n\n\n", "file_path": "quibitous/src/fragment/pool.rs", "rank": 5, "score": 220059.0969380432 }, { "content": "#[derive(Debug, thiserror::Error)]\n\nenum StreamingError {\n\n #[error(\"error accessing storage\")]\n\n Storage(#[from] Error),\n\n #[error(\"failed to send block\")]\n\n Sending(#[from] ReplySendError),\n\n}\n\n\n\nimpl Storage {\n\n pub fn file<P: AsRef<Path>>(path: P, span: Span) -> Result<Self, Error> {\n\n let storage = BlockStore::file(path, HeaderHash::zero_hash().as_bytes().to_vec())?;\n\n Ok(Storage { storage, span })\n\n }\n\n\n\n pub fn memory(span: Span) -> Result<Self, Error> {\n\n let storage = BlockStore::memory(HeaderHash::zero_hash().as_bytes().to_vec())?;\n\n Ok(Storage { storage, span })\n\n }\n\n\n\n pub fn get_tag(&self, tag: &str) -> Result<Option<HeaderHash>, Error> {\n\n self.storage\n", "file_path": "quibitous/src/blockchain/storage.rs", "rank": 6, "score": 198003.03984199482 }, { "content": "fn chain_header_error_into_reply(err: candidate::Error) -> intercom::Error {\n\n use super::candidate::Error::*;\n\n\n\n // TODO: more detailed error case matching\n\n match err {\n\n Blockchain(e) => intercom::Error::failed(e.to_string()),\n\n EmptyHeaderStream => intercom::Error::invalid_argument(err.to_string()),\n\n MissingParentBlock(_) => intercom::Error::failed_precondition(err.to_string()),\n\n BrokenHeaderChain(_) => intercom::Error::invalid_argument(err.to_string()),\n\n HeaderChainVerificationFailed(e) => intercom::Error::invalid_argument(e),\n\n }\n\n}\n", "file_path": "quibitous/src/blockchain/process.rs", "rank": 7, "score": 194059.17246704112 }, { "content": "fn get_block_from_storage(storage: &Storage, id: HeaderHash) 
-> Result<Block, Error> {\n\n match storage.get(id) {\n\n Ok(Some(block)) => Ok(block),\n\n Ok(None) => Err(Error::not_found(format!(\n\n \"block {} is not known to this node\",\n\n id\n\n ))),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n\n\n\n// Stop after sending the first Err() variant\n\n//\n\n// Common base for GetBlocks and GetHeaders\n\nasync fn fuse_send_items<T, V>(\n\n items: T,\n\n reply_handle: ReplyStreamHandle<V>,\n\n) -> Result<(), ReplySendError>\n\nwhere\n\n T: IntoIterator<Item = Result<V, Error>>,\n", "file_path": "quibitous/src/client.rs", "rank": 8, "score": 186570.54550158328 }, { "content": "#[test]\n\npub fn test_correct_id_is_returned_for_block_tip_if_only_genesis_block_exists() {\n\n let qcli: JCli = Default::default();\n\n let quibitous = Starter::new().start().unwrap();\n\n let block_id = qcli.rest().v0().tip(quibitous.rest_uri());\n\n\n\n assert_ne!(&block_id, \"\", \"empty block hash\");\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/tip.rs", "rank": 9, "score": 181817.3692452131 }, { "content": "/// Performs lightweight sanity checks on information fields of a block header\n\n/// against those in the header of the block's parent.\n\n/// The `parent` header must have been retrieved based on, or otherwise\n\n/// matched to, the parent block hash of `header`.\n\n///\n\n/// # Panics\n\n///\n\n/// If the parent hash in the header does not match that of the parent,\n\n/// this function may panic.\n\npub fn pre_verify_link(\n\n header: &Header,\n\n parent: &Header,\n\n) -> ::std::result::Result<(), HeaderChainVerifyError> {\n\n use chain_core::property::ChainLength as _;\n\n\n\n debug_assert_eq!(header.block_parent_hash(), parent.hash());\n\n\n\n if header.block_date() <= parent.block_date() {\n\n return Err(HeaderChainVerifyError::BlockDateBeforeParent {\n\n child: header.block_date(),\n\n parent: parent.block_date(),\n\n });\n\n }\n\n if header.chain_length() != parent.chain_length().next() {\n\n return 
Err(HeaderChainVerifyError::ChainLengthNotIncremental {\n\n child: header.chain_length(),\n\n parent: parent.chain_length(),\n\n });\n\n }\n", "file_path": "quibitous/src/blockchain/chain.rs", "rank": 10, "score": 181591.9569584867 }, { "content": "pub fn new_epoch_leadership_from(\n\n epoch: Epoch,\n\n parent: Arc<Ref>,\n\n rewards_report_all: bool,\n\n) -> EpochLeadership {\n\n let parent_ledger_state = parent.ledger();\n\n let parent_epoch_leadership_schedule = parent.epoch_leadership_schedule().clone();\n\n let parent_epoch_ledger_parameters = parent.epoch_ledger_parameters().clone();\n\n let parent_epoch_rewards_info = parent.epoch_rewards_info().cloned();\n\n let parent_time_frame = parent.time_frame().clone();\n\n\n\n let parent_date = parent.block_date();\n\n\n\n if parent_date.epoch < epoch {\n\n // TODO: the time frame may change in the future, we will need to handle this\n\n // special case but it is not actually clear how to modify the time frame\n\n // for the blockchain\n\n use chain_impl_mockchain::chaintypes::ConsensusVersion;\n\n\n\n let ledger = parent_ledger_state\n", "file_path": "quibitous/src/blockchain/chain.rs", "rank": 11, "score": 181585.68147876678 }, { "content": "pub fn open_block_file(input_file: &Option<PathBuf>) -> Result<impl BufRead, Error> {\n\n io::open_file_read(input_file).map_err(|source| Error::InputInvalid {\n\n source,\n\n path: input_file.clone().unwrap_or_default(),\n\n })\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/block/mod.rs", "rank": 12, "score": 179140.91135986085 }, { "content": "fn apply_block_to_blocks(blocks: Blocks, block: &ExplorerBlock) -> Result<Blocks, Error> {\n\n let block_id = block.id();\n\n blocks\n\n .insert(block_id, Arc::new(block.clone()))\n\n .map_err(|_| Error::BlockAlreadyExists(block_id))\n\n}\n\n\n", "file_path": "explorer/src/db/mod.rs", "rank": 13, "score": 173452.48565891664 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_block_id() {\n\n let qcli: JCli = 
Default::default();\n\n let incorrect_block_id = \"e1049ea45726f0b1fc473af54f706546b3331765abf89ae9e6a8333e49621641aa\";\n\n let quibitous = Starter::new().start().unwrap();\n\n\n\n qcli.rest().v0().block().get_expect_fail(\n\n incorrect_block_id,\n\n quibitous.rest_uri(),\n\n \"node rejected request because of invalid parameters\",\n\n );\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/block.rs", "rank": 14, "score": 172039.31767793646 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_block_id_in_next_block_id_request() {\n\n let qcli: JCli = Default::default();\n\n let incorrect_block_id = \"e1049ea45726f0b1fc473af54f706546b3331765abf89ae9e6a8333e49621641aa\";\n\n\n\n let quibitous = Starter::new().start().unwrap();\n\n\n\n qcli.rest().v0().block().next_expect_fail(\n\n incorrect_block_id,\n\n 1,\n\n quibitous.rest_uri(),\n\n \"node rejected request because of invalid parameters\",\n\n );\n\n}\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/block.rs", "rank": 15, "score": 170565.07203261286 }, { "content": "pub fn read_line<P: AsRef<Path>>(path: &Option<P>) -> Result<String, Error> {\n\n let mut line = String::new();\n\n open_file_read(path)?.read_line(&mut line)?;\n\n Ok(line.trim_end().to_string())\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ReadYamlError {\n\n #[error(\"could not read input\")]\n\n Io(#[from] Error),\n\n #[error(\"input contains malformed yaml\")]\n\n Yaml(#[from] serde_yaml::Error),\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/io.rs", "rank": 16, "score": 164979.7285461451 }, { "content": "/// Initiates a client connection, returning a connection handle and\n\n/// the connection future that must be polled to complete the connection.\n\n///\n\n/// Note that this is the only function in this module that is tied to the\n\n/// gRPC protocol, all other code is generic in terms of network-core traits.\n\n/// This is intentional, to facilitate extension to 
different protocols\n\n/// in the future.\n\npub fn connect(state: ConnectionState, channels: Channels) -> (ConnectHandle, ConnectFuture) {\n\n let (sender, receiver) = oneshot::channel();\n\n let peer = state.peer();\n\n let keypair = state.global.keypair.clone();\n\n let span = state.span().clone();\n\n let async_span = span.clone();\n\n let _enter = span.enter();\n\n let cf = async move {\n\n let mut grpc_client = {\n\n tracing::debug!(\"connecting\");\n\n grpc::connect(&peer).await\n\n }\n\n .map_err(ConnectError::Transport)?;\n\n\n\n let mut nonce = [0u8; NONCE_LEN];\n\n rand::thread_rng().fill(&mut nonce);\n\n\n\n let hr = grpc_client\n\n .handshake(&nonce[..])\n\n .await\n", "file_path": "quibitous/src/network/client/connect.rs", "rank": 17, "score": 164909.30587142217 }, { "content": "pub fn block0() -> Block {\n\n block::builder(\n\n BlockVersion::Genesis,\n\n ContentsBuilder::new().into(),\n\n |hdr| {\n\n Ok::<_, ()>(\n\n hdr.set_genesis()\n\n .set_date(BlockDate::first())\n\n .into_unsigned_header()\n\n .expect(\"internal error cannot build unsigned block\")\n\n .generalize(),\n\n )\n\n },\n\n )\n\n .expect(\"internal error: block builder cannot return error\")\n\n}\n", "file_path": "testing/quibitous-automation/src/quibitous/grpc/server/data.rs", "rank": 18, "score": 162324.51603829858 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_https() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"https://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let quibitous = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = quibitous.rest();\n\n rest_client.set_origin(\"https://domain.com\");\n\n\n\n assert!(rest_client.raw().stats()?.status().is_success());\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 19, "score": 160905.35096424163 }, { "content": "#[test]\n\npub fn pull_blocks_to_tip_correct_hash() {\n\n let setup = setup::client::default();\n\n\n\n std::thread::sleep(Duration::from_secs(10)); // wait for the server to produce some blocks\n\n\n\n let blocks = setup\n\n .client\n\n .pull_blocks_to_tip(Hash::from_str(setup.config.genesis_block_hash()).unwrap())\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n assert!(\n\n is_long_prefix(&block_hashes_from_logs, &blocks_hashes),\n\n \"server blocks: {:?} | client blocks: {:?}\",\n\n block_hashes_from_logs,\n\n blocks_hashes\n\n );\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 20, "score": 160506.16352663896 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_path() {\n\n let qcli: JCli = Default::default();\n\n let config = NodeConfigBuilder::new().build();\n\n let incorrect_uri = format!(\"http://{}/api/api\", config.rest.listen);\n\n\n\n qcli.rest()\n\n .v0()\n\n .tip_expect_fail(incorrect_uri, \"tcp connect error\");\n\n}\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/tip.rs", "rank": 21, "score": 160491.31125487763 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_single_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let quibitous = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = 
quibitous.rest();\n\n rest_client.set_origin(\"http://domain.com\");\n\n\n\n assert!(rest_client.raw().stats()?.status().is_success());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 22, "score": 159335.08755759193 }, { "content": "#[test]\n\npub fn cors_illegal_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let quibitous = Starter::new().config(config).start().unwrap();\n\n\n\n let mut rest_client = quibitous.rest();\n\n rest_client.set_origin(\"http://other_domain.com\");\n\n\n\n assert_request_failed_due_to_cors(&rest_client)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 23, "score": 159335.08755759193 }, { "content": "#[test]\n\npub fn cors_multi_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com;http://other_domain.com\"\n\n .to_owned()\n\n .into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"invalid value: string \\\"http://domain.com;http://other_domain.com\\\"\");\n\n\n\n Ok(())\n\n}\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 24, "score": 159335.08755759193 }, { "content": "#[test]\n\npub fn cors_wrong_delimiter() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n 
.with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com,http://other_domain.com\"\n\n .to_owned()\n\n .into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"rest.cors.allowed_origins[0]: invalid value\");\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 25, "score": 159335.08755759193 }, { "content": "/// open the given file path as a writable stream, or stdout if no path\n\n/// provided\n\npub fn open_file_write<P: AsRef<Path>>(path: &Option<P>) -> Result<impl Write, Error> {\n\n match path {\n\n Some(path) => {\n\n let writer = std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .read(false)\n\n .append(false)\n\n .truncate(true)\n\n .open(path)?;\n\n Ok(Box::new(writer) as Box<dyn Write>)\n\n }\n\n None => Ok(Box::new(stdout()) as Box<dyn Write>),\n\n }\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/io.rs", "rank": 26, "score": 158619.3495800232 }, { "content": "#[test]\n\npub fn cors_malformed_domain_no_http() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"invalid value: string \\\"domain.com\\\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 27, "score": 157817.38701522426 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_ip_versus_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n 
allowed_origins: vec![\"http://127.0.0.1\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let quibitous = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = quibitous.rest();\n\n rest_client.set_origin(\"http://localhost\");\n\n\n\n assert_eq!(rest_client.raw().stats()?.status(), 403);\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/cors.rs", "rank": 28, "score": 157817.38701522426 }, { "content": "pub fn parse_shift(from: &str) -> Result<(BlockDate, bool), BlockDateParseError> {\n\n if let Some(stripped) = from.strip_prefix('~') {\n\n BlockDate::from_str(stripped).map(|d| (d, true))\n\n } else {\n\n BlockDate::from_str(from).map(|d| (d, false))\n\n }\n\n}\n", "file_path": "testing/mfive/src/mfive_lib/args.rs", "rank": 29, "score": 157239.953042924 }, { "content": "/// open the given file path as a readable stream, or stdin if no path\n\n/// provided\n\npub fn open_file_read<P: AsRef<Path>>(path: &Option<P>) -> Result<impl BufRead, Error> {\n\n match path {\n\n Some(path) => {\n\n let reader = std::fs::OpenOptions::new()\n\n .create(false)\n\n .write(false)\n\n .read(true)\n\n .append(false)\n\n .open(path)?;\n\n Ok(Box::new(BufReader::new(reader)) as Box<dyn BufRead>)\n\n }\n\n None => Ok(Box::new(BufReader::new(stdin())) as Box<dyn BufRead>),\n\n }\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/io.rs", "rank": 30, "score": 156508.6200901315 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_non_existent_genesis_block() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let block_file = temp_dir.child(\"block-0.bin\");\n\n let qcli: JCli = Default::default();\n\n qcli.genesis().hash_expect_fail(block_file.path(), \"file\");\n\n}\n", "file_path": "testing/quibitous-integration-tests/src/qcli/genesis/hash.rs", "rank": 31, "score": 155358.54187351058 }, { "content": "pub fn 
spawn_network(args: Args) -> Result<(), Error> {\n\n let config: Config = serde_yaml::from_reader(File::open(&args.config)?)?;\n\n let topology = config.build_topology();\n\n\n\n match &config.session.mode {\n\n SessionMode::Standard => standard::spawn_network(config, topology, args),\n\n SessionMode::Monitor => monitor::spawn_network(config, topology, args),\n\n SessionMode::Interactive => interactive::spawn_network(config, topology),\n\n }\n\n}\n", "file_path": "testing/quantricity/src/spawn/mod.rs", "rank": 32, "score": 148500.73174789836 }, { "content": "pub fn gen_pub_key<K>(priv_key_bech32: &[u5]) -> Result<String, Error>\n\nwhere\n\n K: AsymmetricKey,\n\n PublicKey<K::PubAlg>: Bech32,\n\n{\n\n let priv_key_bytes = Vec::<u8>::from_base32(priv_key_bech32).map_err(Bech32Error::from)?;\n\n let priv_key = <SecretKey<K>>::from_binary(&priv_key_bytes)?;\n\n let pub_key = priv_key.to_public();\n\n Ok(pub_key.to_bech32_str())\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/key.rs", "rank": 33, "score": 148034.95087617688 }, { "content": "#[derive(Error, Debug)]\n\nenum PropagateError {\n\n #[error(\"Error sending message to task due to {0}\")]\n\n InternalCommSend(#[from] futures::channel::mpsc::SendError),\n\n #[error(\"Error receving message from task due to {0}\")]\n\n InternalCommRecv(#[from] crate::intercom::Error),\n\n}\n\n\n", "file_path": "quibitous/src/network/mod.rs", "rank": 34, "score": 146900.6657001754 }, { "content": "/// prepare the block storage from the given settings\n\npub fn prepare_storage(setting: &Settings) -> Result<Storage, Error> {\n\n let span = span!(Level::TRACE, \"sub_task\", kind = \"storage\");\n\n let storage_span = span.clone();\n\n let _enter = span.enter();\n\n if let Some(dir) = &setting.storage {\n\n std::fs::create_dir_all(dir).map_err(|err| Error::Io {\n\n source: err,\n\n reason: ErrorKind::BlockStorage,\n\n })?;\n\n\n\n tracing::info!(\"storing blockchain in '{:?}'\", dir);\n\n\n\n Storage::file(dir, 
storage_span).map_err(Into::into)\n\n } else {\n\n Storage::memory(storage_span).map_err(Into::into)\n\n }\n\n}\n\n\n\n/// Try to fetch the block0_id from the HTTP base URL (services) in the array\n\n///\n", "file_path": "quibitous/src/start_up/mod.rs", "rank": 35, "score": 145525.8274702543 }, { "content": "/// Checks if the multiaddr is valid for contacting a p2p peer\n\n/// and resolves DNS components.\n\n///\n\n/// Note that DNS resolution is performed synchronously by this function,\n\n/// so this should only be used at initialization.\n\npub fn resolve_dns(addr: &Multiaddr) -> Result<Multiaddr, Error> {\n\n let mut components = addr.iter();\n\n\n\n let ip_or_fqdn = components.next().ok_or(Error::InvalidMultiaddr)?;\n\n let port = components\n\n .next()\n\n .and_then(|ac| {\n\n if let Protocol::Tcp(port) = ac {\n\n Some(port)\n\n } else {\n\n None\n\n }\n\n })\n\n .ok_or(Error::InvalidMultiaddr)?;\n\n\n\n let socket_addr = match ip_or_fqdn {\n\n Protocol::Ip4(addr) => SocketAddrV4::new(addr, port).into(),\n\n Protocol::Ip6(addr) => SocketAddrV6::new(addr, port, 0, 0).into(),\n\n Protocol::Dns(fqdn) => (fqdn.borrow(), port)\n\n .to_socket_addrs()\n", "file_path": "quibitous-lib/src/multiaddr.rs", "rank": 36, "score": 145525.13174735187 }, { "content": "enum ApplyFragmentError {\n\n DoesNotFit,\n\n SoftDeadlineReached,\n\n Rejected(String),\n\n}\n\n\n", "file_path": "quibitous/src/fragment/selection.rs", "rank": 37, "score": 144292.09988631494 }, { "content": "pub fn get_block<S: Into<String>>(block0: S) -> Result<Block0Configuration, Block0Error> {\n\n let block0 = block0.into();\n\n let block = {\n\n if Path::new(&block0).exists() {\n\n let reader = std::fs::OpenOptions::new()\n\n .create(false)\n\n .write(false)\n\n .read(true)\n\n .append(false)\n\n .open(&block0)?;\n\n let reader = BufReader::new(reader);\n\n Block::deserialize(reader)?\n\n } else if Url::parse(&block0).is_ok() {\n\n let response = reqwest::blocking::get(&block0)?;\n\n let block0_bytes = 
response.bytes()?.to_vec();\n\n Block::read(&mut ReadBuf::from(&block0_bytes))?\n\n } else {\n\n panic!(\" block0 should be either path to filesystem or url \");\n\n }\n\n };\n\n Block0Configuration::from_block(&block).map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/quibitous-automation/src/testing/block0.rs", "rank": 38, "score": 140699.12661841096 }, { "content": "#[ignore]\n\npub fn max_connections() {\n\n let mut controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER1))\n\n .with_node(Node::new(LEADER2).with_trusted_peer(LEADER1))\n\n .with_node(Node::new(LEADER3).with_trusted_peer(LEADER1))\n\n .with_node(Node::new(LEADER4).with_trusted_peer(LEADER1)),\n\n )\n\n .blockchain_config(\n\n Blockchain::default().with_leaders(vec![LEADER1, LEADER2, LEADER3, LEADER4]),\n\n )\n\n .build()\n\n .unwrap();\n\n\n\n let leader1 = controller\n\n .spawn(\n\n SpawnParams::new(LEADER1)\n\n .in_memory()\n\n .max_inbound_connections(2),\n", "file_path": "testing/quibitous-integration-tests/src/networking/p2p/connections.rs", "rank": 39, "score": 139965.2793435349 }, { "content": "pub fn spawn_network(config: Config, topology: Topology) -> Result<(), Error> {\n\n let controller = NetworkBuilder::default()\n\n .topology(topology)\n\n .blockchain_config(config.build_blockchain())\n\n .session_settings(config.session)\n\n .build()?;\n\n\n\n let user_integration = quibitous_user_interaction();\n\n\n\n let mut interactive_commands = QuibitousInteractiveCommandExec {\n\n controller: UserInteractionController::new(controller),\n\n };\n\n\n\n user_integration\n\n .interact(&mut interactive_commands)\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/quantricity/src/spawn/interactive.rs", "rank": 40, "score": 139015.33550606866 }, { "content": "pub fn assert(statement: bool, info: &str) -> Result<(), Error> {\n\n if !statement {\n\n return Err(Error::VerificationFailed(info.to_string()));\n\n }\n\n Ok(())\n\n}\n", 
"file_path": "testing/quibitous-automation/src/testing/verify.rs", "rank": 41, "score": 139015.33550606866 }, { "content": "pub fn read_initials<P: AsRef<Path>>(initials: P) -> Result<Vec<Initial>, Block0Error> {\n\n let contents = std::fs::read_to_string(&initials)?;\n\n let value: serde_json::Value = serde_json::from_str(&contents)?;\n\n let initial = serde_json::to_string(&value[\"initial\"])?;\n\n serde_json::from_str(&initial).map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/quibitous-automation/src/testing/block0.rs", "rank": 42, "score": 137552.9651578845 }, { "content": "fn match_block0(expected: HeaderHash, peer_responded: HeaderHash) -> Result<(), ConnectError> {\n\n if expected == peer_responded {\n\n Ok(())\n\n } else {\n\n Err(ConnectError::Block0Mismatch {\n\n expected,\n\n peer_responded,\n\n })\n\n }\n\n}\n", "file_path": "quibitous/src/network/client/connect.rs", "rank": 43, "score": 136588.9035701551 }, { "content": "fn encode_block_0(common: Common) -> Result<(), Error> {\n\n let reader = common.input.open()?;\n\n let genesis: Block0Configuration =\n\n serde_yaml::from_reader(reader).map_err(Error::GenesisFileCorrupted)?;\n\n let block = genesis.to_block();\n\n Ledger::new(block.id(), block.fragments())?;\n\n block\n\n .serialize(common.open_output()?)\n\n .map_err(Error::BlockSerializationFailed)\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/block/mod.rs", "rank": 44, "score": 136508.35809684434 }, { "content": "fn decode_block_0(common: Common) -> Result<(), Error> {\n\n let block = common.input.load_block()?;\n\n let yaml = Block0Configuration::from_block(&block)?;\n\n serde_yaml::to_writer(common.open_output()?, &yaml).map_err(Error::GenesisSerializationFailed)\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/block/mod.rs", "rank": 45, "score": 136508.35809684434 }, { "content": "pub fn read_genesis_yaml<P: AsRef<Path>>(genesis: P) -> Result<Block0Configuration, Block0Error> {\n\n let contents = std::fs::read_to_string(&genesis)?;\n\n 
serde_yaml::from_str(&contents).map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/quibitous-automation/src/testing/block0.rs", "rank": 46, "score": 136321.3536008949 }, { "content": "pub fn request_settings(args: RestArgs) -> Result<SettingsDto, Error> {\n\n serde_json::from_str(&(args.client()?.get(&[\"v0\", \"settings\"]).execute()?.text()?))\n\n .map_err(Error::SerdeError)\n\n}\n", "file_path": "qcli/src/qcli_lib/rest/v0/settings/mod.rs", "rank": 47, "score": 134999.64374086697 }, { "content": "enum GenesisBlockOption<'a> {\n\n None,\n\n Hash(&'a str),\n\n Path(&'a Path),\n\n}\n\n\n\nimpl<'a> CommandBuilder<'a> {\n\n pub fn new(bin: &'a Path) -> Self {\n\n CommandBuilder {\n\n bin,\n\n config: None,\n\n genesis_block: GenesisBlockOption::None,\n\n secret: None,\n\n log_file: None,\n\n rewards_history: false,\n\n faketime: None,\n\n }\n\n }\n\n\n\n pub fn config(mut self, path: &'a Path) -> Self {\n", "file_path": "testing/quibitous-automation/src/quibitous/starter/commands.rs", "rank": 48, "score": 134324.60414364922 }, { "content": "#[test]\n\npub fn pull_blocks_correct_hashes_all_blocks() {\n\n let setup = setup::client::default();\n\n std::thread::sleep(Duration::from_secs(10)); // wait for the server to produce some blocks\n\n\n\n let genesis_block_hash = Hash::from_str(setup.config.genesis_block_hash()).unwrap();\n\n let blocks = setup\n\n .client\n\n .pull_blocks(&[genesis_block_hash], setup.client.tip().id())\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n assert!(\n\n is_long_prefix(&block_hashes_from_logs, &blocks_hashes),\n\n \"server blocks: {:?} | client blocks: {:?}\",\n\n block_hashes_from_logs,\n\n blocks_hashes\n\n );\n\n}\n\n\n\n// L1022 PullBlocks correct hashes\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 49, "score": 132734.89888777988 }, { 
"content": "pub fn decode_block0<Q: AsRef<Path>>(block0: Vec<u8>, genesis_yaml: Q) -> Result<(), Block0Error> {\n\n let writer: std::fs::File = std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .read(false)\n\n .append(false)\n\n .truncate(true)\n\n .open(&genesis_yaml)?;\n\n\n\n let yaml = Block0Configuration::from_block(&Block::deserialize(&*block0)?)?;\n\n Ok(serde_yaml::to_writer(writer, &yaml)?)\n\n}\n\n\n\n#[derive(Error, Debug)]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum Block0Error {\n\n #[error(transparent)]\n\n IapyxStatsCommandError(#[from] reqwest::Error),\n\n #[error(transparent)]\n\n Block0ParseError(#[from] Block0ConfigurationError),\n", "file_path": "testing/quibitous-automation/src/testing/block0.rs", "rank": 50, "score": 132365.6980233431 }, { "content": "#[test]\n\npub fn test_watch_tip_subscription_is_current_tip() {\n\n let setup = setup::client::bootstrap(\n\n ConfigurationBuilder::new()\n\n .with_slot_duration(3u8)\n\n .to_owned(),\n\n );\n\n let rest = setup.server.rest();\n\n let watch_client = setup.watch_client;\n\n\n\n let notif = watch_client.tip_subscription();\n\n\n\n let (watch_tip, cond) = &*notif;\n\n\n\n let mut iters_remaining: usize = 10;\n\n let mut guard = watch_tip.lock().unwrap();\n\n\n\n loop {\n\n println!(\"iter remaining: {}\", iters_remaining);\n\n let header = &*guard;\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 51, "score": 131236.26756803654 }, { "content": "#[test]\n\npub fn test_watch_block_subscription_blocks_are_in_logs() {\n\n use std::collections::HashSet;\n\n\n\n let setup = setup::client::default();\n\n\n\n let watch_client = setup.watch_client;\n\n\n\n let (sender, receiver) = std::sync::mpsc::channel();\n\n\n\n watch_client.block_subscription(sender);\n\n\n\n let mut ids = HashSet::new();\n\n\n\n const BLOCKS_TO_TEST: usize = 20;\n\n\n\n while let Ok(block) = receiver.recv() {\n\n 
assert!(ids.insert(block.unwrap().id()));\n\n\n\n if ids.len() == BLOCKS_TO_TEST {\n\n break;\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 52, "score": 131108.91084744566 }, { "content": "pub fn deserialize_hash<'de, D>(deserializer: D) -> Result<Blake2b256, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let hash_visitor = HashVisitor::new();\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_str(hash_visitor)\n\n } else {\n\n deserializer.deserialize_bytes(hash_visitor)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 53, "score": 130829.28420768514 }, { "content": "pub fn compute_interval<I>(\n\n bounds: PaginationInterval<I>,\n\n pagination_arguments: ValidatedPaginationArguments<I>,\n\n) -> FieldResult<(PaginationInterval<I>, PageMeta)>\n\nwhere\n\n I: TryFrom<u64> + Clone,\n\n u64: From<I>,\n\n{\n\n let pagination_arguments = pagination_arguments.cursors_into::<u64>();\n\n let bounds = bounds.bounds_into::<u64>();\n\n\n\n let (page_interval, has_next_page, has_previous_page, total_count) = match bounds {\n\n PaginationInterval::Empty => (PaginationInterval::Empty, false, false, 0u64),\n\n PaginationInterval::Inclusive(total_elements) => {\n\n let InclusivePaginationInterval {\n\n upper_bound,\n\n lower_bound,\n\n } = total_elements;\n\n\n\n let page = compute_range_boundaries(total_elements, pagination_arguments);\n", "file_path": "explorer/src/api/graphql/connections.rs", "rank": 54, "score": 130818.60909900418 }, { "content": "pub fn parse_ed25519_secret_key(bech32_str: &str) -> Result<EitherEd25519SecretKey, Error> {\n\n match SecretKey::try_from_bech32_str(bech32_str) {\n\n Ok(sk) => Ok(EitherEd25519SecretKey::Extended(sk)),\n\n Err(_) => SecretKey::try_from_bech32_str(bech32_str)\n\n .map(EitherEd25519SecretKey::Normal)\n\n .map_err(Error::SecretKeyMalformed),\n\n }\n\n}\n", "file_path": "qcli/src/qcli_lib/utils/key_parser.rs", "rank": 55, "score": 
130621.91610830877 }, { "content": "pub fn read_yaml<D: DeserializeOwned>(path: &Option<impl AsRef<Path>>) -> Result<D, ReadYamlError> {\n\n let reader = open_file_read(path)?;\n\n let yaml = serde_yaml::from_reader(reader)?;\n\n Ok(yaml)\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/io.rs", "rank": 56, "score": 130332.21816970293 }, { "content": "#[test]\n\npub fn fully_connected() {\n\n let mut controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER_3))\n\n .with_node(Node::new(LEADER_1).with_trusted_peer(LEADER_3))\n\n .with_node(Node::new(LEADER_2).with_trusted_peer(LEADER_1))\n\n .with_node(\n\n Node::new(LEADER_4)\n\n .with_trusted_peer(LEADER_2)\n\n .with_trusted_peer(LEADER_3),\n\n ),\n\n )\n\n .blockchain_config(Blockchain::default().with_leaders(vec![LEADER_1, LEADER_2, LEADER_3]))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(2_000_000_000)\n\n .build(),\n\n )\n\n .wallet_template(\n", "file_path": "testing/quibitous-integration-tests/src/networking/topology.rs", "rank": 57, "score": 128446.50938053313 }, { "content": "pub fn post_fragment(args: RestArgs, fragment: Fragment) -> Result<String, Error> {\n\n let fragment_id = args\n\n .client()?\n\n .post(&[\"v0\", \"message\"])\n\n .body(fragment.serialize_as_vec()?)\n\n .execute()?\n\n .text()?;\n\n Ok(fragment_id)\n\n}\n", "file_path": "qcli/src/qcli_lib/rest/v0/message/mod.rs", "rank": 58, "score": 128334.39719399356 }, { "content": "pub fn spawn_network(config: Config, mut topology: Topology, args: Args) -> Result<(), Error> {\n\n let (tx, rx) = channel();\n\n\n\n let mut monitor_controller = MonitorControllerBuilder::new(&config.session.title)\n\n .topology(topology.clone())\n\n .blockchain(config.build_blockchain())\n\n .build(config.session.clone())?;\n\n\n\n let mut processes: HashMap<NodeAlias, MonitorNode> = HashMap::new();\n\n\n\n while !topology.nodes.is_empty() {\n\n let alias = topology\n\n .nodes\n\n 
.values()\n\n .find(|n| n.trusted_peers.is_empty())\n\n .map(|n| n.alias.clone())\n\n .ok_or(Error::CircularTrust)?;\n\n\n\n let spawn_params = config.node_spawn_params(&alias)?;\n\n\n", "file_path": "testing/quantricity/src/spawn/monitor.rs", "rank": 59, "score": 127134.72540756036 }, { "content": "pub fn spawn_network(config: Config, mut topology: Topology, args: Args) -> Result<(), Error> {\n\n println!(\"Building network...\");\n\n let mut controller = NetworkBuilder::default()\n\n .topology(topology.clone())\n\n .session_settings(config.session.clone())\n\n .blockchain_config(config.build_blockchain())\n\n .build()?;\n\n\n\n let mut processes: HashMap<NodeAlias, QuibitousProcess> = HashMap::new();\n\n\n\n while !topology.nodes.is_empty() {\n\n let alias = topology\n\n .nodes\n\n .values()\n\n .find(|n| n.trusted_peers.is_empty())\n\n .map(|n| n.alias.clone())\n\n .ok_or(Error::CircularTrust)?;\n\n\n\n let spawn_params = config.node_spawn_params(&alias)?;\n\n\n", "file_path": "testing/quantricity/src/spawn/standard.rs", "rank": 60, "score": 127134.72540756036 }, { "content": "#[test]\n\npub fn node_trust_itself() {\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(CLIENT).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(1_000_000)\n\n .delegated_to(CLIENT)\n\n .build(),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(BOB)\n\n .with(1_000_000)\n\n .delegated_to(SERVER)\n\n .build(),\n\n )\n", "file_path": "testing/quibitous-integration-tests/src/networking/p2p/connections.rs", "rank": 61, "score": 126314.19759307092 }, { "content": "pub fn read_secret_key(secret_key_path: Option<PathBuf>) -> Result<EitherEd25519SecretKey, Error> {\n\n match secret_key_path {\n\n Some(path) => read_ed25519_secret_key_from_file(&Some(path)),\n\n 
None => {\n\n let key =\n\n rpassword::prompt_password_stdout(\"Introduce the bech32 format secret key:\\n\")?;\n\n parse_ed25519_secret_key(&key)\n\n }\n\n }\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/key_parser.rs", "rank": 62, "score": 125268.46962589497 }, { "content": "pub fn deserialize_public<'de, D, A>(deserializer: D) -> Result<PublicKey<A>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n A: AsymmetricPublicKey,\n\n{\n\n let public_key_visitor = PublicKeyVisitor::new();\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_str(public_key_visitor)\n\n } else {\n\n deserializer.deserialize_bytes(public_key_visitor)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 63, "score": 125245.85740979153 }, { "content": "pub fn deserialize_secret<'de, D, A>(deserializer: D) -> Result<SecretKey<A>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n A: AsymmetricKey,\n\n SecretKey<A>: Bech32,\n\n{\n\n let secret_key_visitor = SecretKeyVisitor::new();\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_str(secret_key_visitor)\n\n } else {\n\n deserializer.deserialize_bytes(secret_key_visitor)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 64, "score": 125245.85740979153 }, { "content": "#[test]\n\npub fn tip_request() {\n\n let setup = setup::client::bootstrap(\n\n ConfigurationBuilder::new()\n\n .with_slot_duration(15)\n\n .to_owned(),\n\n );\n\n\n\n setup\n\n .client\n\n .wait_for_chain_length(1.into(), CHAIN_GROWTH_TIMEOUT);\n\n\n\n let tip_header = setup.client.tip();\n\n let block_hashes = setup.server.logger.get_created_blocks_hashes();\n\n\n\n if *block_hashes.last().unwrap() != tip_header.hash() {\n\n //if the server produces another block compare with second last\n\n assert_eq!(block_hashes[block_hashes.len() - 2], tip_header.hash());\n\n }\n\n}\n\n\n\n// L1009 GetHeaders correct hash\n", "file_path": 
"testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 65, "score": 124300.7475374282 }, { "content": "pub fn serialize_hash<S>(hash: &Blake2b256, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if serializer.is_human_readable() {\n\n hash.to_string().serialize(serializer)\n\n } else {\n\n hash.as_ref().serialize(serializer)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 66, "score": 123669.04536378372 }, { "content": "fn exec_get(block_id: String, args: RestArgs) -> Result<(), Error> {\n\n let response = args\n\n .client()?\n\n .get(&[\"v0\", \"block\", &block_id])\n\n .execute()?\n\n .bytes()?;\n\n println!(\"{}\", hex::encode(&response));\n\n Ok(())\n\n}\n", "file_path": "qcli/src/qcli_lib/rest/v0/block/subcommand.rs", "rank": 67, "score": 123157.44398802223 }, { "content": "pub fn encode_block0<P: AsRef<Path>, Q: AsRef<Path>>(\n\n genesis: P,\n\n block0: Q,\n\n) -> Result<(), Block0Error> {\n\n let input: std::fs::File = std::fs::OpenOptions::new()\n\n .create(false)\n\n .write(false)\n\n .read(true)\n\n .append(false)\n\n .truncate(false)\n\n .open(&genesis)?;\n\n\n\n let output: std::fs::File = std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .read(false)\n\n .append(false)\n\n .truncate(true)\n\n .open(&block0)?;\n\n\n\n let genesis: Block0Configuration = serde_yaml::from_reader(input)?;\n\n let block = genesis.to_block();\n\n Ledger::new(block.id(), block.fragments())?;\n\n block.serialize(&output).map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/quibitous-automation/src/testing/block0.rs", "rank": 68, "score": 122773.64596388368 }, { "content": "fn print_hash(input: Input) -> Result<(), Error> {\n\n let block = input.load_block()?;\n\n println!(\"{}\", block.id());\n\n Ok(())\n\n}\n\n\n\n/// create block 0 of the blockchain (i.e. 
the genesis block)\n\n#[derive(StructOpt)]\n\n#[structopt(name = \"genesis\", rename_all = \"kebab-case\")]\n\npub enum Genesis {\n\n /// Create a default Genesis file with appropriate documentation\n\n /// to help creating the YAML file\n\n Init,\n\n\n\n /// create the block 0 file (the genesis block of the blockchain)\n\n /// from a given yaml file\n\n ///\n\n Encode(Common),\n\n\n\n /// Decode the block 0 and print the corresponding YAML file\n", "file_path": "qcli/src/qcli_lib/block/mod.rs", "rank": 69, "score": 122728.1419103641 }, { "content": "pub fn _read_secret_key_from_file<A, P>(path: &Option<P>) -> Result<SecretKey<A>, Error>\n\nwhere\n\n A: AsymmetricKey,\n\n SecretKey<A>: Bech32,\n\n P: AsRef<Path>,\n\n{\n\n let bech32_str: String =\n\n io::read_line(path).map_err(|source| Error::SecretKeyFileReadFailed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n })?;\n\n SecretKey::try_from_bech32_str(&bech32_str).map_err(|source| Error::SecretKeyFileMalformed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n })\n\n}\n\n\n", "file_path": "qcli/src/qcli_lib/utils/key_parser.rs", "rank": 70, "score": 122583.99327315364 }, { "content": "pub fn copy_initial_storage_if_used(\n\n config: &ClientLoadConfig,\n\n storage_folder: &str,\n\n temp_dir: &TempDir,\n\n) {\n\n if let Some(storage) = config.initial_storage() {\n\n let client_storage: PathBuf = temp_dir.child(storage_folder).path().into();\n\n if client_storage.exists() {\n\n fs::remove_dir_all(&client_storage).expect(\"cannot remove existing client storage\");\n\n }\n\n fs::create_dir(&client_storage).expect(\"cannot create client storage\");\n\n file::copy_folder(storage, &client_storage, true);\n\n }\n\n}\n\n\n", "file_path": "testing/mfive/src/mfive_lib/bootstrap/scenario/mod.rs", "rank": 71, "score": 122375.59742117801 }, { "content": "#[test]\n\npub fn test_blocks_are_being_created_for_more_than_15_minutes() {\n\n let mut sender = silica::Wallet::default();\n\n let mut receiver = 
silica::Wallet::default();\n\n let qcli: JCli = Default::default();\n\n\n\n let (quibitous, _) = startup::start_stake_pool(\n\n &[sender.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(60)\n\n .with_consensus_genesis_optimum_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(4)\n\n .with_epoch_stability_depth(10)\n\n .with_kes_update_speed(KesUpdateSpeed::new(43200).unwrap()),\n\n )\n\n .unwrap();\n\n\n\n let output_value = 1_u64;\n\n let benchmark = benchmark_endurance(\"test_blocks_are_created_for_more_than_15_minutes\")\n\n .target(Duration::from_secs(900))\n", "file_path": "testing/quibitous-integration-tests/src/non_functional/transaction.rs", "rank": 72, "score": 122251.93381507306 }, { "content": "#[test]\n\npub fn test_blocks_are_being_created_for_7_hours() {\n\n let qcli: JCli = Default::default();\n\n let duration_48_hours = Duration::from_secs(25_200);\n\n\n\n let mut receiver = silica::Wallet::default();\n\n let mut sender = silica::Wallet::default();\n\n let (quibitous, _) = startup::start_stake_pool(\n\n &[sender.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(20)\n\n .with_consensus_genesis_optimum_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(3)\n\n .with_kes_update_speed(KesUpdateSpeed::new(43200).unwrap())\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1_000_000usize.into(),\n\n log_max_entries: 1_000_000usize.into(),\n\n persistent_log: None,\n\n }),\n\n )\n", "file_path": "testing/quibitous-integration-tests/src/non_functional/soak.rs", "rank": 73, "score": 122251.93381507306 }, { "content": "pub fn persist_dir_on_panic<S1: AsRef<str>, S2: AsRef<str>>(\n\n temp_dir: Option<TestingDirectory>,\n\n additional_contents: Vec<(S1, S2)>,\n\n) {\n\n if panicking() {\n\n let logs_dir = match tempfile::Builder::new().prefix(\"quibitous_\").tempdir() {\n\n Ok(dir) => dir.into_path(),\n\n Err(e) => {\n\n eprintln!(\"Could not create logs dir: 
{}\", e);\n\n return;\n\n }\n\n };\n\n\n\n println!(\n\n \"persisting node temp_dir after panic: {}\",\n\n logs_dir.display()\n\n );\n\n\n\n if let Some(dir) = temp_dir {\n\n let options = CopyOptions {\n", "file_path": "testing/quibitous-automation/src/testing/panic.rs", "rank": 74, "score": 121145.81197125277 }, { "content": "fn initial(block0: &Block) -> Result<&ConfigParams, Block0Malformed> {\n\n for fragment in block0.fragments() {\n\n if let Fragment::Initial(init) = fragment {\n\n return Ok(init);\n\n }\n\n }\n\n Err(Block0Malformed::NoInitialSettings)\n\n}\n", "file_path": "modules/blockchain/src/block0.rs", "rank": 75, "score": 121108.85372060335 }, { "content": "#[test]\n\npub fn passive_node_last_block_info() {\n\n let mut network_controller = build_network!()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER))\n\n .with_node(Node::new(PASSIVE).with_trusted_peer(LEADER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(LEADER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(\"alice\")\n\n .with(1_000_000)\n\n .delegated_to(LEADER)\n\n .build(),\n\n )\n\n .wallet_template(WalletTemplateBuilder::new(\"bob\").with(1_000_000).build())\n\n .build()\n\n .unwrap();\n\n\n\n let leader = network_controller\n\n .spawn(SpawnParams::new(LEADER).in_memory())\n", "file_path": "testing/quibitous-integration-tests/src/networking/p2p/stats.rs", "rank": 76, "score": 120389.5540176767 }, { "content": "#[test]\n\npub fn get_blocks_incorrect_hash() {\n\n let setup = setup::client::default();\n\n let fake_hash: Hash = TestGen::hash();\n\n assert_eq!(\n\n MockClientError::InvalidRequest(format!(\n\n \"not found (block {} is not known to this node)\",\n\n fake_hash\n\n )),\n\n setup.client.headers(&[fake_hash]).err().unwrap(),\n\n \"wrong error\"\n\n );\n\n}\n\n\n\n// L1013 PullBlocksToTip correct hash\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 77, "score": 120389.5540176767 }, { 
"content": "#[test]\n\npub fn get_blocks_correct_hash() {\n\n let setup = setup::client::default();\n\n\n\n let tip = setup.client.tip();\n\n assert!(setup.client.get_blocks(&[tip.hash()]).is_ok());\n\n}\n\n\n\n// L1012 GetBlocks incorrect hash\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 78, "score": 120389.5540176767 }, { "content": "#[test]\n\npub fn leader_node_last_block_info() {\n\n let mut network_controller = build_network!()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER))\n\n .with_node(Node::new(LEADER_CLIENT).with_trusted_peer(LEADER)),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(\"alice\")\n\n .with(1_000_000)\n\n .delegated_to(LEADER)\n\n .build(),\n\n )\n\n .wallet_template(WalletTemplateBuilder::new(\"bob\").with(1_000_000).build())\n\n .blockchain_config(Blockchain::default().with_leader(LEADER))\n\n .build()\n\n .unwrap();\n\n\n\n let leader = network_controller\n\n .spawn(SpawnParams::new(LEADER).in_memory())\n", "file_path": "testing/quibitous-integration-tests/src/networking/p2p/stats.rs", "rank": 79, "score": 120389.5540176767 }, { "content": "pub fn send_transaction_and_ensure_block_was_produced(\n\n transation_messages: &[String],\n\n quibitous: &QuibitousProcess,\n\n) -> Result<(), NodeStuckError> {\n\n let qcli: JCli = Default::default();\n\n let block_tip_before_transaction = qcli.rest().v0().tip(&quibitous.rest_uri());\n\n let block_counter_before_transaction = quibitous.logger.get_created_blocks_counter();\n\n\n\n qcli.fragment_sender(quibitous)\n\n .send_many(transation_messages)\n\n .wait_until_all_processed(&Default::default())\n\n .map_err(NodeStuckError::InternalQcliError)?;\n\n\n\n let block_tip_after_transaction = qcli.rest().v0().tip(quibitous.rest_uri());\n\n let block_counter_after_transaction = quibitous.logger.get_created_blocks_counter();\n\n\n\n if block_tip_before_transaction == block_tip_after_transaction {\n\n return 
Err(NodeStuckError::TipIsNotMoving {\n\n tip_hash: block_tip_after_transaction,\n\n logs: quibitous.logger.get_log_content(),\n", "file_path": "testing/quibitous-integration-tests/src/non_functional/mod.rs", "rank": 80, "score": 120389.5540176767 }, { "content": "#[test]\n\npub fn upload_block_incompatible_protocol() {\n\n let setup = setup::client::default();\n\n let tip_header = setup.client.tip();\n\n let stake_pool = StakePoolBuilder::new().build();\n\n\n\n let time_era = TimeEra::new(\n\n 0u64.into(),\n\n Epoch(0u32),\n\n setup\n\n .config\n\n .block0_configuration()\n\n .blockchain_configuration\n\n .slots_per_epoch\n\n .into(),\n\n );\n\n\n\n let block = GenesisOptimumBlockBuilder::new()\n\n .with_parent(&tip_header)\n\n .build(&stake_pool, &time_era);\n\n\n\n assert_eq!(\n\n MockClientError::InvalidRequest(\n\n \"invalid request data (the block header verification failed)\".into()\n\n ),\n\n setup.client.upload_blocks(block).err().unwrap()\n\n );\n\n}\n\n\n\n// L1020 Push headers incorrect header\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 81, "score": 120389.5540176767 }, { "content": "pub fn deserialize_signature<'de, D, T, A>(deserializer: D) -> Result<Signature<T, A>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n A: VerificationAlgorithm,\n\n{\n\n let signature_visitor = SignatureVisitor::new();\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_str(signature_visitor)\n\n } else {\n\n deserializer.deserialize_bytes(signature_visitor)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 82, "score": 120310.62809292071 }, { "content": "type GetNextBlockScheduler = FireForgetScheduler<HeaderHash, NodeId, ()>;\n\n\n\nconst TIP_UPDATE_QUEUE_SIZE: usize = 10;\n\n\n\nconst DEFAULT_TIMEOUT_PROCESS_LEADERSHIP: u64 = 5;\n\nconst DEFAULT_TIMEOUT_PROCESS_ANNOUNCEMENT: u64 = 5;\n\nconst DEFAULT_TIMEOUT_PROCESS_BLOCKS: u64 = 60;\n\nconst 
DEFAULT_TIMEOUT_PROCESS_HEADERS: u64 = 60;\n\n\n\nconst PULL_HEADERS_SCHEDULER_CONFIG: FireForgetSchedulerConfig = FireForgetSchedulerConfig {\n\n max_running: 16,\n\n max_running_same_task: 2,\n\n command_channel_size: 1024,\n\n timeout: Duration::from_millis(500),\n\n};\n\n\n\nconst GET_NEXT_BLOCK_SCHEDULER_CONFIG: FireForgetSchedulerConfig = FireForgetSchedulerConfig {\n\n max_running: 16,\n\n max_running_same_task: 2,\n\n command_channel_size: 1024,\n", "file_path": "quibitous/src/blockchain/process.rs", "rank": 83, "score": 119346.90772294276 }, { "content": "pub fn wait_for_date(target_block_date: BlockDate, mut rest: QuibitousRest) {\n\n let settings = rest.settings().unwrap();\n\n while is_it_due(get_current_date(&mut rest), target_block_date) {\n\n std::thread::sleep(std::time::Duration::from_secs(settings.slot_duration));\n\n }\n\n}\n\n\n", "file_path": "testing/quibitous-automation/src/testing/time.rs", "rank": 84, "score": 119188.25074030922 }, { "content": "#[derive(Clone)]\n\nstruct Tip(Arc<RwLock<HeaderHash>>);\n\n\n\n#[derive(Clone)]\n\npub struct ExplorerDb {\n\n /// Structure that keeps all the known states to allow easy branch management\n\n /// each new block is indexed by getting its previous `State` from the multiverse\n\n /// and inserted a new updated one.\n\n multiverse: Multiverse,\n\n /// This keeps track of the longest chain seen until now. 
All the queries are\n\n /// performed using the state of this branch, the HeaderHash is used as key for the\n\n /// multiverse, and the ChainLength is used in the updating process.\n\n longest_chain_tip: Tip,\n\n pub blockchain_config: BlockchainConfig,\n\n stable_store: StableIndex,\n\n tip_broadcast: tokio::sync::broadcast::Sender<(HeaderHash, multiverse::Ref)>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct StableIndex {\n\n confirmed_block_chain_length: Arc<AtomicU32>,\n", "file_path": "explorer/src/db/mod.rs", "rank": 85, "score": 119021.0448670129 }, { "content": "fn parse_block_hash(hex: &str) -> Result<Hash, Error> {\n\n Blake2b256::from_str(hex)\n\n .map_err(Into::into)\n\n .map(Into::into)\n\n}\n\n\n", "file_path": "quibitous/src/rest/v0/logic.rs", "rank": 86, "score": 119012.27011943782 }, { "content": "pub fn serialize_public<S, A>(key: &PublicKey<A>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n A: AsymmetricPublicKey,\n\n{\n\n if serializer.is_human_readable() {\n\n key.to_bech32_str().serialize(serializer)\n\n } else {\n\n key.as_ref().serialize(serializer)\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 87, "score": 118794.87720415111 }, { "content": "pub fn serialize_secret<S, A>(key: &SecretKey<A>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n A: AsymmetricKey,\n\n SecretKey<A>: Bech32,\n\n{\n\n if serializer.is_human_readable() {\n\n key.to_bech32_str().serialize(serializer)\n\n } else {\n\n panic!(\"binary encoding for serialization of the secret key does not exist in chain-crypto\")\n\n }\n\n}\n\n\n", "file_path": "quibitous-lib/src/crypto/serde.rs", "rank": 88, "score": 118794.87720415111 }, { "content": "#[test]\n\npub fn topics_of_interest_influences_node_sync_ability() {\n\n const FAST_CLIENT: &str = \"FAST_CLIENT\";\n\n const SLOW_CLIENT: &str = \"SLOW_CLIENT\";\n\n\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n 
Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(FAST_CLIENT).with_trusted_peer(SERVER))\n\n .with_node(Node::new(SLOW_CLIENT).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(1_000_000)\n\n .delegated_to(SERVER)\n\n .build(),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(BOB)\n", "file_path": "testing/quibitous-integration-tests/src/networking/p2p/connections.rs", "rank": 89, "score": 118679.99646240375 }, { "content": "#[test]\n\npub fn test_correct_hash_is_returned_for_correct_block() {\n\n let qcli: JCli = Default::default();\n\n let content = qcli.genesis().init();\n\n let temp_dir = TempDir::new().unwrap();\n\n let yaml_file = temp_dir.child(\"init_file.yaml\");\n\n yaml_file.write_str(&content).unwrap();\n\n let block_file = temp_dir.child(\"block-0.bin\");\n\n\n\n qcli.genesis().encode(yaml_file.path(), &block_file);\n\n qcli.genesis().hash(block_file.path());\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/genesis/hash.rs", "rank": 90, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn upload_block_nonexisting_stake_pool() {\n\n let setup = setup::client::bootstrap(\n\n ConfigurationBuilder::new()\n\n .with_block0_consensus(ConsensusVersion::GenesisOptimum)\n\n .to_owned(),\n\n );\n\n let tip_header = setup.client.tip();\n\n let stake_pool = StakePoolBuilder::new().build();\n\n\n\n let time_era = TimeEra::new(\n\n 0u64.into(),\n\n Epoch(0u32),\n\n setup\n\n .config\n\n .block0_configuration()\n\n .blockchain_configuration\n\n .slots_per_epoch\n\n .into(),\n\n );\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 91, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn pull_blocks_hashes_wrong_order() {\n\n let setup = setup::client::default();\n\n\n\n setup\n\n .client\n\n 
.wait_for_chain_length(10.into(), CHAIN_GROWTH_TIMEOUT);\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n let start = 2;\n\n let end = 8;\n\n let expected_hashes = block_hashes_from_logs[start..end].to_vec();\n\n\n\n let result = setup.client.pull_blocks(\n\n &[expected_hashes.last().copied().unwrap()],\n\n expected_hashes[0],\n\n );\n\n\n\n assert!(result.is_err());\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 92, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn test_genesis_block_is_built_from_correct_yaml() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let config = Block0ConfigurationBuilder::new().build();\n\n let config_file = temp_dir.child(\"genesis.yaml\");\n\n let content = serde_yaml::to_string(&config).unwrap();\n\n config_file.write_str(&content).unwrap();\n\n let output_block_file = temp_dir.child(\"block-0.bin\");\n\n let qcli: JCli = Default::default();\n\n qcli.genesis()\n\n .encode(config_file.path(), &output_block_file);\n\n\n\n assert!(output_block_file.path().exists());\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/genesis/encode.rs", "rank": 93, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn test_non_empty_hash_is_returned_for_block0() {\n\n let qcli: JCli = Default::default();\n\n let quibitous = Starter::new().start().unwrap();\n\n let rest_uri = quibitous.rest_uri();\n\n let block_id = qcli.rest().v0().tip(&rest_uri);\n\n qcli.rest().v0().block().get(block_id, rest_uri);\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/block.rs", "rank": 94, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn test_genesis_block_is_built_from_init_yaml() {\n\n let qcli: JCli = Default::default();\n\n\n\n let content = qcli.genesis().init();\n\n let temp_dir = TempDir::new().unwrap();\n\n let yaml_file = temp_dir.child(\"init_file.yaml\");\n\n 
yaml_file.write_str(&content).unwrap();\n\n let block_file = temp_dir.child(\"block-0.bin\");\n\n qcli.genesis().encode(yaml_file.path(), &block_file);\n\n}\n", "file_path": "testing/quibitous-integration-tests/src/qcli/genesis/init.rs", "rank": 95, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn test_increased_block_content_max_size() {\n\n let receivers: Vec<Wallet> = std::iter::from_fn(|| Some(silica::Wallet::default()))\n\n .take(98)\n\n .collect();\n\n let mut stake_pool_owner = silica::Wallet::default();\n\n\n\n let stake_pool_owner_stake = 1;\n\n\n\n let (quibitous, _stake_pools) = startup::start_stake_pool(\n\n &[stake_pool_owner.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_consensus_genesis_optimum_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_block_content_max_size(8192.into()),\n\n )\n\n .unwrap();\n\n\n\n let settings = quibitous.rest().settings().unwrap();\n\n\n\n let transaction_sender = FragmentSender::new(\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/genesis/fragments.rs", "rank": 96, "score": 118605.34832378927 }, { "content": "#[test]\n\npub fn pull_blocks_correct_hashes_partial() {\n\n let setup = setup::client::default();\n\n setup\n\n .client\n\n .wait_for_chain_length(10.into(), CHAIN_GROWTH_TIMEOUT);\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n let start = 2;\n\n let end = 8;\n\n let expected_hashes = block_hashes_from_logs[start..end].to_vec();\n\n\n\n let blocks = setup\n\n .client\n\n .pull_blocks(\n\n &[expected_hashes[0]],\n\n expected_hashes.last().copied().unwrap(),\n\n )\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n\n\n assert_eq!(&expected_hashes[1..], &blocks_hashes);\n\n}\n\n\n\n// L1023 PullBlocks to and from in wrong order\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/grpc/client_tests.rs", "rank": 97, "score": 118605.34832378927 }, { 
"content": "#[test]\n\npub fn test_block_content_max_size_below_transaction_size() {\n\n let receivers: Vec<Wallet> = std::iter::from_fn(|| Some(silica::Wallet::default()))\n\n .take(98)\n\n .collect();\n\n let mut stake_pool_owner = silica::Wallet::default();\n\n\n\n let stake_pool_owner_stake = 1;\n\n\n\n let (quibitous, _stake_pools) = startup::start_stake_pool(\n\n &[stake_pool_owner.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_consensus_genesis_optimum_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_block_content_max_size(4092.into()),\n\n )\n\n .unwrap();\n\n\n\n let fragment_sender = FragmentSender::from_with_setup(\n\n quibitous.block0_configuration(),\n\n FragmentSenderSetup::should_stop_at_error(),\n", "file_path": "testing/quibitous-integration-tests/src/quibitous/genesis/fragments.rs", "rank": 98, "score": 116894.49583122502 }, { "content": "pub fn test_correct_error_is_returned_for_incorrect_host_syntax() {\n\n let qcli: JCli = Default::default();\n\n let incorrect_host = \"not_a_correct_syntax\";\n\n\n\n qcli.rest().v0().tip_expect_fail(\n\n incorrect_host,\n\n \"Invalid value for '--host <host>': relative URL without a base\",\n\n );\n\n}\n\n\n", "file_path": "testing/quibitous-integration-tests/src/qcli/rest/host.rs", "rank": 99, "score": 116879.94886666126 } ]
Rust
src/main.rs
azerupi/cube-parse
ee5af4aa48755449bab7d7afe01166e029791ffc
use std::{collections::HashMap, env, path::Path}; use alphanumeric_sort::compare_str; use clap::{App, Arg}; use lazy_static::lazy_static; use regex::Regex; mod family; mod internal_peripheral; mod mcu; mod utils; #[derive(Debug, PartialEq)] enum GenerateTarget { Features, PinMappings, EepromSizes, } lazy_static! { static ref GPIO_VERSION: Regex = Regex::new("^([^_]*)_gpio_v1_0$").unwrap(); } fn gpio_version_to_feature(version: &str) -> Result<String, String> { if let Some(captures) = GPIO_VERSION.captures(version) { Ok(format!("io-{}", captures.get(1).unwrap().as_str())) } else { Err(format!("Could not parse version {:?}", version)) } } fn eeprom_size_to_feature(size: u32) -> String { format!("eeprom-{}", size) } fn flash_size_to_feature(size: u32) -> String { format!("flash-{}", size) } fn ram_size_to_feature(size: u32) -> String { format!("ram-{}", size) } fn main() -> Result<(), String> { let args = App::new("cube-parse") .version(env!("CARGO_PKG_VERSION")) .about("Extract AF modes on MCU pins from the database files provided with STM32CubeMX") .author(&*env!("CARGO_PKG_AUTHORS").replace(":", ", ")) .arg( Arg::with_name("db_dir") .short("d") .help("Path to the CubeMX MCU database directory") .takes_value(true) .required(true), ) .arg( Arg::with_name("generate") .help("What to generate") .takes_value(true) .possible_values(&["features", "pin_mappings", "eeprom_sizes"]) .required(true), ) .arg( Arg::with_name("mcu_family") .help("The MCU family to extract, e.g. 
\"STM32L0\"") .takes_value(true) .required(true), ) .get_matches(); let db_dir = Path::new(args.value_of("db_dir").unwrap()); let mcu_family = args.value_of("mcu_family").unwrap(); let generate = match args.value_of("generate").unwrap() { "features" => GenerateTarget::Features, "pin_mappings" => GenerateTarget::PinMappings, "eeprom_sizes" => GenerateTarget::EepromSizes, _ => unreachable!(), }; let families = family::Families::load(&db_dir) .map_err(|e| format!("Could not load families XML: {}", e))?; let family = (&families) .into_iter() .find(|v| v.name == mcu_family) .ok_or_else(|| format!("Could not find family {}", mcu_family))?; let mut mcu_map: HashMap<String, (&family::Mcu, mcu::Mcu)> = HashMap::new(); let mut mcu_gpio_map: HashMap<String, Vec<String>> = HashMap::new(); let mut mcu_package_map: HashMap<String, String> = HashMap::new(); let mut mcu_eeprom_size_map: HashMap<u32, Vec<String>> = HashMap::new(); let mut mcu_flash_size_map: HashMap<u32, Vec<String>> = HashMap::new(); let mut mcu_ram_size_map: HashMap<u32, Vec<String>> = HashMap::new(); for sf in family { for mcu in sf { let mcu_dat = mcu::Mcu::load(&db_dir, &mcu.name) .map_err(|e| format!("Could not load MCU data for mcu {}: {}", &mcu.name, e))?; let gpio_version = mcu_dat.get_ip("GPIO").unwrap().get_version().to_string(); mcu_gpio_map .entry(gpio_version) .or_insert(vec![]) .push(mcu.ref_name.clone()); if mcu_family == "STM32L0" { mcu_package_map.insert(mcu.ref_name.clone(), mcu.package_name.clone()); } if let Some(size) = mcu_dat.get_eeprom_size() { mcu_eeprom_size_map .entry(size) .or_insert(vec![]) .push(mcu.ref_name.clone()); } if let Some(flash_size) = mcu.flash_size() { mcu_flash_size_map .entry(flash_size) .or_insert(vec![]) .push(mcu.ref_name.clone()); } if let Some(ram_size) = mcu.ram_size() { mcu_ram_size_map .entry(ram_size) .or_insert(vec![]) .push(mcu.ref_name.clone()); } mcu_map.insert(mcu.ref_name.clone(), (mcu, mcu_dat)); } } match generate { GenerateTarget::Features => 
generate_features( &mcu_map, &mcu_gpio_map, &mcu_package_map, &mcu_eeprom_size_map, &mcu_flash_size_map, &mcu_ram_size_map, &mcu_family, )?, GenerateTarget::PinMappings => generate_pin_mappings(&mcu_gpio_map, &db_dir)?, GenerateTarget::EepromSizes => generate_eeprom_sizes(&mcu_eeprom_size_map)?, }; Ok(()) } lazy_static! { static ref FEATURE_DEPENDENCIES: HashMap<&'static str, HashMap<&'static str, &'static str>> = { let mut m = HashMap::new(); let mut l0 = HashMap::new(); l0.insert("^STM32L0.1", "stm32l0x1"); l0.insert("^STM32L0.2", "stm32l0x2"); l0.insert("^STM32L0.3", "stm32l0x3"); m.insert("STM32L0", l0); m }; } fn generate_features( mcu_map: &HashMap<String, (&family::Mcu, mcu::Mcu)>, mcu_gpio_map: &HashMap<String, Vec<String>>, mcu_package_map: &HashMap<String, String>, mcu_eeprom_size_map: &HashMap<u32, Vec<String>>, mcu_flash_size_map: &HashMap<u32, Vec<String>>, mcu_ram_size_map: &HashMap<u32, Vec<String>>, mcu_family: &str, ) -> Result<(), String> { let mut io_features = mcu_gpio_map .keys() .map(|gpio| gpio_version_to_feature(gpio)) .collect::<Result<Vec<String>, String>>()?; io_features.sort(); println!("# Features based on the GPIO peripheral version"); println!("# This determines the pin function mapping of the MCU"); for feature in io_features { println!("{} = []", feature); } println!(); let mut eeprom_sizes = mcu_eeprom_size_map.keys().collect::<Vec<_>>(); eeprom_sizes.sort(); println!("# Features based on EEPROM size (in bytes)"); for size in eeprom_sizes { println!("{} = []", eeprom_size_to_feature(*size)); } println!(); let mut flash_sizes = mcu_flash_size_map.keys().collect::<Vec<_>>(); flash_sizes.sort(); println!("# Features based on Flash size (in kbytes)"); for size in flash_sizes { println!("{} = []", flash_size_to_feature(*size)); } println!(); let mut ram_sizes = mcu_ram_size_map.keys().collect::<Vec<_>>(); ram_sizes.sort(); println!("# Features based on RAM size (in kbytes)"); for size in ram_sizes { println!("{} = []", 
ram_size_to_feature(*size)); } println!(); if !mcu_package_map.is_empty() { println!("# Physical packages"); let mut packages = mcu_package_map .values() .map(|v| v.to_lowercase()) .collect::<Vec<_>>(); packages.sort_by(|a, b| compare_str(a, b)); packages.dedup(); for pkg in packages { println!("{} = []", pkg); } println!(); } let mut mcu_aliases = vec![]; for (gpio, mcu_list) in mcu_gpio_map { let gpio_version_feature = gpio_version_to_feature(gpio).unwrap(); for mcu in mcu_list { let mut dependencies = vec![]; if let Some(family) = FEATURE_DEPENDENCIES.get(mcu_family) { for (pattern, feature) in family { if Regex::new(pattern).unwrap().is_match(&mcu) { dependencies.push(feature.to_string()); break; } } } if let Some(package) = mcu_package_map.get(mcu) { dependencies.push(package.to_lowercase()); } dependencies.push(gpio_version_feature.clone()); let (mcu_info, mcu_dat) = mcu_map.get(mcu).unwrap(); if let Some(size) = mcu_dat.get_eeprom_size() { dependencies.push(eeprom_size_to_feature(size)); } if let Some(flash_size) = mcu_info.flash_size() { dependencies.push(flash_size_to_feature(flash_size)); } if let Some(ram_size) = mcu_info.ram_size() { dependencies.push(ram_size_to_feature(ram_size)); } mcu_aliases.push(format!( "mcu-{} = [{}]", mcu, &dependencies.iter().map(|val| format!("\"{}\"", val)).fold( String::new(), |mut acc, x| { if !acc.is_empty() { acc.push_str(", "); } acc.push_str(&x); acc } ) )); } } mcu_aliases.sort(); println!("# MCU aliases"); println!("#"); println!("# Note: These are just aliases, they should not be used to directly feature gate"); println!( "# functionality in the HAL! However, user code should usually depend on a MCU alias." 
); for alias in mcu_aliases { println!("{}", alias); } Ok(()) } fn generate_pin_mappings( mcu_gpio_map: &HashMap<String, Vec<String>>, db_dir: &Path, ) -> Result<(), String> { let mut gpio_versions = mcu_gpio_map.keys().collect::<Vec<_>>(); gpio_versions.sort(); for gpio in gpio_versions { let gpio_version_feature = gpio_version_to_feature(&gpio)?; println!("#[cfg(feature = \"{}\")]", gpio_version_feature); let gpio_data = internal_peripheral::IpGPIO::load(db_dir, &gpio) .map_err(|e| format!("Could not load IP GPIO file: {}", e))?; render_pin_modes(&gpio_data); println!("\n"); } Ok(()) } fn generate_eeprom_sizes(mcu_eeprom_size_map: &HashMap<u32, Vec<String>>) -> Result<(), String> { println!("// EEPROM sizes in bytes, generated with cube-parse"); for size in mcu_eeprom_size_map.keys() { println!("#[cfg(feature = \"{}\")]", eeprom_size_to_feature(*size)); println!("const EEPROM_SIZE_BYTES: u32 = {};", size); } Ok(()) } fn render_pin_modes(ip: &internal_peripheral::IpGPIO) { let mut pin_map: HashMap<String, Vec<String>> = HashMap::new(); for p in &ip.gpio_pin { let name = p.get_name(); if let Some(n) = name { pin_map.insert(n, p.get_af_modes()); } } let mut pin_map = pin_map .into_iter() .map(|(k, mut v)| { #[allow(clippy::redundant_closure)] v.sort_by(|a, b| compare_str(a, b)); (k, v) }) .collect::<Vec<_>>(); pin_map.sort_by(|a, b| compare_str(&a.0, &b.0)); println!("pins! 
{{"); for (n, af) in pin_map { if af.is_empty() { continue; } else if af.len() == 1 { println!(" {} => {{{}}},", n, af[0]); } else { println!(" {} => {{", n); for a in af { println!(" {},", a); } println!(" }},"); } } println!("}}"); } #[cfg(test)] mod tests { use super::*; #[test] fn test_gpio_version_to_feature() { assert_eq!( gpio_version_to_feature("STM32L152x8_gpio_v1_0").unwrap(), "io-STM32L152x8" ); assert_eq!( gpio_version_to_feature("STM32F333_gpio_v1_0").unwrap(), "io-STM32F333" ); assert!(gpio_version_to_feature("STM32F333_gpio_v1_1").is_err()); assert!(gpio_version_to_feature("STM32F333_qqio_v1_0").is_err()); assert!(gpio_version_to_feature("STM32_STM32F333_gpio_v1_0").is_err()); } }
use std::{collections::HashMap, env, path::Path}; use alphanumeric_sort::compare_str; use clap::{App, Arg}; use lazy_static::lazy_static; use regex::Regex; mod family; mod internal_peripheral; mod mcu; mod utils; #[derive(Debug, PartialEq)] enum GenerateTarget { Features, PinMappings, EepromSizes, } lazy_static! { static ref GPIO_VERSION: Regex = Regex::new("^([^_]*)_gpio_v1_0$").unwrap(); } fn gpio_version_to_feature(version: &str) -> Result<String, String> { if let Some(captures) = GPIO_VERSION.captures(version) { Ok(format!("io-{}", captures.get(1).unwrap().as_str())) } else { Err(format!("Could not parse version {:?}", version)) } } fn eeprom_size_to_feature(size: u32) -> String { format!("eeprom-{}", size) } fn flash_size_to_feature(size: u32) -> String { format!("flash-{}", size) } fn ram_size_to_feature(size: u32) -> String { format!("ram-{}", size) } fn main() -> Result<(), String> { let args = App::new("cube-parse") .version(env!("CARGO_PKG_VERSION")) .about("Extract AF modes on MCU pins from the database files provided with STM32CubeMX") .author(&*env!("CARGO_PKG_AUTHORS").replace(":", ", ")) .arg( Arg::with_name("db_dir") .short("d") .help("Path to the CubeMX MCU database directory") .takes_value(true) .required(true), ) .arg( Arg::with_name("generate") .help("What to generate") .takes_value(true) .possible_values(&["features", "pin_mappings", "eeprom_sizes"]) .required(true), ) .arg( Arg::with_name("mcu_family") .help("The MCU family to extract, e.g. \"STM32L0\"") .takes_value(true) .required(true), ) .get_matches(); let db_dir = Path::new(args.value_of("db_dir").unwrap()); let mcu_family = args.value_of("mcu_family").unwrap(); let generate = match args.value_of("generate").unwrap() { "features" => GenerateTarget::Features, "pin_mappings" => Genera
} if let Some(ram_size) = mcu.ram_size() { mcu_ram_size_map .entry(ram_size) .or_insert(vec![]) .push(mcu.ref_name.clone()); } mcu_map.insert(mcu.ref_name.clone(), (mcu, mcu_dat)); } } match generate { GenerateTarget::Features => generate_features( &mcu_map, &mcu_gpio_map, &mcu_package_map, &mcu_eeprom_size_map, &mcu_flash_size_map, &mcu_ram_size_map, &mcu_family, )?, GenerateTarget::PinMappings => generate_pin_mappings(&mcu_gpio_map, &db_dir)?, GenerateTarget::EepromSizes => generate_eeprom_sizes(&mcu_eeprom_size_map)?, }; Ok(()) } lazy_static! { static ref FEATURE_DEPENDENCIES: HashMap<&'static str, HashMap<&'static str, &'static str>> = { let mut m = HashMap::new(); let mut l0 = HashMap::new(); l0.insert("^STM32L0.1", "stm32l0x1"); l0.insert("^STM32L0.2", "stm32l0x2"); l0.insert("^STM32L0.3", "stm32l0x3"); m.insert("STM32L0", l0); m }; } fn generate_features( mcu_map: &HashMap<String, (&family::Mcu, mcu::Mcu)>, mcu_gpio_map: &HashMap<String, Vec<String>>, mcu_package_map: &HashMap<String, String>, mcu_eeprom_size_map: &HashMap<u32, Vec<String>>, mcu_flash_size_map: &HashMap<u32, Vec<String>>, mcu_ram_size_map: &HashMap<u32, Vec<String>>, mcu_family: &str, ) -> Result<(), String> { let mut io_features = mcu_gpio_map .keys() .map(|gpio| gpio_version_to_feature(gpio)) .collect::<Result<Vec<String>, String>>()?; io_features.sort(); println!("# Features based on the GPIO peripheral version"); println!("# This determines the pin function mapping of the MCU"); for feature in io_features { println!("{} = []", feature); } println!(); let mut eeprom_sizes = mcu_eeprom_size_map.keys().collect::<Vec<_>>(); eeprom_sizes.sort(); println!("# Features based on EEPROM size (in bytes)"); for size in eeprom_sizes { println!("{} = []", eeprom_size_to_feature(*size)); } println!(); let mut flash_sizes = mcu_flash_size_map.keys().collect::<Vec<_>>(); flash_sizes.sort(); println!("# Features based on Flash size (in kbytes)"); for size in flash_sizes { println!("{} = []", 
flash_size_to_feature(*size)); } println!(); let mut ram_sizes = mcu_ram_size_map.keys().collect::<Vec<_>>(); ram_sizes.sort(); println!("# Features based on RAM size (in kbytes)"); for size in ram_sizes { println!("{} = []", ram_size_to_feature(*size)); } println!(); if !mcu_package_map.is_empty() { println!("# Physical packages"); let mut packages = mcu_package_map .values() .map(|v| v.to_lowercase()) .collect::<Vec<_>>(); packages.sort_by(|a, b| compare_str(a, b)); packages.dedup(); for pkg in packages { println!("{} = []", pkg); } println!(); } let mut mcu_aliases = vec![]; for (gpio, mcu_list) in mcu_gpio_map { let gpio_version_feature = gpio_version_to_feature(gpio).unwrap(); for mcu in mcu_list { let mut dependencies = vec![]; if let Some(family) = FEATURE_DEPENDENCIES.get(mcu_family) { for (pattern, feature) in family { if Regex::new(pattern).unwrap().is_match(&mcu) { dependencies.push(feature.to_string()); break; } } } if let Some(package) = mcu_package_map.get(mcu) { dependencies.push(package.to_lowercase()); } dependencies.push(gpio_version_feature.clone()); let (mcu_info, mcu_dat) = mcu_map.get(mcu).unwrap(); if let Some(size) = mcu_dat.get_eeprom_size() { dependencies.push(eeprom_size_to_feature(size)); } if let Some(flash_size) = mcu_info.flash_size() { dependencies.push(flash_size_to_feature(flash_size)); } if let Some(ram_size) = mcu_info.ram_size() { dependencies.push(ram_size_to_feature(ram_size)); } mcu_aliases.push(format!( "mcu-{} = [{}]", mcu, &dependencies.iter().map(|val| format!("\"{}\"", val)).fold( String::new(), |mut acc, x| { if !acc.is_empty() { acc.push_str(", "); } acc.push_str(&x); acc } ) )); } } mcu_aliases.sort(); println!("# MCU aliases"); println!("#"); println!("# Note: These are just aliases, they should not be used to directly feature gate"); println!( "# functionality in the HAL! However, user code should usually depend on a MCU alias." 
); for alias in mcu_aliases { println!("{}", alias); } Ok(()) } fn generate_pin_mappings( mcu_gpio_map: &HashMap<String, Vec<String>>, db_dir: &Path, ) -> Result<(), String> { let mut gpio_versions = mcu_gpio_map.keys().collect::<Vec<_>>(); gpio_versions.sort(); for gpio in gpio_versions { let gpio_version_feature = gpio_version_to_feature(&gpio)?; println!("#[cfg(feature = \"{}\")]", gpio_version_feature); let gpio_data = internal_peripheral::IpGPIO::load(db_dir, &gpio) .map_err(|e| format!("Could not load IP GPIO file: {}", e))?; render_pin_modes(&gpio_data); println!("\n"); } Ok(()) } fn generate_eeprom_sizes(mcu_eeprom_size_map: &HashMap<u32, Vec<String>>) -> Result<(), String> { println!("// EEPROM sizes in bytes, generated with cube-parse"); for size in mcu_eeprom_size_map.keys() { println!("#[cfg(feature = \"{}\")]", eeprom_size_to_feature(*size)); println!("const EEPROM_SIZE_BYTES: u32 = {};", size); } Ok(()) } fn render_pin_modes(ip: &internal_peripheral::IpGPIO) { let mut pin_map: HashMap<String, Vec<String>> = HashMap::new(); for p in &ip.gpio_pin { let name = p.get_name(); if let Some(n) = name { pin_map.insert(n, p.get_af_modes()); } } let mut pin_map = pin_map .into_iter() .map(|(k, mut v)| { #[allow(clippy::redundant_closure)] v.sort_by(|a, b| compare_str(a, b)); (k, v) }) .collect::<Vec<_>>(); pin_map.sort_by(|a, b| compare_str(&a.0, &b.0)); println!("pins! 
{{"); for (n, af) in pin_map { if af.is_empty() { continue; } else if af.len() == 1 { println!(" {} => {{{}}},", n, af[0]); } else { println!(" {} => {{", n); for a in af { println!(" {},", a); } println!(" }},"); } } println!("}}"); } #[cfg(test)] mod tests { use super::*; #[test] fn test_gpio_version_to_feature() { assert_eq!( gpio_version_to_feature("STM32L152x8_gpio_v1_0").unwrap(), "io-STM32L152x8" ); assert_eq!( gpio_version_to_feature("STM32F333_gpio_v1_0").unwrap(), "io-STM32F333" ); assert!(gpio_version_to_feature("STM32F333_gpio_v1_1").is_err()); assert!(gpio_version_to_feature("STM32F333_qqio_v1_0").is_err()); assert!(gpio_version_to_feature("STM32_STM32F333_gpio_v1_0").is_err()); } }
teTarget::PinMappings, "eeprom_sizes" => GenerateTarget::EepromSizes, _ => unreachable!(), }; let families = family::Families::load(&db_dir) .map_err(|e| format!("Could not load families XML: {}", e))?; let family = (&families) .into_iter() .find(|v| v.name == mcu_family) .ok_or_else(|| format!("Could not find family {}", mcu_family))?; let mut mcu_map: HashMap<String, (&family::Mcu, mcu::Mcu)> = HashMap::new(); let mut mcu_gpio_map: HashMap<String, Vec<String>> = HashMap::new(); let mut mcu_package_map: HashMap<String, String> = HashMap::new(); let mut mcu_eeprom_size_map: HashMap<u32, Vec<String>> = HashMap::new(); let mut mcu_flash_size_map: HashMap<u32, Vec<String>> = HashMap::new(); let mut mcu_ram_size_map: HashMap<u32, Vec<String>> = HashMap::new(); for sf in family { for mcu in sf { let mcu_dat = mcu::Mcu::load(&db_dir, &mcu.name) .map_err(|e| format!("Could not load MCU data for mcu {}: {}", &mcu.name, e))?; let gpio_version = mcu_dat.get_ip("GPIO").unwrap().get_version().to_string(); mcu_gpio_map .entry(gpio_version) .or_insert(vec![]) .push(mcu.ref_name.clone()); if mcu_family == "STM32L0" { mcu_package_map.insert(mcu.ref_name.clone(), mcu.package_name.clone()); } if let Some(size) = mcu_dat.get_eeprom_size() { mcu_eeprom_size_map .entry(size) .or_insert(vec![]) .push(mcu.ref_name.clone()); } if let Some(flash_size) = mcu.flash_size() { mcu_flash_size_map .entry(flash_size) .or_insert(vec![]) .push(mcu.ref_name.clone());
random
[ { "content": "pub fn load_file<'a, P: AsRef<Path>, Q: AsRef<Path>, R: Deserialize<'a>>(\n\n db_dir: P,\n\n file_path: Q,\n\n) -> Result<R, Box<dyn Error>> {\n\n let db_dir = db_dir.as_ref();\n\n let mut fin = BufReader::new(File::open(&db_dir.join(file_path.as_ref()))?);\n\n\n\n Ok(serde_xml_rs::deserialize(&mut fin)?)\n\n}\n", "file_path": "src/utils.rs", "rank": 8, "score": 56449.948449911426 }, { "content": "use std::{error::Error, fs::File, io::BufReader, path::Path};\n\n\n\nuse serde::Deserialize;\n\n\n", "file_path": "src/utils.rs", "rank": 11, "score": 21039.490770387103 }, { "content": "use std::error::Error;\n\nuse std::path::Path;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse crate::utils::load_file;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Mcu {\n\n #[serde(rename = \"IP\", default)]\n\n ip: Vec<IP>,\n\n #[serde(rename = \"E2prom\")]\n\n eeprom_size_bytes: String,\n\n}\n\n\n\nimpl Mcu {\n\n pub fn load<P: AsRef<Path>>(db_dir: P, mcu_name: &str) -> Result<Self, Box<dyn Error>> {\n\n load_file(db_dir, format!(\"{}.xml\", mcu_name))\n\n }\n\n\n", "file_path": "src/mcu.rs", "rank": 12, "score": 20922.383714628784 }, { "content": " pub fn get_ip(&self, name: &str) -> Option<&IP> {\n\n self.ip.iter().find(|v| v.name == name)\n\n }\n\n\n\n /// Return the EEPROM size in bytes\n\n pub fn get_eeprom_size(&self) -> Option<u32> {\n\n self.eeprom_size_bytes.parse().ok()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct IP {\n\n instance_name: String,\n\n name: String,\n\n version: String,\n\n}\n\n\n\nimpl IP {\n\n pub fn get_version(&self) -> &str {\n\n &self.version\n\n }\n\n}\n", "file_path": "src/mcu.rs", "rank": 13, "score": 20920.064908222997 }, { "content": "use std::error::Error;\n\nuse std::path::Path;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse crate::utils::load_file;\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Families {\n\n 
#[serde(rename = \"Family\")]\n\n families: Vec<Family>,\n\n}\n\n\n\n/// A MCU family (e.g. \"STM32F0\" or \"STM32L3\").\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Family {\n\n pub name: String,\n\n #[serde(rename = \"SubFamily\")]\n", "file_path": "src/family.rs", "rank": 14, "score": 20532.15338937752 }, { "content": "#[serde(rename_all = \"PascalCase\")]\n\npub struct Mcu {\n\n pub name: String,\n\n pub package_name: String,\n\n pub ref_name: String,\n\n #[serde(rename = \"Flash\")]\n\n pub flash_size: String,\n\n #[serde(rename = \"Ram\")]\n\n pub ram_size: String,\n\n}\n\n\n\nimpl Mcu {\n\n pub fn flash_size(&self) -> Option<u32> {\n\n self.flash_size.parse().ok()\n\n }\n\n\n\n pub fn ram_size(&self) -> Option<u32> {\n\n self.ram_size.parse().ok()\n\n }\n\n}\n", "file_path": "src/family.rs", "rank": 15, "score": 20531.987001778067 }, { "content": " sub_families: Vec<SubFamily>,\n\n}\n\n\n\n/// A MCU subfamily (e.g. \"STM32F0x0 Value Line\").\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct SubFamily {\n\n pub name: String,\n\n #[serde(rename = \"Mcu\")]\n\n pub mcus: Vec<Mcu>,\n\n}\n\n\n\n/// A MCU (e.g. STM32L071KBTx).\n\n///\n\n/// Note that multiple MCUs (with unique `ref_name`) share a common name. 
For\n\n/// example:\n\n///\n\n/// - `<Mcu Name=\"STM32L071K(B-Z)Tx\" PackageName=\"LQFP32\" RefName=\"STM32L071KBTx\" RPN=\"STM32L071KB\">`\n\n/// - `<Mcu Name=\"STM32L071K(B-Z)Tx\" PackageName=\"LQFP32\" RefName=\"STM32L071KZTx\" RPN=\"STM32L071KZ\">`\n\n///\n", "file_path": "src/family.rs", "rank": 16, "score": 20528.822593878693 }, { "content": "\n\nimpl Families {\n\n pub fn load<P: AsRef<Path>>(db_dir: P) -> Result<Self, Box<dyn Error>> {\n\n load_file(db_dir, \"families.xml\")\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Families {\n\n type Item = &'a Family;\n\n type IntoIter = std::slice::Iter<'a, Family>;\n\n\n\n fn into_iter(self) -> <Self as IntoIterator>::IntoIter {\n\n self.families.iter()\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Family {\n\n type Item = &'a SubFamily;\n\n type IntoIter = std::slice::Iter<'a, SubFamily>;\n\n\n", "file_path": "src/family.rs", "rank": 17, "score": 20528.689295711083 }, { "content": " fn into_iter(self) -> <Self as IntoIterator>::IntoIter {\n\n self.sub_families.iter()\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a SubFamily {\n\n type Item = &'a Mcu;\n\n type IntoIter = std::slice::Iter<'a, Mcu>;\n\n\n\n fn into_iter(self) -> <Self as IntoIterator>::IntoIter {\n\n self.mcus.iter()\n\n }\n\n}\n", "file_path": "src/family.rs", "rank": 18, "score": 20526.136094209854 }, { "content": "/// Both MCUs share the same name, but the ref name is different.\n\n///\n\n/// The meaning of the name, using the `STM32L071KBTx` as an example:\n\n///\n\n/// |Part |Meaning |\n\n/// |-----|--------|\n\n/// |STM32|Family |\n\n/// | L |Type |\n\n/// | 0 |Core |\n\n/// | 71 |Line |\n\n/// | K |Pincount|\n\n/// | B |Flash |\n\n/// | T |Package |\n\n/// | x |Temp |\n\n/// |-----|--------|\n\n///\n\n/// See https://ziutek.github.io/2018/05/07/stm32_naming_scheme.html for more details.\n\n///\n\n/// Note that sometimes there are exceptions from this naming rule.\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/family.rs", "rank": 
19, "score": 20525.779814903995 }, { "content": "\n\nlazy_static! {\n\n static ref USART_RX: Regex = Regex::new(\"(LP)?US?ART._RX\").unwrap();\n\n static ref USART_TX: Regex = Regex::new(\"(LP)?US?ART._TX\").unwrap();\n\n static ref SPI_MOSI: Regex = Regex::new(\"SPI._MOSI\").unwrap();\n\n static ref SPI_MISO: Regex = Regex::new(\"SPI._MISO\").unwrap();\n\n static ref SPI_SCK: Regex = Regex::new(\"SPI._SCK\").unwrap();\n\n static ref I2C_SCL: Regex = Regex::new(\"I2C._SCL\").unwrap();\n\n static ref I2C_SDA: Regex = Regex::new(\"I2C._SDA\").unwrap();\n\n}\n\n\n\nimpl GPIOPin {\n\n pub fn get_name(&self) -> Option<String> {\n\n let gpio_pin = self\n\n .specific_parameter\n\n .iter()\n\n .find(|v| v.name == \"GPIO_Pin\");\n\n match gpio_pin {\n\n Some(v) => {\n\n let num = v.possible_value.val.split('_').collect::<Vec<_>>()[2];\n", "file_path": "src/internal_peripheral.rs", "rank": 36, "score": 11.702323036921413 }, { "content": "# Cube Parser\n\n\n\n[![Build Status][github-actions-badge]][github-actions]\n\n\n\nA program to extract hardware configuration information from the MCU database\n\nfiles shipped with STM32CubeMX.\n\n\n\n\n\n## Usage\n\n\n\n cargo run features STM32L0 -d /path/to/stm32cubemx/db/mcu/\n\n cargo run pin_mappings STM32L0 -d /path/to/stm32cubemx/db/mcu/\n\n\n\nUnder a default Windows install, the database path is `C:\\Program Files\n\n(x86)\\STMicroelectronics\\STM32Cube\\STM32CubeMX\\db\\mcu`, adjust as appropriate\n\nfor your local config. The MCU family name should match one of the MCU families\n\nas defined in `families.xml`. 
At the time of writing, the following families\n\nare available:\n\n\n\n* STM32F0\n\n* STM32F1\n\n* STM32F2\n\n* STM32F3\n\n* STM32F4\n\n* STM32F7\n\n* STM32G0\n\n* STM32G4\n\n* STM32H7\n\n* STM32L0\n\n* STM32L1\n\n* STM32L4\n\n* STM32L4+\n\n* STM32L5\n\n* STM32MP1\n\n* STM32WB\n\n\n\n\n\n## The STM32CubeMX Database\n\n\n\nThe STM32CubeMX database contains the following files that are relevant to us:\n\n\n\n### Families\n\n\n\nIn the root, there is a file called `families.xml`. It contains all MCUs\n\ngrouped by family (e.g. \"STM32F0\") and subfamily (e.g. \"STM32F0x0 Value Line\").\n\n\n\n```xml\n\n<Families xsi:noNamespaceSchemaLocation=\"families.xsd\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n\n <Family Name=\"STM32F0\">\n\n <SubFamily Name=\"STM32F0x0 Value Line\">\n\n <Mcu Name=\"STM32F030C6Tx\" PackageName=\"LQFP48\" RefName=\"STM32F030C6Tx\" RPN=\"STM32F030C6\">\n\n <Core>Arm Cortex-M0</Core>\n\n <Frequency>48</Frequency>\n\n <Ram>4</Ram>\n\n ...\n\n </Mcu>\n\n <Mcu Name=\"STM32F030C8Tx\" PackageName=\"LQFP48\" RefName=\"STM32F030C8Tx\" RPN=\"STM32F030C8\">\n\n ...\n\n```\n\n\n", "file_path": "README.md", "rank": 37, "score": 11.513922570500776 }, { "content": "#[serde(rename = \"GPIO_Pin\", rename_all = \"PascalCase\")]\n\npub struct GPIOPin {\n\n port_name: String,\n\n name: String,\n\n specific_parameter: Vec<SpecificParameter>,\n\n pin_signal: Option<Vec<PinSignal>>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename = \"IP\")]\n\npub struct IpGPIO {\n\n #[serde(rename = \"GPIO_Pin\")]\n\n pub(crate) gpio_pin: Vec<GPIOPin>,\n\n}\n\n\n\nimpl IpGPIO {\n\n pub fn load<P: AsRef<Path>>(db_dir: P, version: &str) -> Result<Self, Box<dyn Error>> {\n\n load_file(db_dir, format!(\"IP/GPIO-{}_Modes.xml\", version))\n\n }\n\n}\n", "file_path": "src/internal_peripheral.rs", "rank": 38, "score": 11.445490020880955 }, { "content": "use std::error::Error;\n\nuse std::path::Path;\n\n\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse 
serde_derive::Deserialize;\n\n\n\nuse crate::utils::load_file;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct PossibleValue {\n\n #[serde(rename = \"$value\")]\n\n pub(crate) val: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct SpecificParameter {\n\n name: String,\n\n possible_value: PossibleValue,\n", "file_path": "src/internal_peripheral.rs", "rank": 39, "score": 11.224633114431713 }, { "content": "### GPIO Internal Peripheral\n\n\n\nThe GPIO IP can be found in the `IP` directory. The relevant file for us is at\n\n`IP/GPIO-<version>_Modes.xml`. The version can be extracted from the `IP`\n\nelement shown in the previous section.\n\n\n\nIn the case of the `STM32L071KBTx`, the relevant GPIO IP version file is at\n\n`IP/GPIO-STM32L071_gpio_v1_0_Modes.xml`.\n\n\n\nThat file starts out with some `RefParameter` and `RefMode` elements. Relevant\n\nto us are mostly the `GPIO_Pin` elements. They look like this:\n\n\n\n```xml\n\n<GPIO_Pin PortName=\"PB\" Name=\"PB6\">\n\n <SpecificParameter Name=\"GPIO_Pin\">\n\n <PossibleValue>GPIO_PIN_6</PossibleValue>\n\n </SpecificParameter>\n\n <PinSignal Name=\"I2C1_SCL\">\n\n <SpecificParameter Name=\"GPIO_AF\">\n\n <PossibleValue>GPIO_AF1_I2C1</PossibleValue>\n\n </SpecificParameter>\n\n </PinSignal>\n\n <PinSignal Name=\"LPTIM1_ETR\">\n\n <SpecificParameter Name=\"GPIO_AF\">\n\n <PossibleValue>GPIO_AF2_LPTIM1</PossibleValue>\n\n </SpecificParameter>\n\n </PinSignal>\n\n <PinSignal Name=\"TSC_G5_IO3\">\n\n <SpecificParameter Name=\"GPIO_AF\">\n\n <PossibleValue>GPIO_AF3_TSC</PossibleValue>\n\n </SpecificParameter>\n\n </PinSignal>\n\n <PinSignal Name=\"USART1_TX\">\n\n <SpecificParameter Name=\"GPIO_AF\">\n\n <PossibleValue>GPIO_AF0_USART1</PossibleValue>\n\n </SpecificParameter>\n\n </PinSignal>\n\n</GPIO_Pin>\n\n```\n\n\n\nAs you can see, this element describes the pin `PB6`. 
Depending on the chosen\n\nAlternative Function (AF), it can become an `I2C1_SCL` pin (AF1), a `USART1_TX`\n\npin (AF0), or some other variants.\n\n\n\n\n", "file_path": "README.md", "rank": 40, "score": 10.488742724816301 }, { "content": " Some(format!(\"{}{}\", &self.port_name, num))\n\n }\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn get_af_modes(&self) -> Vec<String> {\n\n let mut res = Vec::new();\n\n if let Some(ref v) = self.pin_signal {\n\n for sig in v {\n\n let per = sig.name.split('_').collect::<Vec<_>>()[0];\n\n if USART_RX.is_match(&sig.name) {\n\n res.push(format!(\"{}: RxPin<{}>\", sig.get_af_value(), per));\n\n }\n\n if USART_TX.is_match(&sig.name) {\n\n res.push(format!(\"{}: TxPin<{}>\", sig.get_af_value(), per));\n\n }\n\n if SPI_MOSI.is_match(&sig.name) {\n\n res.push(format!(\"{}: MosiPin<{}>\", sig.get_af_value(), per));\n\n }\n", "file_path": "src/internal_peripheral.rs", "rank": 41, "score": 10.073777299161456 }, { "content": "### MCU\n\n\n\nNext to the `families.xml` file, there are a lot of MCU definitions. The\n\nfilenames match the `Name` attribute in the `Mcu` element above.\n\n\n\nFor example, the `STM32L071KB(B-Z)Tx.xml` file starts like this:\n\n\n\n```xml\n\n<Mcu ClockTree=\"STM32L0\" DBVersion=\"V3.0\" Family=\"STM32L0\" HasPowerPad=\"false\"\n\n IOType=\"\" Line=\"STM32L0x1\" Package=\"LQFP32\" RefName=\"STM32L071K(B-Z)Tx\"\n\n xmlns=\"http://mcd.rou.st.com/modules.php?name=mcu\">\n\n\t<Core>Arm Cortex-M0+</Core>\n\n\t<Frequency>32</Frequency>\n\n\t<E2prom>6144</E2prom>\n\n\t<Ram>20</Ram>\n\n\t<IONb>25</IONb>\n\n\t<Die>DIE447</Die>\n\n\t<Flash>128</Flash>\n\n\t<Flash>192</Flash>\n\n\t<Voltage Max=\"3.6\" Min=\"1.65\"/>\n\n\t<Current Lowest=\"0.29\" Run=\"87\"/>\n\n\t<Temperature Max=\"125\" Min=\"-40\"/>\n\n ...\n\n```\n\n\n\nThis first part describes the MCU: How much RAM it has, what the frequency is,\n\nhow many I/Os it has, what flash variants there are, etc. 
Many of these things\n\nare also encoded in the full MCU `RefName`.\n\n\n\nFollowing this general description, there are a number of `IP` elements. IP\n\nstands for \"Internal Peripheral\". Here we have things like the USART peripherals:\n\n\n\n```xml\n\n<IP InstanceName=\"USART1\" Name=\"USART\" Version=\"sci3_v1_1_Cube\"/>\n\n<IP InstanceName=\"USART2\" Name=\"USART\" Version=\"sci3_v1_1_Cube\"/>\n\n```\n\n\n\n...or the RCC peripheral:\n\n\n\n```xml\n\n<IP InstanceName=\"RCC\" Name=\"RCC\" Version=\"STM32L051_rcc_v1_0\"/>\n\n```\n\n\n\n...and, most important to us, the GPIO peripheral.\n\n\n\n```xml\n\n<IP ConfigFile=\"GPIO-STM32L0xx\" InstanceName=\"GPIO\" Name=\"GPIO\" Version=\"STM32L071_gpio_v1_0\"/>\n\n```\n\n\n\nHere, the value of the `Version` attribute points to the actual GPIO signal\n\ndefinition.\n\n\n\n(There are also some other interesting entries in that file, for example the\n\nmapping from physical pins to internal pin names. Those are not relevant for us\n\nthough.)\n\n\n", "file_path": "README.md", "rank": 42, "score": 9.294614568310474 }, { "content": " if SPI_MISO.is_match(&sig.name) {\n\n res.push(format!(\"{}: MisoPin<{}>\", sig.get_af_value(), per));\n\n }\n\n if SPI_SCK.is_match(&sig.name) {\n\n res.push(format!(\"{}: SckPin<{}>\", sig.get_af_value(), per));\n\n }\n\n if I2C_SCL.is_match(&sig.name) {\n\n res.push(format!(\"{}: SclPin<{}>\", sig.get_af_value(), per));\n\n }\n\n if I2C_SDA.is_match(&sig.name) {\n\n res.push(format!(\"{}: SdaPin<{}>\", sig.get_af_value(), per));\n\n }\n\n }\n\n }\n\n res\n\n }\n\n}\n", "file_path": "src/internal_peripheral.rs", "rank": 43, "score": 7.260907694552924 }, { "content": "}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct PinSignal {\n\n name: String,\n\n specific_parameter: SpecificParameter,\n\n}\n\n\n\nimpl PinSignal {\n\n fn get_af_value(&self) -> &str {\n\n self.specific_parameter\n\n .possible_value\n\n .val\n\n .split('_')\n\n 
.collect::<Vec<_>>()[1]\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/internal_peripheral.rs", "rank": 44, "score": 7.073337782884597 }, { "content": "## GPIO Feature Groups\n\n\n\nWhen generating pin function mappings, we want to avoid generating a mapping\n\nfor every possible MCU, since that would result in dozens or even hundreds of\n\npin definitions. However, if we don't want a mapping per MCU, we need to group\n\nthem somehow. The best way for grouping is probably to follow ST's grouping,\n\nwhich is encoded in the IP versions described above.\n\n\n\nThe feature names are mapped as follows:\n\n\n\n- `STM32L031_gpio_v1_0` -> `io-STM32L031`\n\n- `STM32L051_gpio_v1_0` -> `io-STM32L051`\n\n- `STM32L152x8_gpio_v1_0` -> `io-STM32L152x8`\n\n\n\nFor example, the GPIO IP file named \"STM32L031_gpio_v1_0\" is shared among the\n\nfollowing MCUs:\n\n\n\n- STM32L010C6Tx\n\n- STM32L031C(4-6)Tx\n\n- STM32L031C(4-6)Tx\n\n- STM32L031C6Ux\n\n- STM32L031E(4-6)Yx\n\n- STM32L031E(4-6)Yx\n\n- STM32L031F(4-6)Px\n\n- STM32L031F(4-6)Px\n\n- STM32L031G(4-6)Ux\n\n- STM32L031G(4-6)Ux\n\n- STM32L031G6UxS\n\n- STM32L031K(4-6)Tx\n\n- STM32L031K(4-6)Tx\n\n- STM32L031K(4-6)Ux\n\n- STM32L031K(4-6)Ux\n\n- STM32L041C(4-6)Tx\n\n- STM32L041C(4-6)Tx\n\n- STM32L041E6Yx\n\n- STM32L041F6Px\n\n- STM32L041G6Ux\n\n- STM32L041G6UxS\n\n- STM32L041K6Tx\n\n- STM32L041K6Ux\n\n\n\nAs you can see, this may be a bit confusing due to the fact that both the\n\n`STM32L010C6Tx` and the `STM32L041E6Yx` require the `io-STM32L031` feature.\n\nHowever, sticking to the (sometimes non-logical) grouping used in the CubeMX\n\ndatabase is probably still better than creating our own grouping, which may be\n\nbroken at any time by ST releasing a new MCU in a pre-existing group, but with\n\na different, incompatible GPIO IP version.\n\n\n\nIn order to simplify the GPIO IP version selection for the user, alias features\n\nare generated. 
These are purely a convenience for the user and are never used\n\ndirectly as feature gates in the source code.\n\n\n\n\n\n<!-- Badges -->\n\n[github-actions]: https://github.com/dbrgn/cube-parse/actions?query=branch%3Amaster\n\n[github-actions-badge]: https://github.com/dbrgn/cube-parse/workflows/CI/badge.svg\n", "file_path": "README.md", "rank": 45, "score": 6.568527927854404 } ]
Rust
grid/src/lib.rs
Daniel-del-Castillo/ia1
c11d768af415a8d117152b954cb9cf25bce48631
use crossterm::style::Colorize; use std::fmt; mod content; mod path_finding; use content::Content; use rand::{thread_rng, Rng}; pub struct Grid { grid: Vec<Vec<Content>>, goal: Option<(usize, usize)>, car: Option<(usize, usize)>, } impl fmt::Display for Grid { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for _ in 0..self.grid[0].len() + 2 { write!(f, "{}", " ".on_dark_blue())?; } write!(f, "\n\r")?; for row in self.grid.iter() { write!(f, "{}", " ".on_dark_blue())?; for cell in row.iter() { write!(f, "{}", cell)? } write!(f, "{}", " \n\r".on_dark_blue())?; } for _ in 0..self.grid[0].len() + 2 { write!(f, "{}", " ".on_dark_blue())?; } write!(f, "\n\r")?; Ok(()) } } impl Grid { pub fn new(m: usize, n: usize) -> Self { assert!(m != 0 && n != 0); Grid { grid: vec![vec![Content::Empty; n]; m], goal: None, car: None, } } pub fn m(&self) -> usize { self.grid.len() } pub fn n(&self) -> usize { self.grid[0].len() } pub fn has_goal(&self) -> bool { match self.goal { None => false, Some(_) => true, } } pub fn has_car(&self) -> bool { match self.car { None => false, Some(_) => true, } } pub fn set_width(&mut self, n: usize) { assert!(n != 0); let width = self.grid[0].len(); if n == width { return; } else if n < width { self.grid.iter_mut().for_each(|row| row.truncate(n)); self.check_car_valididy(); self.check_goal_valididy(); } else { self.grid .iter_mut() .for_each(|row| (0..n - width).for_each(|_| row.push(Content::Empty))); } } pub fn set_height(&mut self, m: usize) { assert!(m != 0); let height = self.grid.len(); if m == height { return; } else if m < height { self.grid.truncate(m); self.check_car_valididy(); self.check_goal_valididy(); } else { let width = self.grid[0].len(); (0..m - height).for_each(|_| self.grid.push(vec![Content::Empty; width])); } } pub fn set_wall(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Car => self.car = None, Content::Goal => self.goal = None, _ => {} } self.grid[y][x] = Content::Wall; } pub fn 
set_goal(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Car => self.car = None, Content::Goal => return, _ => {} } self.grid[y][x] = Content::Goal; if let Some(old_goal_pos) = &mut self.goal { self.grid[old_goal_pos.1][old_goal_pos.0] = Content::Empty; } self.goal = Some((x, y)); } pub fn set_car(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Goal => self.goal = None, Content::Car => return, _ => {} } self.grid[y][x] = Content::Car; if let Some(old_car_pos) = &mut self.car { self.grid[old_car_pos.1][old_car_pos.0] = Content::Empty; } self.car = Some((x, y)); } pub fn set_empty(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Goal => self.goal = None, Content::Car => self.car = None, _ => {} } self.grid[y][x] = Content::Empty; } pub fn clear(&mut self) { self.grid = vec![vec![Content::Empty; self.grid[0].len()]; self.grid.len()]; } pub fn fill_random(&mut self, wall_percentage: usize) { assert!(wall_percentage <= 100); self.car = None; self.goal = None; self.fill_random_walls(wall_percentage); let car_pos = self.get_random_pos(); self.set_car(car_pos.0, car_pos.1); if self.m() * self.n() != 1 { let goal_pos = loop { let pos = self.get_random_pos(); if pos != car_pos { break pos; } }; self.set_goal(goal_pos.0, goal_pos.1); } } fn fill_random_walls(&mut self, wall_percentage: usize) { let mut rng = thread_rng(); for content in self.grid.iter_mut().map(|i| i.iter_mut()).flatten() { if rng.gen_range(1, 101) <= wall_percentage { *content = Content::Wall; } else { *content = Content::Empty; } } } fn get_random_pos(&mut self) -> (usize, usize) { let n_cells = self.m() * self.n(); let pos = thread_rng().gen_range(0, n_cells); let y = pos / self.n(); let x = pos % self.n(); (x, y) } fn check_car_valididy(&mut self) { if let Some(pos) = self.car { if pos.0 >= self.n() || pos.1 >= self.m() { self.car = None; } } } fn check_goal_valididy(&mut self) { if let Some(pos) = self.goal { if pos.0 >= self.n() || 
pos.1 >= self.m() { self.goal = None; } } } }
use crossterm::style::Colorize; use std::fmt; mod content; mod path_finding; use content::Content; use rand::{thread_rng, Rng}; pub struct Grid { grid: Vec<Vec<Content>>, goal: Option<(usize, usize)>, car: Option<(usize, usize)>, } impl fmt::Display for Grid { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for _ in 0..self.grid[0].len() + 2 { write!(f, "{}", " ".on_dark_blue())?; } write!(f, "\n\r")?; for row in self.grid.iter() { write!(f, "{}", " ".on_dark_blue())?; for cell in row.iter() { write!(f, "{}", cell)? } write!(f, "{}", " \n\r".on_dark_blue())?; } for _ in 0..self.grid[0].len() + 2 { write!(f, "{}", " ".on_dark_blue())?; } write!(f, "\n\r")?; Ok(()) } } impl Grid { pub fn new(m: usize, n: usize) -> Self { assert!(m != 0 && n != 0); Grid { grid: vec![vec![Content::Empty; n]; m], goal: None, car: None, } } pub fn m(&self) -> usize { self.grid.len() } pub fn n(&self) -> usize { self.grid[0].len() } pub fn has_goal(&self) -> bool { match self.goal { None => false, Some(_) => true, } } pub fn has_car(&self) -> bool { match self.car { None => false, Some(_) => true, } } pub fn set_width(&mut self, n: usize) { assert!(n != 0); let width = self.grid[0].len(); if n == width { return; } else if n < width { self.grid.iter_mut().for_each(|row| row.truncate(n)); self.check_car_valididy(); self.check_goal_valididy(); } else { self.grid .iter_mut() .for_each(|row| (0..n - width).for_each(|_| row.push(Content::Empty))); } } pub fn set_height(&mut self, m: usize) { assert!(m != 0); let height = self.grid.len(); if m == height { return; } else if m < height { self.grid.truncate(m); self.check_car_valididy(); self.check_goal_valididy(); } else { let width = self.grid[0].len(); (0..m - height).for_each(|_| self.grid.push(vec![Content::Empty; width])); } } pub fn set_wall(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Car => self.car = None, Content::Goal => self.goal = None, _ => {} } self.grid[y][x] = Content::Wall; } pub fn 
set_goal(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Car => self.car = None, Content::Goal => return, _ => {} } self.grid[y][x] = Content::Goal; if let Some(old_goal_pos) = &mut self.goal { se
y][x] = Content::Car; if let Some(old_car_pos) = &mut self.car { self.grid[old_car_pos.1][old_car_pos.0] = Content::Empty; } self.car = Some((x, y)); } pub fn set_empty(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Goal => self.goal = None, Content::Car => self.car = None, _ => {} } self.grid[y][x] = Content::Empty; } pub fn clear(&mut self) { self.grid = vec![vec![Content::Empty; self.grid[0].len()]; self.grid.len()]; } pub fn fill_random(&mut self, wall_percentage: usize) { assert!(wall_percentage <= 100); self.car = None; self.goal = None; self.fill_random_walls(wall_percentage); let car_pos = self.get_random_pos(); self.set_car(car_pos.0, car_pos.1); if self.m() * self.n() != 1 { let goal_pos = loop { let pos = self.get_random_pos(); if pos != car_pos { break pos; } }; self.set_goal(goal_pos.0, goal_pos.1); } } fn fill_random_walls(&mut self, wall_percentage: usize) { let mut rng = thread_rng(); for content in self.grid.iter_mut().map(|i| i.iter_mut()).flatten() { if rng.gen_range(1, 101) <= wall_percentage { *content = Content::Wall; } else { *content = Content::Empty; } } } fn get_random_pos(&mut self) -> (usize, usize) { let n_cells = self.m() * self.n(); let pos = thread_rng().gen_range(0, n_cells); let y = pos / self.n(); let x = pos % self.n(); (x, y) } fn check_car_valididy(&mut self) { if let Some(pos) = self.car { if pos.0 >= self.n() || pos.1 >= self.m() { self.car = None; } } } fn check_goal_valididy(&mut self) { if let Some(pos) = self.goal { if pos.0 >= self.n() || pos.1 >= self.m() { self.goal = None; } } } }
lf.grid[old_goal_pos.1][old_goal_pos.0] = Content::Empty; } self.goal = Some((x, y)); } pub fn set_car(&mut self, x: usize, y: usize) { match &mut self.grid[y][x] { Content::Goal => self.goal = None, Content::Car => return, _ => {} } self.grid[
random
[ { "content": "fn get_grid_size(matches: &ArgMatches) -> (usize, usize) {\n\n let m = matches.value_of(\"m\").unwrap_or(\"10\");\n\n let n = matches.value_of(\"n\").unwrap_or(\"10\");\n\n let m = match m.parse() {\n\n Err(_) | Ok(0) => {\n\n eprintln!(\"The -m parameter must be a positive integer\");\n\n std::process::exit(-1);\n\n }\n\n Ok(num) => num,\n\n };\n\n let n = match n.parse() {\n\n Err(_) | Ok(0) => {\n\n eprintln!(\"The -n parameter must be a positive integer\");\n\n std::process::exit(-1);\n\n }\n\n Ok(num) => num,\n\n };\n\n\n\n (m, n)\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 0, "score": 105717.34834109014 }, { "content": "fn get_grid(matches: &ArgMatches) -> Result<Grid> {\n\n if let Some(file) = matches.value_of(\"file\") {\n\n get_grid_from_file(file)\n\n } else {\n\n get_grid_from_args(matches)\n\n }\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 1, "score": 102260.1118388106 }, { "content": "fn get_grid_from_args(matches: &ArgMatches) -> Result<Grid> {\n\n let (m, n) = get_grid_size(&matches);\n\n check_valid_size(m, n)?;\n\n Ok(Grid::new(m, n))\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 2, "score": 100036.48291096688 }, { "content": "fn get_wall_percentage(matches: &ArgMatches) -> usize {\n\n let wall_percentage = matches.value_of(\"wall_percentage\").unwrap_or(\"15\");\n\n let wall_percentage = match wall_percentage.parse() {\n\n Ok(num @ 0..=100) => num,\n\n _ => {\n\n eprintln!(\"The -r parameter must be a positive integer between 0 and 100\");\n\n std::process::exit(-1);\n\n }\n\n };\n\n\n\n wall_percentage\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 3, "score": 85922.94839340687 }, { "content": "fn check_valid_size(m: usize, n: usize) -> Result<()> {\n\n let term_size = size()?;\n\n if m > term_size.1 as usize - 4 {\n\n eprintln!(\n\n \"There isn't space in your terminal for a grid with a height of {}\",\n\n m\n\n );\n\n eprintln!(\n\n \"The maximum for the 
actual size of your terminal is {}\",\n\n term_size.1 as usize - 4\n\n );\n\n std::process::exit(-1);\n\n } else if n > term_size.0 as usize / 2 - 2 {\n\n eprintln!(\n\n \"There isn't space in your terminal for a grid with a width of {}\",\n\n n\n\n );\n\n eprintln!(\n\n \"The maximum for the actual size of your terminal is {}\",\n\n term_size.0 as usize / 2 - 2\n\n );\n\n std::process::exit(-1);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 4, "score": 79256.887623791 }, { "content": "fn get_grid_from_file(path: &str) -> Result<Grid> {\n\n let file = File::open(path)?;\n\n let mut buffer = BufReader::new(file).lines();\n\n let m = buffer.next().unwrap()?.parse::<usize>()?;\n\n let n = buffer.next().unwrap()?.parse::<usize>()?;\n\n check_valid_size(m, n)?;\n\n let mut grid = Grid::new(m, n);\n\n for i in 0..m {\n\n let row = buffer.next().unwrap()?;\n\n let mut row_chars = row.chars();\n\n for j in 0..n {\n\n let c = row_chars.next().unwrap();\n\n match c {\n\n 'C' => grid.set_car(j, i),\n\n 'G' => grid.set_goal(j, i),\n\n 'X' => grid.set_wall(j, i),\n\n _ => {}\n\n }\n\n }\n\n }\n\n Ok(grid)\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 5, "score": 77076.15850458808 }, { "content": "fn get_args_matches() -> ArgMatches<'static> {\n\n App::new(\"ia1\")\n\n .args(&[\n\n Arg::with_name(\"m\")\n\n .short(\"m\")\n\n .long(\"rows\")\n\n .takes_value(true)\n\n .help(\"Sets the number of initial rows\"),\n\n Arg::with_name(\"n\")\n\n .short(\"n\")\n\n .long(\"columns\")\n\n .takes_value(true)\n\n .help(\"Sets the number of initial columns\"),\n\n Arg::with_name(\"wall_percentage\")\n\n .short(\"r\")\n\n .long(\"random\")\n\n .takes_value(true)\n\n .help(\"Sets the percentage of walls in a random generated map\"),\n\n Arg::with_name(\"euclidean\")\n\n .long(\"euclidean\")\n", "file_path": "car_simulation/src/main.rs", "rank": 6, "score": 71086.3495282603 }, { "content": "fn get_heuristic(matches: &ArgMatches) -> Heuristic 
{\n\n if matches.is_present(\"euclidean\") {\n\n return Heuristic::Euclidean;\n\n }\n\n if matches.is_present(\"chebyshev\") {\n\n return Heuristic::Chebyshev;\n\n }\n\n Heuristic::Manhattan\n\n}\n", "file_path": "car_simulation/src/main.rs", "rank": 7, "score": 69959.12603020846 }, { "content": "fn get_chebyshev_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n max(\n\n max(pos1.0, pos2.0) - min(pos1.0, pos2.0),\n\n max(pos1.1, pos2.1) - min(pos1.1, pos2.1),\n\n ) as f32\n\n}\n", "file_path": "car_simulation/src/frontend/run_simulation.rs", "rank": 8, "score": 64285.18936283658 }, { "content": "fn get_manhattan_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n (max(pos1.0, pos2.0) - min(pos1.0, pos2.0) + max(pos1.1, pos2.1) - min(pos1.1, pos2.1)) as f32\n\n}\n\n\n", "file_path": "car_simulation/src/frontend/run_simulation.rs", "rank": 9, "score": 64285.18936283658 }, { "content": "fn get_euclidean_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n let pos1 = (pos1.0 as f32, pos1.1 as f32);\n\n let pos2 = (pos2.0 as f32, pos2.1 as f32);\n\n ((pos1.0 - pos2.0).powi(2) + (pos1.1 - pos2.1).powi(2)).sqrt()\n\n}\n\n\n", "file_path": "car_simulation/src/frontend/run_simulation.rs", "rank": 10, "score": 64285.18936283658 }, { "content": "#[derive(Copy, Clone)]\n\nstruct AStarNode {\n\n pos: (usize, usize),\n\n predecessor: Option<(usize, usize)>,\n\n dist: usize,\n\n guessed_dist: f32,\n\n depth: usize,\n\n}\n\n\n\nimpl Default for AStarNode {\n\n fn default() -> Self {\n\n AStarNode {\n\n pos: (0, 0),\n\n predecessor: None,\n\n dist: usize::MAX,\n\n guessed_dist: f32::MAX,\n\n depth: 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "grid/src/path_finding.rs", "rank": 11, "score": 61037.951356981284 }, { "content": "fn quit() -> ! 
{\n\n disable_raw_mode().unwrap();\n\n execute!(stdout(), LeaveAlternateScreen, DisableMouseCapture, Show).unwrap();\n\n std::process::exit(0);\n\n}\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 12, "score": 54119.60155313226 }, { "content": "fn get_euclidean_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n let pos1 = (pos1.0 as f32, pos1.1 as f32);\n\n let pos2 = (pos2.0 as f32, pos2.1 as f32);\n\n ((pos1.0 - pos2.0).powi(2) + (pos1.1 - pos2.1).powi(2)).sqrt()\n\n}\n\n\n", "file_path": "benchmarks/src/main.rs", "rank": 13, "score": 53582.2410165603 }, { "content": "fn get_chebyshev_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n max(\n\n max(pos1.0, pos2.0) - min(pos1.0, pos2.0),\n\n max(pos1.1, pos2.1) - min(pos1.1, pos2.1),\n\n ) as f32\n\n}\n", "file_path": "benchmarks/src/main.rs", "rank": 14, "score": 53582.2410165603 }, { "content": "fn get_manhattan_dist(pos1: (usize, usize), pos2: (usize, usize)) -> f32 {\n\n (max(pos1.0, pos2.0) - min(pos1.0, pos2.0) + max(pos1.1, pos2.1) - min(pos1.1, pos2.1)) as f32\n\n}\n\n\n", "file_path": "benchmarks/src/main.rs", "rank": 15, "score": 53582.2410165603 }, { "content": "fn main() -> Result<()> {\n\n let matches = get_args_matches();\n\n let grid = get_grid(&matches)?;\n\n let wall_percentage = get_wall_percentage(&matches);\n\n let heuristic = get_heuristic(&matches);\n\n let mut frontend = FrontEnd::new(grid, wall_percentage, heuristic);\n\n if matches.is_present(\"compat\") {\n\n frontend.use_compatibility_mode();\n\n }\n\n frontend.run()\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 16, "score": 52318.19090911687 }, { "content": "\n\nimpl fmt::Display for Content {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Content::Car => write!(f, \"{}\", \"🚗\".on_black()),\n\n Content::Goal => write!(f, \"{}\", \"🏁\".on_black()),\n\n Content::Wall => write!(f, \"{}\", \" \".on_red()),\n\n Content::Empty => write!(f, \"{}\", 
\" \".on_black()),\n\n Content::Explored => write!(f, \"{}\", \" \".on_yellow()),\n\n Content::Trace(dir) => match dir {\n\n Direction::Left => write!(f, \"{}\", \"←←\".on_black()),\n\n Direction::Up => write!(f, \"{}\", \"↑↑\".on_black()),\n\n Direction::Right => write!(f, \"{}\", \"→→\".on_black()),\n\n Direction::Down => write!(f, \"{}\", \"↓↓\".on_black()),\n\n },\n\n }\n\n }\n\n}\n", "file_path": "grid/src/content.rs", "rank": 17, "score": 47706.2932457319 }, { "content": "use crossterm::style::Colorize;\n\nuse std::fmt;\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Content {\n\n Car,\n\n Goal,\n\n Wall,\n\n Empty,\n\n Trace(Direction),\n\n Explored,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Direction {\n\n Left,\n\n Up,\n\n Right,\n\n Down,\n\n}\n", "file_path": "grid/src/content.rs", "rank": 18, "score": 47706.1783143322 }, { "content": " compatibility_mode: bool,\n\n}\n\n\n\nimpl FrontEnd {\n\n pub fn new(grid: Grid, wall_percentage: usize, heuristic: Heuristic) -> Self {\n\n assert!(wall_percentage <= 100);\n\n FrontEnd {\n\n grid,\n\n state: State::Wall,\n\n wall_percentage,\n\n heuristic,\n\n status_msg: String::new(),\n\n compatibility_mode: false,\n\n }\n\n }\n\n\n\n pub fn run(&mut self) -> Result<()> {\n\n execute!(stdout(), Hide, EnableMouseCapture, EnterAlternateScreen)?;\n\n enable_raw_mode()?;\n\n loop {\n", "file_path": "car_simulation/src/frontend/mod.rs", "rank": 19, "score": 42550.02261159859 }, { "content": " self.draw_screen()?;\n\n self.process_event()?;\n\n }\n\n }\n\n\n\n pub fn use_compatibility_mode(&mut self) {\n\n self.compatibility_mode = true;\n\n }\n\n}\n", "file_path": "car_simulation/src/frontend/mod.rs", "rank": 20, "score": 42547.97972269843 }, { "content": "use crossterm::{\n\n cursor::Hide,\n\n event::EnableMouseCapture,\n\n execute,\n\n terminal::{enable_raw_mode, EnterAlternateScreen},\n\n Result,\n\n};\n\nuse grid::Grid;\n\nuse std::io::{stdout, Write};\n\nmod events;\n\nmod run_simulation;\n\nmod 
screen;\n\n\n\n#[derive(PartialEq)]\n", "file_path": "car_simulation/src/frontend/mod.rs", "rank": 21, "score": 42546.410833464826 }, { "content": "#[derive(PartialEq)]\n\nenum State {\n\n Car,\n\n Goal,\n\n Wall,\n\n Remove,\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum Heuristic {\n\n Euclidean,\n\n Manhattan,\n\n Chebyshev,\n\n}\n\n\n\npub struct FrontEnd {\n\n grid: Grid,\n\n state: State,\n\n wall_percentage: usize,\n\n heuristic: Heuristic,\n\n status_msg: String,\n", "file_path": "car_simulation/src/frontend/mod.rs", "rank": 22, "score": 40771.12490476072 }, { "content": "fn main() {\n\n println!(\"Time benchmarks\");\n\n println!(\n\n \"{: <10} {: >12} {: >12} {: >12}\",\n\n \"15%\", \"200x200\", \"100x100\", \"50x50\"\n\n );\n\n println!(\n\n \"{: <10} {: >10}μs {: >10}μs {: >10}μs\",\n\n \"Manhattan\",\n\n get_average_duration(200, 200, 15, get_manhattan_dist, 5000),\n\n get_average_duration(100, 100, 15, get_manhattan_dist, 5000),\n\n get_average_duration(50, 50, 15, get_manhattan_dist, 5000),\n\n );\n\n println!(\n\n \"{: <10} {: >10}μs {: >10}μs {: >10}μs\",\n\n \"Euclidean\",\n\n get_average_duration(200, 200, 15, get_euclidean_dist, 5000),\n\n get_average_duration(100, 100, 15, get_euclidean_dist, 5000),\n\n get_average_duration(50, 50, 15, get_euclidean_dist, 5000),\n\n );\n", "file_path": "benchmarks/src/main.rs", "rank": 23, "score": 37039.44925417092 }, { "content": "fn get_average_explored(\n\n m: usize,\n\n n: usize,\n\n wall_percentage: usize,\n\n heuristic: fn((usize, usize), (usize, usize)) -> f32,\n\n repetitions: usize,\n\n) -> u128 {\n\n let mut grid = Grid::new(m, n);\n\n let mut acc = 0;\n\n for _ in 0..repetitions {\n\n acc += loop {\n\n grid.fill_random(wall_percentage);\n\n let result = grid.find_path(heuristic);\n\n if let Some(path_result) = result {\n\n break path_result.get_n_explored();\n\n }\n\n } as u128\n\n }\n\n acc / repetitions as u128\n\n}\n\n\n", "file_path": "benchmarks/src/main.rs", "rank": 24, "score": 
35174.09012918645 }, { "content": "fn get_average_duration(\n\n m: usize,\n\n n: usize,\n\n wall_percentage: usize,\n\n heuristic: fn((usize, usize), (usize, usize)) -> f32,\n\n repetitions: usize,\n\n) -> u128 {\n\n let mut grid = Grid::new(m, n);\n\n let mut acc = 0;\n\n for _ in 0..repetitions {\n\n acc += loop {\n\n grid.fill_random(wall_percentage);\n\n let instant = Instant::now();\n\n let result = grid.find_path(heuristic);\n\n let duration = instant.elapsed();\n\n if let Some(_) = result {\n\n break duration.as_micros();\n\n }\n\n }\n\n }\n\n acc / repetitions as u128\n\n}\n\n\n", "file_path": "benchmarks/src/main.rs", "rank": 25, "score": 35174.09012918645 }, { "content": " break lenght;\n\n }\n\n lenght += 1;\n\n }\n\n }\n\n\n\n pub fn get_n_explored(&self) -> usize {\n\n self.explored\n\n }\n\n}\n\n\n\nimpl Grid {\n\n pub fn find_path(\n\n &mut self,\n\n heuristic: fn((usize, usize), (usize, usize)) -> f32,\n\n ) -> Option<PathResult> {\n\n assert!(self.car.is_some() && self.goal.is_some());\n\n let car_pos = self.car.unwrap();\n\n let goal_pos = self.goal.unwrap();\n\n\n", "file_path": "grid/src/path_finding.rs", "rank": 36, "score": 21581.414801534724 }, { "content": " if pos.1 + 1 < self.m() {\n\n neighs.push((pos.0, pos.1 + 1));\n\n }\n\n if pos.1 != 0 {\n\n neighs.push((pos.0, pos.1 - 1));\n\n }\n\n neighs\n\n .into_iter()\n\n .filter(|pos| {\n\n if let Content::Wall = self.grid[pos.1][pos.0] {\n\n false\n\n } else {\n\n true\n\n }\n\n })\n\n .collect()\n\n }\n\n\n\n pub fn clear_path(&mut self) {\n\n for cell in self.grid.iter_mut().map(|row| row.iter_mut()).flatten() {\n\n if let Content::Trace(_) | Content::Explored = cell {\n\n *cell = Content::Empty;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "grid/src/path_finding.rs", "rank": 37, "score": 21581.21740895744 }, { "content": "impl Default for &AStarNode {\n\n fn default() -> Self {\n\n &AStarNode {\n\n pos: (0, 0),\n\n predecessor: None,\n\n dist: usize::MAX,\n\n guessed_dist: f32::MAX,\n\n depth: 
0,\n\n }\n\n }\n\n}\n\n\n\nimpl Eq for AStarNode {}\n\n\n\nimpl PartialEq for AStarNode {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.guessed_dist == other.guessed_dist && self.depth == other.depth\n\n }\n\n}\n\n\n", "file_path": "grid/src/path_finding.rs", "rank": 38, "score": 21576.18163278859 }, { "content": " break;\n\n }\n\n self.grid[current.1][current.0] = match current {\n\n (x, y) if x == prev.0 + 1 && y == prev.1 => Content::Trace(Direction::Left),\n\n (x, y) if x + 1 == prev.0 && y == prev.1 => Content::Trace(Direction::Right),\n\n (x, y) if x == prev.0 && y == prev.1 + 1 => Content::Trace(Direction::Up),\n\n (x, y) if x == prev.0 && y + 1 == prev.1 => Content::Trace(Direction::Down),\n\n _ => unreachable!(\"Corrupted predecessors table\"),\n\n };\n\n }\n\n }\n\n\n\n fn get_neighbours(&self, pos: (usize, usize)) -> Vec<(usize, usize)> {\n\n let mut neighs = Vec::new();\n\n if pos.0 + 1 < self.n() {\n\n neighs.push((pos.0 + 1, pos.1));\n\n }\n\n if pos.0 != 0 {\n\n neighs.push((pos.0 - 1, pos.1));\n\n }\n", "file_path": "grid/src/path_finding.rs", "rank": 39, "score": 21575.144674272302 }, { "content": " }\n\n x => x,\n\n }\n\n }\n\n}\n\n\n\npub struct PathResult {\n\n explored: usize,\n\n node_map: FxHashMap<(usize, usize), AStarNode>,\n\n start: (usize, usize),\n\n end: (usize, usize),\n\n}\n\n\n\nimpl PathResult {\n\n pub fn get_path_length(self) -> usize {\n\n let mut current = self.end;\n\n let mut lenght = 0;\n\n loop {\n\n current = self.node_map[&current].predecessor.unwrap();\n\n if current == self.start {\n", "file_path": "grid/src/path_finding.rs", "rank": 40, "score": 21575.071819058492 }, { "content": " let mut node_map = FxHashMap::default();\n\n //reserve space for twice the space needed for the expected length upfront to avoid reallocations\n\n node_map.reserve(heuristic(car_pos, goal_pos) as usize * 2);\n\n node_map.insert(\n\n car_pos,\n\n AStarNode {\n\n pos: car_pos,\n\n predecessor: None,\n\n dist: 0,\n\n guessed_dist: 
heuristic(car_pos, goal_pos),\n\n depth: 0,\n\n },\n\n );\n\n\n\n let mut priority_queue = BinaryHeap::new();\n\n priority_queue.push(node_map[&car_pos]);\n\n let mut iteration_count = 0;\n\n\n\n while !priority_queue.is_empty() {\n\n iteration_count += 1;\n", "file_path": "grid/src/path_finding.rs", "rank": 41, "score": 21574.758425442953 }, { "content": "use super::content::{Content, Direction};\n\nuse super::Grid;\n\nuse fxhash::FxHashMap;\n\nuse std::cmp::Ordering;\n\nuse std::collections::BinaryHeap;\n\n\n\n#[derive(Copy, Clone)]\n", "file_path": "grid/src/path_finding.rs", "rank": 42, "score": 21573.571018835024 }, { "content": " neigh_node.guessed_dist = dist as f32 + heuristic(neigh_pos, goal_pos);\n\n neigh_node.depth = current.depth + 1;\n\n priority_queue.push(neigh_node.clone());\n\n }\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn draw_path(\n\n &mut self,\n\n node_map: &FxHashMap<(usize, usize), AStarNode>,\n\n start: (usize, usize),\n\n end: (usize, usize),\n\n ) {\n\n let mut current = end;\n\n loop {\n\n let prev = current;\n\n current = node_map[&current].predecessor.unwrap();\n\n if current == start {\n", "file_path": "grid/src/path_finding.rs", "rank": 43, "score": 21573.455313757862 }, { "content": "impl Ord for AStarNode {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n //this will panic if guessed_dist is NaN. 
That shouldn't happen and the floating point\n\n //is needed to use heuristics like the euclidean distance\n\n other.guessed_dist.partial_cmp(&self.guessed_dist).unwrap()\n\n }\n\n}\n\n\n\nimpl PartialOrd for AStarNode {\n\n //They get compared in inversed order\n\n //so an AStarNode will have more priority if its guessed distance is smaller\n\n //if equal the node with the higher depth will have more priority\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n match other.guessed_dist.partial_cmp(&self.guessed_dist) {\n\n Some(Ordering::Equal) => {\n\n if other.depth < self.depth {\n\n Some(Ordering::Greater)\n\n } else {\n\n Some(Ordering::Less)\n\n }\n", "file_path": "grid/src/path_finding.rs", "rank": 44, "score": 21573.323305117618 }, { "content": " let current = priority_queue.pop().unwrap();\n\n if current.pos == goal_pos {\n\n self.draw_path(&node_map, car_pos, goal_pos);\n\n return Some(PathResult {\n\n explored: iteration_count,\n\n node_map,\n\n start: car_pos,\n\n end: goal_pos,\n\n });\n\n }\n\n if current.pos != car_pos {\n\n self.grid[current.pos.1][current.pos.0] = Content::Explored;\n\n }\n\n let dist = current.dist + 1;\n\n for neigh_pos in self.get_neighbours(current.pos) {\n\n if dist < node_map.get(&neigh_pos).unwrap_or_default().dist {\n\n let neigh_node = node_map.entry(neigh_pos).or_default();\n\n neigh_node.pos = neigh_pos;\n\n neigh_node.predecessor = Some(current.pos);\n\n neigh_node.dist = dist;\n", "file_path": "grid/src/path_finding.rs", "rank": 45, "score": 21572.10464214399 }, { "content": " The file must have:\n\n -The number of rows in the first row\n\n -The number of columns in the second column\n\n -A representation of the map using:\n\n -C. as the car\n\n -G. as the goal\n\n -X. 
as walls\n\n -another character as empty cells\",\n\n ),\n\n ])\n\n .get_matches()\n\n}\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 46, "score": 20676.922493605813 }, { "content": "use clap::{App, Arg, ArgMatches};\n\nuse crossterm::{terminal::size, Result};\n\nuse grid::Grid;\n\nmod frontend;\n\nuse frontend::{FrontEnd, Heuristic};\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n", "file_path": "car_simulation/src/main.rs", "rank": 47, "score": 20671.7112123769 }, { "content": " .conflicts_with(\"manhattan\")\n\n .conflicts_with(\"chebyshev\")\n\n .help(\"Uses euclidean distance as the heuristic function\"),\n\n Arg::with_name(\"manhattan\")\n\n .long(\"manhattan\")\n\n .help(\"Uses manhattan distance as the heuristic function. This is the default\"),\n\n Arg::with_name(\"chebyshev\")\n\n .long(\"chebyshev\")\n\n .help(\"Uses chebyshev distance as the heuristic function\"),\n\n Arg::with_name(\"compat\")\n\n .long(\"compat\")\n\n .help(\"Uses a compatibility mode. 
Use this option if you can't see the car or the goal flag correctly\"),\n\n Arg::with_name(\"file\")\n\n .long(\"file\")\n\n .short(\"f\")\n\n .takes_value(true)\n\n .conflicts_with_all(&[\"m\", \"n\"])\n\n .help(\"Reads a map from the specified file\")\n\n .long_help(\n\n \"Reads a map from the specified file\n", "file_path": "car_simulation/src/main.rs", "rank": 48, "score": 20668.778693069828 }, { "content": " }\n\n\n\n //keep in mind that cell are two spaces wide\n\n fn is_inside_grid(&self, pos: (usize, usize)) -> bool {\n\n pos.0 <= self.grid.n() * 2 + 1 && pos.0 >= 2 && pos.1 <= self.grid.m() && pos.1 >= 1\n\n }\n\n\n\n fn set_cell(&mut self, pos: (usize, usize)) {\n\n match self.state {\n\n State::Car => self\n\n .grid\n\n .set_car(pos.0 as usize / 2 - 1, pos.1 as usize - 1),\n\n State::Goal => self\n\n .grid\n\n .set_goal(pos.0 as usize / 2 - 1, pos.1 as usize - 1),\n\n State::Wall => self\n\n .grid\n\n .set_wall(pos.0 as usize / 2 - 1, pos.1 as usize - 1),\n\n State::Remove => self\n\n .grid\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 49, "score": 19782.721064235684 }, { "content": " pub(super) fn process_event(&mut self) -> Result<()> {\n\n let term_size = size()?;\n\n let buttons_y = term_size.1 - 2;\n\n match read()? 
{\n\n Event::Mouse(MouseEvent::Down(MouseButton::Left, x, y, ..)) => {\n\n self.status_msg.clear();\n\n if self.is_inside_grid((x as usize, y as usize)) {\n\n self.set_cell((x as usize, y as usize));\n\n } else if y == buttons_y {\n\n self.process_clicked_button(x, term_size)?;\n\n }\n\n }\n\n Event::Mouse(MouseEvent::Drag(MouseButton::Left, x, y, ..)) => {\n\n if self.is_inside_grid((x as usize, y as usize)) {\n\n self.set_cell((x as usize, y as usize));\n\n }\n\n }\n\n _ => {}\n\n }\n\n Ok(())\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 50, "score": 19779.233395291434 }, { "content": " .set_empty(pos.0 as usize / 2 - 1, pos.1 as usize - 1),\n\n }\n\n }\n\n\n\n fn process_clicked_button(&mut self, x: u16, term_size: (u16, u16)) -> Result<()> {\n\n if x >= ROWS_MINUS_BUTTON_BEGIN && x <= ROWS_MINUS_BUTTON_END {\n\n self.change_height_while_clicked(-1, term_size.1 as usize)?;\n\n } else if x >= ROWS_PLUS_BUTTON_BEGIN && x <= ROWS_PLUS_BUTTON_END {\n\n self.change_height_while_clicked(1, term_size.1 as usize)?;\n\n } else if x >= COLUMNS_MINUS_BUTTON_BEGIN && x <= COLUMNS_MINUS_BUTTON_END {\n\n self.change_width_while_clicked(-1, term_size.0 as usize)?;\n\n } else if x >= COLUMNS_PLUS_BUTTON_BEGIN && x <= COLUMNS_PLUS_BUTTON_END {\n\n self.change_width_while_clicked(1, term_size.0 as usize)?;\n\n } else if x >= CAR_BUTTON_BEGIN && x <= CAR_BUTTON_END {\n\n self.state = State::Car;\n\n } else if x >= GOAL_BUTTON_BEGIN && x <= GOAL_BUTTON_END {\n\n self.state = State::Goal;\n\n } else if x >= WALL_BUTTON_BEGIN && x <= WALL_BUTTON_END {\n\n self.state = State::Wall;\n\n } else if x >= REMOVE_BUTTON_BEGIN && x <= REMOVE_BUTTON_END {\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 51, "score": 19779.040335277336 }, { "content": "use super::{FrontEnd, Heuristic, State};\n\nuse crossterm::{\n\n cursor::MoveTo,\n\n queue,\n\n style::Styler,\n\n terminal::{size, Clear, ClearType},\n\n Result,\n\n};\n\nuse std::io::{stdout, 
Write};\n\n\n\nimpl FrontEnd {\n\n pub(super) fn draw_screen(&mut self) -> Result<()> {\n\n queue!(stdout(), Clear(ClearType::All))?;\n\n self.draw_grid()?;\n\n self.draw_buttons()?;\n\n self.draw_status_bar()?;\n\n stdout().flush()?;\n\n Ok(())\n\n }\n\n\n", "file_path": "car_simulation/src/frontend/screen.rs", "rank": 52, "score": 19776.553170083534 }, { "content": " self.draw_screen()?;\n\n if poll(Duration::from_millis(50))? {\n\n if let Event::Mouse(MouseEvent::Up(MouseButton::Left, ..)) = read()? {\n\n break;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n fn change_width_while_clicked(&mut self, change: isize, term_width: usize) -> Result<()> {\n\n loop {\n\n let desired_width = self.grid.n() as isize + change;\n\n self.grid\n\n .set_width(min(max(desired_width, 1) as usize, term_width / 2 - 2));\n\n self.draw_screen()?;\n\n if poll(Duration::from_millis(50))? {\n\n if let Event::Mouse(MouseEvent::Up(MouseButton::Left, ..)) = read()? {\n\n break;\n\n }\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 53, "score": 19776.45844416712 }, { "content": " self.state = State::Remove;\n\n } else if x >= CLEAR_BUTTON_BEGIN && x <= CLEAR_BUTTON_END {\n\n self.grid.clear();\n\n } else if x >= RANDOM_BUTTON_BEGIN && x <= RANDOM_BUTTON_END {\n\n self.grid.fill_random(self.wall_percentage);\n\n } else if x >= HEURISTIC_BUTTON_BEGIN && x <= HEURISTIC_BUTTON_END {\n\n self.change_heuristic();\n\n } else if x >= RUN_BUTTON_BEGIN && x <= RUN_BUTTON_END {\n\n self.run_simulation();\n\n } else if x >= QUIT_BUTTON_BEGIN && x <= QUIT_BUTTON_END {\n\n quit();\n\n }\n\n Ok(())\n\n }\n\n\n\n fn change_height_while_clicked(&mut self, change: isize, term_height: usize) -> Result<()> {\n\n loop {\n\n let desired_height = self.grid.m() as isize + change;\n\n self.grid\n\n .set_height(min(max(desired_height, 1) as usize, term_height - 4));\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 54, "score": 19776.104844965863 }, { "content": " stdout(),\n\n 
\"{0}Rows{1} {0}Columns{1} {2} {3} {4} {5} {6} {7} {8} {9} {10}\\n\\r\",\n\n \"--\".negative(),\n\n \"++\".negative(),\n\n if self.state == State::Car {\n\n \"Car\".bold()\n\n } else {\n\n \"Car\".negative()\n\n },\n\n if self.state == State::Goal {\n\n \"Goal\".bold()\n\n } else {\n\n \"Goal\".negative()\n\n },\n\n if self.state == State::Wall {\n\n \"Wall\".bold()\n\n } else {\n\n \"Wall\".negative()\n\n },\n\n if self.state == State::Remove {\n", "file_path": "car_simulation/src/frontend/screen.rs", "rank": 55, "score": 19774.099990190924 }, { "content": " fn draw_grid(&mut self) -> Result<()> {\n\n queue!(stdout(), MoveTo(0, 0))?;\n\n if self.compatibility_mode {\n\n write!(\n\n stdout(),\n\n \"{}\",\n\n self.grid\n\n .to_string()\n\n .replace(\"🚗\", \"CC\")\n\n .replace(\"🏁\", \"GG\")\n\n )?;\n\n } else {\n\n write!(stdout(), \"{}\", self.grid)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn draw_buttons(&mut self) -> Result<()> {\n\n queue!(stdout(), MoveTo(0, size()?.1 - 2))?;\n\n write!(\n", "file_path": "car_simulation/src/frontend/screen.rs", "rank": 56, "score": 19773.574447191877 }, { "content": " }\n\n }\n\n Ok(())\n\n }\n\n\n\n fn change_heuristic(&mut self) {\n\n self.heuristic = match self.heuristic {\n\n Heuristic::Manhattan => Heuristic::Euclidean,\n\n Heuristic::Euclidean => Heuristic::Chebyshev,\n\n Heuristic::Chebyshev => Heuristic::Manhattan,\n\n }\n\n }\n\n}\n\n\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 57, "score": 19773.076921996468 }, { "content": " \"Remove\".bold()\n\n } else {\n\n \"Remove\".negative()\n\n },\n\n \"Clear\".negative(),\n\n \"Random\".negative(),\n\n match self.heuristic {\n\n Heuristic::Euclidean => \"Euclidean\".bold(),\n\n Heuristic::Manhattan => \"Manhattan\".bold(),\n\n Heuristic::Chebyshev => \"Chebyshev\".bold(),\n\n },\n\n \"Run\".negative(),\n\n \"Quit\".negative()\n\n )?;\n\n Ok(())\n\n }\n\n\n\n fn draw_status_bar(&mut self) -> Result<()> {\n\n write!(stdout(), \"{}\", self.status_msg)?;\n\n 
Ok(())\n\n }\n\n}\n", "file_path": "car_simulation/src/frontend/screen.rs", "rank": 58, "score": 19772.434550076003 }, { "content": "use super::{FrontEnd, Heuristic, State};\n\nuse crossterm::{\n\n cursor::Show,\n\n event::{poll, read, DisableMouseCapture, Event, MouseButton, MouseEvent},\n\n execute,\n\n terminal::{disable_raw_mode, size, LeaveAlternateScreen},\n\n Result,\n\n};\n\nuse std::cmp::{max, min};\n\nuse std::io::{stdout, Write};\n\nuse std::time::Duration;\n\n\n\nconst ROWS_MINUS_BUTTON_BEGIN: u16 = 0;\n\nconst ROWS_MINUS_BUTTON_END: u16 = 1;\n\nconst ROWS_PLUS_BUTTON_BEGIN: u16 = 6;\n\nconst ROWS_PLUS_BUTTON_END: u16 = 7;\n\nconst COLUMNS_MINUS_BUTTON_BEGIN: u16 = 9;\n\nconst COLUMNS_MINUS_BUTTON_END: u16 = 10;\n\nconst COLUMNS_PLUS_BUTTON_BEGIN: u16 = 18;\n\nconst COLUMNS_PLUS_BUTTON_END: u16 = 19;\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 59, "score": 19771.773613596193 }, { "content": "const CAR_BUTTON_BEGIN: u16 = 21;\n\nconst CAR_BUTTON_END: u16 = 23;\n\nconst GOAL_BUTTON_BEGIN: u16 = 25;\n\nconst GOAL_BUTTON_END: u16 = 28;\n\nconst WALL_BUTTON_BEGIN: u16 = 30;\n\nconst WALL_BUTTON_END: u16 = 33;\n\nconst REMOVE_BUTTON_BEGIN: u16 = 35;\n\nconst REMOVE_BUTTON_END: u16 = 40;\n\nconst CLEAR_BUTTON_BEGIN: u16 = 42;\n\nconst CLEAR_BUTTON_END: u16 = 46;\n\nconst RANDOM_BUTTON_BEGIN: u16 = 48;\n\nconst RANDOM_BUTTON_END: u16 = 53;\n\nconst HEURISTIC_BUTTON_BEGIN: u16 = 55;\n\nconst HEURISTIC_BUTTON_END: u16 = 63;\n\nconst RUN_BUTTON_BEGIN: u16 = 65;\n\nconst RUN_BUTTON_END: u16 = 67;\n\nconst QUIT_BUTTON_BEGIN: u16 = 69;\n\nconst QUIT_BUTTON_END: u16 = 72;\n\n\n\nimpl FrontEnd {\n", "file_path": "car_simulation/src/frontend/events.rs", "rank": 60, "score": 19771.37923221826 }, { "content": "use super::{FrontEnd, Heuristic};\n\nuse std::cmp::{max, min};\n\nuse std::time::Instant;\n\n\n\nimpl FrontEnd {\n\n pub(super) fn run_simulation(&mut self) {\n\n self.grid.clear_path();\n\n if !self.check_valid_state() {\n\n self.status_msg = 
String::from(\"A goal and a car must be in the grid\");\n\n return;\n\n }\n\n let heuristic_fn = match self.heuristic {\n\n Heuristic::Euclidean => get_euclidean_dist,\n\n Heuristic::Manhattan => get_manhattan_dist,\n\n Heuristic::Chebyshev => get_chebyshev_dist,\n\n };\n\n let time = Instant::now();\n\n let path_result = self.grid.find_path(heuristic_fn);\n\n let time_used = time.elapsed();\n\n match path_result {\n", "file_path": "car_simulation/src/frontend/run_simulation.rs", "rank": 61, "score": 18959.519737209834 }, { "content": " None => self.status_msg = String::from(\"Couldn't find a path\"),\n\n Some(path_result) => {\n\n self.status_msg =\n\n format!(\n\n \"Path with length {1} found! {0} cells were explored. Approximate time used: {2} μs\",\n\n path_result.get_n_explored(), path_result.get_path_length(), time_used.as_micros()\n\n );\n\n }\n\n }\n\n }\n\n\n\n fn check_valid_state(&self) -> bool {\n\n self.grid.has_car() && self.grid.has_goal()\n\n }\n\n}\n\n\n", "file_path": "car_simulation/src/frontend/run_simulation.rs", "rank": 62, "score": 18957.932595692222 }, { "content": "use grid::Grid;\n\nuse std::cmp::{max, min};\n\nuse std::time::Instant;\n", "file_path": "benchmarks/src/main.rs", "rank": 63, "score": 5.894982962416464 }, { "content": " # PRÁCTICA. ESTRATEGIAS DE BÚSQUEDA \n\nProfesores: Marcos Moreno Vega, Elena Sánchez Nielsen \n\n\n\nAlumno: Daniel del Castillo de la Rosa\n\n## Asignatura: Inteligencia Artificial \n\n## Curso: 2020/2021 \n\n## OBJETIVO \n\nEl objetivo del desarrollo de la actividad práctica es la utilización de estrategias de búsqueda como propuesta de resolución en la determinación de la planificación de trayectorias para coches autónomos. 
\n\n### DESCRIPCIÓN DEL PROBLEMA \n\nEl entorno del coche autónomo se puede suponer rectangular de dimensiones M x N y constituido por celdas libres y ocupadas, donde el coche puede efectuar acciones de movimiento, una cada vez, desde la casilla actual a una de las 4-vecinas (Norte, Sur, Este u Oeste) que no se encuentre ocupada. Las casillas ocupadas corresponden a obstáculos. Las casillas libres corresponden con celdas libres de obstáculos. El coche dispone de un vector de percepción, constituido por: \n\n\n\n* El estado de un sensor de proximidad (SN, SO, SS, SE) por cada una de las direcciones de movimiento, que detecta si el vecino correspondiente está ocupado por algún obstáculo (Si=1). \n\n\n\nEl desarrollo de la práctica, consiste en: \n\n\n\n1. **Diseño de un simulador de entorno**: se debe realizar un simulador de entorno que permita: (1) Definición de celdas de dimensiones variables M x N; (2) Definir de manera cómoda la posición de los obstáculos, de forma aleatoria y manual y (3) permitir una visualización de la trayectoria determinada. \n\n\n\n2. **Planificación del camino usando estrategias heurísticas**: Suponiendo un único coche autónomo según el escenario definido en el apartado anterior, se deberá determinar la trayectoria óptima partiendo desde una posición inicial hasta alcanzar una posición final. Ambas posiciones son definidas por el usuario, en el entorno de simulación desarrollado previamente.Se puede disponer de un mapa topológico que contenga la descripción del entorno. \n\n\n\n3. **Evaluación Experimental**: se evaluará los resultados obtenidos para la estrategia implementada utilizando como mínimo dos funciones heurísticas diferentes y tres tipos de tamaños diferentes de escenarios. \n", "file_path": "README.md", "rank": 64, "score": 5.275862159470835 } ]
Rust
src/parser/rfc3501/body.rs
filtsin/rimap
21954bdd1a848fe4a17e4180552ab4e58027a100
use super::{core::*, grammar::envelope}; use crate::parser::types::{ Body, BodyEnc, BodyFields, BodyTypeBasic, BodyTypeMsg, BodyTypeText, MediaBasic, MediaType, }; use nom::{ branch::alt, bytes::streaming::{tag, tag_no_case}, combinator::{map, value}, multi::separated_list1, sequence::{delimited, preceded, separated_pair, terminated, tuple}, IResult, }; pub(crate) fn body(i: &[u8]) -> IResult<&[u8], Body> { todo!() } pub(crate) fn body_type_1part(i: &[u8]) -> IResult<&[u8], Body<'_>> { todo!() } pub(crate) fn body_type_basic(i: &[u8]) -> IResult<&[u8], BodyTypeBasic<'_>> { map( separated_pair(media_basic, tag(" "), body_fields), |(media, fields)| BodyTypeBasic { media, fields }, )(i) } pub(crate) fn media_basic(i: &[u8]) -> IResult<&[u8], MediaBasic<'_>> { map( separated_pair( alt(( delimited( tag("\""), alt(( value(MediaType::Application, tag_no_case("APPLICATION")), value(MediaType::Audio, tag_no_case("AUDIO")), value(MediaType::Image, tag_no_case("IMAGE")), value(MediaType::Message, tag_no_case("MESSAGE")), value(MediaType::Video, tag_no_case("VIDEO")), )), tag("\""), ), map(string, MediaType::Custom), )), tag(" "), string, ), |(media_type, subtype)| MediaBasic { media_type, subtype, }, )(i) } pub(crate) fn body_fields(i: &[u8]) -> IResult<&[u8], BodyFields<'_>> { map( tuple(( body_fld_param, tag(" "), nstring, tag(" "), nstring, tag(" "), body_fld_enc, tag(" "), number, )), |(param, _, id, _, desc, _, enc, _, octets)| BodyFields { param, id, desc, enc, octets, }, )(i) } pub(crate) fn body_fld_param(i: &[u8]) -> IResult<&[u8], Option<Vec<(&str, &str)>>> { alt(( map( delimited( tag("("), separated_list1(tag(" "), separated_pair(string, tag(" "), string)), tag(")"), ), Some, ), nil, ))(i) } pub(crate) fn body_fld_enc(i: &[u8]) -> IResult<&[u8], BodyEnc<'_>> { alt(( delimited( tag("\""), alt(( value(BodyEnc::N7bit, tag_no_case("7BIT")), value(BodyEnc::N8bit, tag_no_case("8BIT")), value(BodyEnc::Binary, tag_no_case("BINARY")), value(BodyEnc::Base64, 
tag_no_case("BASE64")), value(BodyEnc::QuotedPrintable, tag_no_case("QUOTED-PRINTABLE")), )), tag("\""), ), map(string, BodyEnc::Custom), ))(i) } pub(crate) fn body_type_msg(i: &[u8]) -> IResult<&[u8], BodyTypeMsg<'_>> { map( tuple(( tag_no_case("\"MESSAGE\" \"RFC822\" "), body_fields, tag(" "), envelope, tag(" "), body, tag(" "), number, )), |(_, fields, _, envelope, _, body, _, lines)| BodyTypeMsg { fields, envelope, body: Box::new(body), lines, }, )(i) } pub(crate) fn body_type_text(i: &[u8]) -> IResult<&[u8], BodyTypeText<'_>> { map( tuple(( tag_no_case("\"TEXT\" "), string, tag(" "), body_fields, tag(" "), number, )), |(_, subtype, _, fields, _, lines)| BodyTypeText { subtype, fields, lines, }, )(i) } pub(crate) fn body_ext_1part(i: &[u8]) -> IResult<&[u8], ()> { todo!() }
use super::{core::*, grammar::envelope}; use crate::parser::types::{ Body, BodyEnc, BodyFields, BodyTypeBasic, BodyTypeMsg, BodyTypeText, MediaBasic, MediaType, }; use nom::{ branch::alt, bytes::streaming::{tag, tag_no_case}, combinator::{map, value}, multi::separated_list1, sequence::{delimited, preceded, separated_pair, terminated, tuple}, IResult, }; pub(crate) fn body(i: &[u8]) -> IResult<&[u8], Body> { todo!() } pub(crate) fn body_type_1part(i: &[u8]) -> IResult<&[u8], Body<'_>> { todo!() } pub(crate) fn body_type_basic(i: &[u8]) -> IResult<&[u8], BodyTypeBasic<'_>> { map( separated_pair(media_basic, tag(" "), body_fields), |(media, fields)| BodyTypeBasic { media, fields }, )(i) } pub(crate) fn media_basic(i: &[u8]) -> IResult<&[u8], MediaBasic<'_>> { map( separated_pair( alt(( delimited( tag("\""), alt(( value(MediaType::Application, tag_no_case("APPLICATION")), value(MediaType::Audio, tag_no_case("AUDIO")), value(MediaType::Image, tag_no_case("IMAGE")), value(MediaType::Message, tag_no_case("MESSAGE")), value(MediaType::Video, tag_no_case("VIDEO")), )), tag("\""), ), map(string, MediaType::Custom), )), tag(" "), string, ), |(media_type, subtype)| MediaBasic { media_type, subtype, }, )(i) } pub(crate) fn body_fields(i: &[u8]) ->
body: Box::new(body), lines, }, )(i) } pub(crate) fn body_type_text(i: &[u8]) -> IResult<&[u8], BodyTypeText<'_>> { map( tuple(( tag_no_case("\"TEXT\" "), string, tag(" "), body_fields, tag(" "), number, )), |(_, subtype, _, fields, _, lines)| BodyTypeText { subtype, fields, lines, }, )(i) } pub(crate) fn body_ext_1part(i: &[u8]) -> IResult<&[u8], ()> { todo!() }
IResult<&[u8], BodyFields<'_>> { map( tuple(( body_fld_param, tag(" "), nstring, tag(" "), nstring, tag(" "), body_fld_enc, tag(" "), number, )), |(param, _, id, _, desc, _, enc, _, octets)| BodyFields { param, id, desc, enc, octets, }, )(i) } pub(crate) fn body_fld_param(i: &[u8]) -> IResult<&[u8], Option<Vec<(&str, &str)>>> { alt(( map( delimited( tag("("), separated_list1(tag(" "), separated_pair(string, tag(" "), string)), tag(")"), ), Some, ), nil, ))(i) } pub(crate) fn body_fld_enc(i: &[u8]) -> IResult<&[u8], BodyEnc<'_>> { alt(( delimited( tag("\""), alt(( value(BodyEnc::N7bit, tag_no_case("7BIT")), value(BodyEnc::N8bit, tag_no_case("8BIT")), value(BodyEnc::Binary, tag_no_case("BINARY")), value(BodyEnc::Base64, tag_no_case("BASE64")), value(BodyEnc::QuotedPrintable, tag_no_case("QUOTED-PRINTABLE")), )), tag("\""), ), map(string, BodyEnc::Custom), ))(i) } pub(crate) fn body_type_msg(i: &[u8]) -> IResult<&[u8], BodyTypeMsg<'_>> { map( tuple(( tag_no_case("\"MESSAGE\" \"RFC822\" "), body_fields, tag(" "), envelope, tag(" "), body, tag(" "), number, )), |(_, fields, _, envelope, _, body, _, lines)| BodyTypeMsg { fields, envelope,
random
[ { "content": "fn vec_to_string(v: &Vec<u8>) -> String {\n\n std::string::String::from_utf8_lossy(&v[..]).into_owned()\n\n}\n", "file_path": "src/error.rs", "rank": 0, "score": 90814.00460926183 }, { "content": "pub fn create_custom_error(msg: String) -> Error {\n\n Error::Custom(msg)\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 1, "score": 51457.04885232965 }, { "content": "// CHAR8 = %x01-ff;\n\n// any octet except 0\n\nfn is_char8(i: u8) -> bool {\n\n i != 0x00\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 2, "score": 51435.4882609334 }, { "content": "// SP = %x20;\n\n// space\n\nfn is_space(i: u8) -> bool {\n\n i == b' '\n\n}\n\n\n\n// rfc 3501\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 3, "score": 51435.4882609334 }, { "content": "// CHAR = %x01-7F;\n\n// any 7-bit US-ASCII character excluding 0\n\nfn is_char(i: u8) -> bool {\n\n (0x01..=0x7f).contains(&i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 4, "score": 51435.4882609334 }, { "content": "// CTL = %x00-1F;\n\n// controls\n\nfn is_ctl(i: u8) -> bool {\n\n (0x00..=0x1f).contains(&i) || i == 0x7f\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 5, "score": 51435.4882609334 }, { "content": "// resp-specials = ']'\n\nfn is_resp_specials(i: u8) -> bool {\n\n i == b']'\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 6, "score": 49640.334941377885 }, { "content": "// TEXT-CHAR = <any CHAR except CR and LF>\n\nfn is_text_char(i: u8) -> bool {\n\n is_char(i) && i != b'\\r' && i != b'\\n'\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 7, "score": 49640.334941377885 }, { "content": "// quoted-specials = '\"' | '\\'\n\nfn is_quoted_specials(i: u8) -> bool {\n\n i == b'\"' || i == b'\\\\'\n\n}\n\n\n\n// TODO: incorrect 2nd or: '\\' quoted-specials\n\n// QUOTED-CHAR = <any TEXT-CHAR except quoted-specials> | quoted-specials\n\npub(crate) fn is_quoted_char(i: u8) -> bool {\n\n is_quoted_specials(i) || 
is_text_char(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 8, "score": 49640.334941377885 }, { "content": "// list-wildcards = '%' | '*'\n\nfn is_list_wildcards(i: u8) -> bool {\n\n i == b'%' || i == b'*'\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 9, "score": 49640.334941377885 }, { "content": "// atom-specials = '(' | ')' | '{' | SP | CTL | list-wildcards | quoted-specials | resp-specials\n\nfn is_atom_specials(i: u8) -> bool {\n\n i == b')'\n\n || i == b')'\n\n || i == b'{'\n\n || is_space(i)\n\n || is_ctl(i)\n\n || is_list_wildcards(i)\n\n || is_quoted_specials(i)\n\n || is_resp_specials(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 10, "score": 49640.334941377885 }, { "content": "// ATOM-CHAR = <any CHAR except atom-specials>\n\nfn is_atom_char(i: u8) -> bool {\n\n !is_atom_specials(i) && is_char(i)\n\n}\n\n\n\n// ASTRING-CHAR = ATOM-CHAR | resp-specials\n\npub(crate) fn is_astring_char(i: u8) -> bool {\n\n is_atom_char(i) || is_resp_specials(i)\n\n}\n\n\n\n// nil = 'NIL'\n\npub(crate) fn nil<T>(i: &[u8]) -> IResult<&[u8], Option<T>> {\n\n map(tag_no_case(\"NIL\"), |_| None)(i)\n\n}\n\n\n\n// astring = 1*ASTRING-CHAR | string\n\npub(crate) fn astring(i: &[u8]) -> IResult<&[u8], &str> {\n\n alt((\n\n string,\n\n map_res(take_while1(is_astring_char), std::str::from_utf8),\n\n ))(i)\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 11, "score": 49640.334941377885 }, { "content": "// base64-char = ALPHA | DIGIT | '+' | '/'\n\nfn is_base64_char(i: u8) -> bool {\n\n is_alphanumeric(i) || i == b'+' || i == b'/'\n\n}\n\n\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 12, "score": 49640.334941377885 }, { "content": "#[test]\n\nfn parse_tagged_answer() {\n\n let response = b\"a0017 OK CAPABILITY completed\\r\\n\";\n\n\n\n let result = parse(response).unwrap();\n\n\n\n let tagged_response = ImapResponse::Response(TaggedResponse {\n\n tag: Tag::new('a', 17),\n\n resp: RespCond {\n\n status: 
ImapResult::Ok,\n\n text: resp_text(\"CAPABILITY completed\"),\n\n },\n\n });\n\n\n\n assert_eq(result, tagged_response);\n\n}\n\n\n", "file_path": "src/parser/tests/mod.rs", "rank": 13, "score": 44198.3539516174 }, { "content": "fn assert_eq((remainder, result): (&[u8], ImapResponse<'_>), target: ImapResponse<'_>) {\n\n assert_eq!(remainder.len(), 0);\n\n assert_eq!(result, target);\n\n}\n\n\n", "file_path": "src/parser/tests/mod.rs", "rank": 14, "score": 38050.86559681582 }, { "content": "//! Imap `tag` declaration\n\n//!\n\n\n\nuse std::{fmt::Display, iter::Iterator};\n\n\n\n/// `Tag` with prefixed letter\n\n/// # Example\n\n/// a1, c1001\n\n#[derive(Eq, PartialEq, Debug, Hash)]\n\npub(crate) struct Tag {\n\n prefix: char,\n\n index: u32,\n\n}\n\n\n\nimpl Tag {\n\n /// Construct new `Tag`\n\n pub(crate) fn new(prefix: char, index: u32) -> Self {\n\n Self { prefix, index }\n\n }\n\n}\n", "file_path": "src/tag.rs", "rank": 15, "score": 26659.397993258604 }, { "content": "\n\nimpl TagGenerator {\n\n /// Construct new TagGenerator\n\n pub fn new(prefix: char, index: u32) -> Self {\n\n TagGenerator { prefix, index }\n\n }\n\n}\n\n\n\nimpl Default for TagGenerator {\n\n fn default() -> Self {\n\n TagGenerator::new('a', 0)\n\n }\n\n}\n\n\n\nimpl Iterator for TagGenerator {\n\n type Item = Tag;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let result = Tag::new(self.prefix, self.index);\n\n self.index += 1;\n\n Some(result)\n\n }\n\n}\n", "file_path": "src/tag.rs", "rank": 16, "score": 26658.079722632723 }, { "content": "\n\nimpl Default for Tag {\n\n fn default() -> Self {\n\n Tag::new('a', 0)\n\n }\n\n}\n\n\n\nimpl Display for Tag {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}{}\", self.prefix, self.index)\n\n }\n\n}\n\n\n\n/// Each client command is prefixed with an identifer and a different tag is\n\n/// generated by the client for each command. 
Every call `next` method will\n\n/// return an unique `tag` for the command.\n\npub(crate) struct TagGenerator {\n\n prefix: char,\n\n index: u32,\n\n}\n", "file_path": "src/tag.rs", "rank": 17, "score": 26657.89364130878 }, { "content": "#[test]\n\nfn parse_greeting() {\n\n let response = b\"* OK IMAP4rev1 Service Ready\\r\\n\";\n\n\n\n let result = parse(response).unwrap();\n\n\n\n let greeting = ImapResponse::Greeting(Greeting {\n\n status: GreetingStatus::Ok(resp_text(\"IMAP4rev1 Service Ready\")),\n\n });\n\n\n\n assert_eq(result, greeting);\n\n}\n\n\n", "file_path": "src/parser/tests/mod.rs", "rank": 18, "score": 24858.14225189695 }, { "content": "#[test]\n\nfn parse_continue_req() {\n\n let response = b\"+ Ready\\r\\n\";\n\n\n\n let result = parse(response).unwrap();\n\n\n\n let continue_req = ImapResponse::Continue(ContinueReq::Text(resp_text(\"Ready\")));\n\n\n\n assert_eq(result, continue_req);\n\n}\n", "file_path": "src/parser/tests/mod.rs", "rank": 19, "score": 23877.14166816756 }, { "content": "fn resp_text(s: &str) -> RespText<'_> {\n\n RespText {\n\n code: vec![],\n\n text: s,\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests/mod.rs", "rank": 29, "score": 20865.059733839662 }, { "content": " alt((map(string, Some), nil))(i)\n\n}\n\n\n\n// base64-terminal = (2base64-char '==') | (3base64-char '=')\n\npub(crate) fn base64_terminal(i: &[u8]) -> IResult<&[u8], &str> {\n\n map_res(\n\n length_data(many1_count(peek(alt((\n\n tuple((take_while_m_n(2, 2, is_base64_char), tag(\"==\"))),\n\n tuple((take_while_m_n(3, 3, is_base64_char), tag(\"=\"))),\n\n ))))),\n\n std::str::from_utf8,\n\n )(i)\n\n}\n\n\n\n// base64 = *(4base64_char) [base64_terminal]\n\npub(crate) fn base64(i: &[u8]) -> IResult<&[u8], &str> {\n\n // ODO: Check it\n\n map_res(\n\n length_data(many1_count(peek(tuple((\n\n take_while_m_n(4, 4, is_base64_char),\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 30, "score": 14.353696146607424 }, { "content": "//! 
IMAP core types\n\n\n\nuse std::{fmt::Debug, str::FromStr};\n\n\n\nuse nom::{\n\n branch::alt,\n\n bytes::streaming::{tag, tag_no_case, take_while, take_while1, take_while_m_n},\n\n character::is_alphanumeric,\n\n character::{\n\n is_digit,\n\n streaming::{crlf, u32},\n\n },\n\n combinator::{map, map_res, not, opt, peek, value},\n\n multi::{length_data, many1_count},\n\n sequence::{delimited, tuple},\n\n IResult,\n\n};\n\n\n\n// strings\n\n\n\n// CORE rules from rfc 5234\n\n\n\n// CHAR = %x01-7F;\n\n// any 7-bit US-ASCII character excluding 0\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 31, "score": 14.046736221762322 }, { "content": "pub(crate) mod body;\n\npub(crate) mod core;\n\npub(crate) mod grammar;\n\n\n\nuse self::core::base64;\n\nuse crate::parser::types::{\n\n ContinueReq, Greeting, GreetingStatus, TaggedResponse, UntaggedResponse,\n\n};\n\nuse grammar::{imap_tag, mailbox_data, resp_cond_auth, resp_cond_bye, resp_cond_state, resp_text};\n\nuse nom::{\n\n branch::alt,\n\n bytes::streaming::tag,\n\n character::streaming::crlf,\n\n combinator::map,\n\n sequence::{delimited, tuple},\n\n IResult,\n\n};\n\n\n\n// greeting = \"*\" SP (resp_cond_auth | resp_cond_bye) CRLF\n\npub(crate) fn greeting(i: &[u8]) -> IResult<&[u8], Greeting<'_>> {\n", "file_path": "src/parser/rfc3501/mod.rs", "rank": 32, "score": 12.799385372668649 }, { "content": "//! 
IMAP grammar [rfc3501]\n\n\n\nuse std::char::from_u32;\n\n\n\nuse super::core::*;\n\nuse crate::parser::types::{\n\n Address, ByeResponse, Capability, DateTime, Envelope, Flag, ImapResult, ListFlag, ListMailBox,\n\n MailBoxData, MessageData, Month, MsgAtt, MsgFlag, RespCond, RespText, RespTextCode, StatusInfo,\n\n StatusResponse, Time,\n\n};\n\nuse crate::tag::Tag;\n\nuse nom::{\n\n branch::alt,\n\n bytes::streaming::{tag, tag_no_case, take_while_m_n},\n\n combinator::{map, map_res, opt, value},\n\n multi::{many0, many1, separated_list0, separated_list1},\n\n sequence::{delimited, preceded, separated_pair, terminated, tuple},\n\n IResult,\n\n};\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 33, "score": 12.66106704577743 }, { "content": "//! IMAP parser implementation\n\n\n\nmod rfc3501;\n\n#[cfg(test)]\n\nmod tests;\n\nmod types;\n\n\n\nuse rfc3501::{continue_req, greeting, response_tagged};\n\nuse types::ImapResponse;\n\n\n\nuse nom::{branch::alt, combinator::map, IResult};\n\n\n\npub(crate) fn parse(i: &[u8]) -> IResult<&[u8], ImapResponse<'_>> {\n\n alt((\n\n map(greeting, |v| ImapResponse::Greeting(v)),\n\n map(continue_req, |v| ImapResponse::Continue(v)),\n\n map(response_tagged, |v| ImapResponse::Response(v)),\n\n ))(i)\n\n}\n", "file_path": "src/parser/mod.rs", "rank": 34, "score": 12.512161343474142 }, { "content": "//mailbox-list = '(' [mbx-list-flags] ')' SP (DQUOTE QUOTED-CHAR DQUOTE | nil) SP mailbox\n\npub(crate) fn mailbox_list(i: &[u8]) -> IResult<&[u8], ListMailBox<'_>> {\n\n map(\n\n tuple((\n\n delimited(tag(\"(\"), opt(mbx_list_flags), tag(\")\")),\n\n tag(\" \"),\n\n alt((\n\n map(\n\n delimited(tag(\"\\\"\"), take_while_m_n(1, 1, is_quoted_char), tag(\"\\\"\")),\n\n Some,\n\n ),\n\n nil,\n\n )),\n\n tag(\" \"),\n\n mailbox,\n\n )),\n\n |(flags, _, delimiter, _, name)| ListMailBox {\n\n flags: match flags {\n\n Some(v) => v,\n\n None => vec![],\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 35, "score": 
11.747442902371848 }, { "content": " map(capability_data, RespTextCode::Capability)(i)\n\n}\n\n\n\n// 'PARSE'\n\npub(crate) fn rtc_parse(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(tag_no_case(\"PARSE\"), |_| RespTextCode::Parse)(i)\n\n}\n\n\n\n// 'PERMANENTFLAGS' SP '(' [flag-perm *(SP flag-perm)] ')'\n\npub(crate) fn rtc_permanent_flags(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(\n\n delimited(\n\n tag_no_case(\"PERMANENTFLAGS (\"),\n\n // TODO: Use nom::multi::separated_list0 for parsing lists\n\n separated_list0(tag(\" \"), flag_perm),\n\n tag(\")\"),\n\n ),\n\n RespTextCode::PermanentFlags,\n\n )(i)\n\n}\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 36, "score": 11.665598818009094 }, { "content": "}\n\n\n\n// quoted = DQUOTE *QUOTED-CHAR DQUOTE;\n\n// quoted text\n\npub(crate) fn quoted(i: &[u8]) -> IResult<&[u8], &str> {\n\n map_res(\n\n delimited(tag(\"\\\"\"), take_while(is_quoted_char), tag(\"\\\"\")),\n\n std::str::from_utf8,\n\n )(i)\n\n}\n\n\n\n// string = quoted | literal\n\n//\n\npub(crate) fn string(i: &[u8]) -> IResult<&[u8], &str> {\n\n alt((quoted, literal))(i)\n\n}\n\n\n\n// nstring = string | nil\n\n// nil = 'NIL'\n\npub(crate) fn nstring(i: &[u8]) -> IResult<&[u8], Option<&str>> {\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 37, "score": 11.594160759107721 }, { "content": " map(\n\n terminated(number, tag_no_case(\" EXISTS\")),\n\n MailBoxData::Exists,\n\n ),\n\n map(\n\n terminated(number, tag_no_case(\" RECENT\")),\n\n MailBoxData::Recent,\n\n ),\n\n ))(i)\n\n}\n\n\n\n//flag-list = '(' [flag *(SP flag)] ')'\n\npub(crate) fn flag_list(i: &[u8]) -> IResult<&[u8], Vec<Flag<'_>>> {\n\n delimited(\n\n tag(\"(\"),\n\n separated_list0(tag(\" \"), map(flag, Flag::from)),\n\n tag(\")\"),\n\n )(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 38, "score": 11.29292214372099 }, { "content": "pub(crate) fn continue_req(i: &[u8]) -> IResult<&[u8], ContinueReq<'_>> {\n\n delimited(\n\n 
tag(\"+ \"),\n\n alt((\n\n map(resp_text, ContinueReq::Text),\n\n map(base64, ContinueReq::Base64),\n\n )),\n\n crlf,\n\n )(i)\n\n}\n\n\n\n// response-tagged = tag SP resp-cond-state CRLF\n\npub(crate) fn response_tagged(i: &[u8]) -> IResult<&[u8], TaggedResponse<'_>> {\n\n map(\n\n tuple((imap_tag, tag(\" \"), resp_cond_state, crlf)),\n\n |(tag, _, resp, _)| TaggedResponse { tag, resp },\n\n )(i)\n\n}\n\n\n\n//response-data = '*' SP (resp-cond-state | resp-cond-bye | mailbox-data |\n", "file_path": "src/parser/rfc3501/mod.rs", "rank": 39, "score": 11.086139566598682 }, { "content": "\n\n// zone = ('+' | '-') 4DIGIT\n\npub(crate) fn zone(i: &[u8]) -> IResult<&[u8], i16> {\n\n map(\n\n tuple((alt((tag(\"+\"), tag(\"-\"))), fixed_num(4))),\n\n |(sign, mut value): (_, i16)| {\n\n if sign == b\"-\" {\n\n value *= -1;\n\n }\n\n\n\n value\n\n },\n\n )(i)\n\n}\n\n\n\n//mailbox-data = 'FLAGS' SP flag-list | 'LIST' SP mailbox-list | 'LSUB' SP mailbox-list |\n\n// 'SEARCH' *(SP nz-number) | 'STATUS' SP mailbox SP '(' [status-att-list] ')' |\n\n// number SP 'EXISTS' | number SP 'RECENT'\n\npub(crate) fn mailbox_data(i: &[u8]) -> IResult<&[u8], MailBoxData<'_>> {\n\n alt((\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 40, "score": 10.785594671571086 }, { "content": "// auth-type = atom\n\npub(crate) fn auth_type(i: &[u8]) -> IResult<&[u8], &str> {\n\n // TODO: Create enum for auth type\n\n atom(i)\n\n}\n\n\n\n// capability-data = \"CAPABILITY\" *(SP CAPABILITY) SP \"IMAP4rev1\" *(SP capability)\n\npub(crate) fn capability_data(i: &[u8]) -> IResult<&[u8], Vec<Capability<'_>>> {\n\n // Grammar is not exactly as in rfc3501.\n\n // Just take all capabilities delimited by space\n\n // hoping that IMAP4rev1 is present\n\n map(\n\n tuple((\n\n tag_no_case(\"CAPABILITY\"),\n\n many1(map(tuple((tag(\" \"), capability)), |(_, data)| data)),\n\n )),\n\n |(_, capability)| capability,\n\n )(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 41, "score": 
10.744016142163256 }, { "content": " ))(i)\n\n}\n\n\n\n// resp-text = [ \"[\" resp-text-code \"]\" SP ] text\n\npub(crate) fn resp_text(i: &[u8]) -> IResult<&[u8], RespText<'_>> {\n\n map(\n\n tuple((many0(delimited(tag(\"[\"), resp_text_code, tag(\"] \"))), text)),\n\n |(code, text)| RespText { code, text },\n\n )(i)\n\n}\n\n\n\n// resp-cond-auth = (\"OK\" | \"PREAUTH\") SP resp-text;\n\n// Authentication condition\n\npub(crate) fn resp_cond_auth(i: &[u8]) -> IResult<&[u8], (&str, RespText<'_>)> {\n\n map(\n\n separated_pair(\n\n alt((tag_no_case(\"OK\"), tag_no_case(\"PREAUTH\"))),\n\n tag(\" \"),\n\n resp_text,\n\n ),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 42, "score": 10.265462414935055 }, { "content": "// Status condition\n\npub(crate) fn resp_cond_state(i: &[u8]) -> IResult<&[u8], RespCond<'_>> {\n\n map(\n\n separated_pair(\n\n alt((\n\n value(ImapResult::Ok, tag_no_case(\"OK\")),\n\n value(ImapResult::No, tag_no_case(\"NO\")),\n\n value(ImapResult::Bad, tag_no_case(\"BAD\")),\n\n )),\n\n tag(\" \"),\n\n resp_text,\n\n ),\n\n |(status, text)| RespCond { status, text },\n\n )(i)\n\n}\n\n\n\n// flag-keyword = atom\n\npub(crate) fn flag_keyword(i: &[u8]) -> IResult<&[u8], &str> {\n\n atom(i)\n\n}\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 43, "score": 10.077958772237618 }, { "content": "}\n\n\n\n// flag-fetch = flag | '\\Recent'\n\npub(crate) fn flag_fetch(i: &[u8]) -> IResult<&[u8], MsgFlag<'_>> {\n\n alt((\n\n map(flag, |v| MsgFlag::Common(Flag::from(v))),\n\n map(tag_no_case(\"\\\\Recent\"), |_| MsgFlag::Recent),\n\n ))(i)\n\n}\n\n\n\n// msg-att-static = 'ENVELOPE' SP envelope | \"INTERNALDATE\" SP date-time |\n\n// 'RFC822' ['.HEADER' | '.TEXT'] SP nstring |\n\n// 'RFC822.SIZE' SP number | 'BODY' ['STRUCTURE'] SP body |\n\n// 'BODY' section ['<' number '>'] SP nstring |\n\n// 'UID' SP uniqueid\n\n// ; Must not change for a message\n\npub(crate) fn msg_att_static(i: &[u8]) -> IResult<&[u8], MsgAtt<'_>> {\n\n alt((\n\n 
map(\n\n preceded(tag_no_case(\"ENVELOPE \"), envelope),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 44, "score": 9.743547309966667 }, { "content": "\n\n// flag-extension = '\\' atom;\n\n// Future expansion\n\npub(crate) fn flag_extension(i: &[u8]) -> IResult<&[u8], &str> {\n\n // TODO: incorrect removing suffix '\\'\n\n map(tuple((tag(\"\\\\\"), atom)), |(_, result)| result)(i)\n\n}\n\n\n\n// TODO: Change return type because Flag must not contain Perm\n\n// flag-perm = flag | '\\*'\n\npub(crate) fn flag_perm(i: &[u8]) -> IResult<&[u8], Flag<'_>> {\n\n map(flag, Flag::from)(i)\n\n}\n\n\n\n// flag = '\\Answered' | '\\Flagged' | '\\Deleted' | '\\Seen' | '\\Draft' | flag_keyword | flag_extension\n\npub(crate) fn flag(i: &[u8]) -> IResult<&[u8], &str> {\n\n // \\Answered, \\Flagged are handle by flag_extension parser\n\n alt((flag_extension, flag_keyword))(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 45, "score": 9.623681244970047 }, { "content": "// date-day-fixed = (SP DIGIT) | 2DIGIT\n\npub(crate) fn date_day_fixed(i: &[u8]) -> IResult<&[u8], u8> {\n\n alt((preceded(tag(\" \"), fixed_num(1)), fixed_num(2)))(i)\n\n}\n\n\n\n// date-month = 'Jan' | 'Feb' | 'Mar' | 'Apr' | 'May' | 'Jun' | 'Jul' | 'Aug' |\n\n// 'Sep' | 'Oct' | 'Nov' | 'Dec'\n\npub(crate) fn date_month(i: &[u8]) -> IResult<&[u8], Month> {\n\n alt((\n\n value(Month::Jan, tag_no_case(\"Jan\")),\n\n value(Month::Feb, tag_no_case(\"Feb\")),\n\n value(Month::Mar, tag_no_case(\"Mar\")),\n\n value(Month::Apr, tag_no_case(\"Apr\")),\n\n value(Month::May, tag_no_case(\"May\")),\n\n value(Month::Jun, tag_no_case(\"Jun\")),\n\n value(Month::Jul, tag_no_case(\"Jul\")),\n\n value(Month::Aug, tag_no_case(\"Aug\")),\n\n value(Month::Sep, tag_no_case(\"Sep\")),\n\n value(Month::Oct, tag_no_case(\"Oct\")),\n\n value(Month::Nov, tag_no_case(\"Nov\")),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 46, "score": 9.611526596130524 }, { "content": "// capability = ('AUTH=' 
auth-type) | atom;\n\npub(crate) fn capability(i: &[u8]) -> IResult<&[u8], Capability<'_>> {\n\n let auth_parser = map(\n\n tuple((tag_no_case(\"AUTH=\"), auth_type)),\n\n |(_, auth_type)| auth_type,\n\n );\n\n\n\n alt((\n\n map(auth_parser, Capability::Auth),\n\n map(atom, Capability::Other),\n\n ))(i)\n\n}\n\n\n\n// resp-text-code branches\n\n\n\n// 'ALERT'\n\npub(crate) fn rtc_alert(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(tag_no_case(\"ALERT\"), |_| RespTextCode::Alert)(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 47, "score": 9.549120394524223 }, { "content": " value(Month::Dec, tag_no_case(\"Dec\")),\n\n ))(i)\n\n}\n\n\n\n// date-year = 4DIGIT\n\npub(crate) fn date_year(i: &[u8]) -> IResult<&[u8], u16> {\n\n fixed_num(4)(i)\n\n}\n\n\n\n// time = 2DIGIT ':' 2DIGIT ':' 2DIGIT\n\npub(crate) fn time(i: &[u8]) -> IResult<&[u8], Time> {\n\n map(\n\n tuple((fixed_num(2), tag(\":\"), fixed_num(2), tag(\":\"), fixed_num(2))),\n\n |(hours, _, minutes, _, seconds)| Time {\n\n hours,\n\n minutes,\n\n seconds,\n\n },\n\n )(i)\n\n}\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 48, "score": 9.345305891641502 }, { "content": "// message-data | capability-data) CRLF\n\npub(crate) fn response_data(i: &[u8]) -> IResult<&[u8], UntaggedResponse<'_>> {\n\n delimited(\n\n tag(\"* \"),\n\n alt((\n\n map(resp_cond_state, UntaggedResponse::RespCond),\n\n map(resp_cond_bye, UntaggedResponse::RespBye),\n\n map(mailbox_data, UntaggedResponse::MailBox),\n\n )),\n\n crlf,\n\n )(i)\n\n}\n", "file_path": "src/parser/rfc3501/mod.rs", "rank": 49, "score": 9.239328833473088 }, { "content": " )),\n\n tag(\")\"),\n\n ),\n\n |(name, _, adl, _, mailbox, _, host)| Address {\n\n name,\n\n adl,\n\n mailbox,\n\n host,\n\n },\n\n )(i)\n\n}\n\n\n\n// date-time = DQUOTE date-day-fixed '-' date-month '-' date-year SP time SP zone DQUOTE\n\npub(crate) fn date_time(i: &[u8]) -> IResult<&[u8], DateTime> {\n\n map(\n\n delimited(\n\n tag(\"\\\"\"),\n\n 
tuple((\n\n date_day_fixed,\n\n tag(\"-\"),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 50, "score": 9.091962249782393 }, { "content": " },\n\n // SAFETY: is_quoted_char is valid ascii character, so it is valid utf-8\n\n delimiter: unsafe { delimiter.map(|v| std::str::from_utf8_unchecked(v)) },\n\n name,\n\n },\n\n )(i)\n\n}\n\n\n\n// *(SP nz-number)\n\npub(crate) fn mailbox_data_search(i: &[u8]) -> IResult<&[u8], Vec<u32>> {\n\n separated_list1(tag(\" \"), nz_number)(i)\n\n}\n\n\n\n// mailbox SP '(' [status-att-list] ')'\n\npub(crate) fn mailbox_data_status(i: &[u8]) -> IResult<&[u8], StatusResponse<'_>> {\n\n map(\n\n separated_pair(\n\n mailbox,\n\n tag(\" \"),\n\n delimited(tag(\"(\"), opt(status_att_list), tag(\")\")),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 51, "score": 8.957451511144 }, { "content": "// 'BADCHARSET' [SP '(' astring *(SP astring) ')']\n\npub(crate) fn rtc_bad_charset(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(\n\n preceded(\n\n tag_no_case(\"BADCHARSET\"),\n\n opt(delimited(\n\n tag(\" (\"),\n\n many1(terminated(astring, opt(tag(\" \")))),\n\n tag(\")\"),\n\n )),\n\n ),\n\n |v| match v {\n\n Some(v) => RespTextCode::BadCharset(v),\n\n None => RespTextCode::BadCharset(vec![]),\n\n },\n\n )(i)\n\n}\n\n\n\n// capability-data\n\npub(crate) fn rtc_capability_data(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 52, "score": 8.926121260113115 }, { "content": " to,\n\n cc,\n\n bcc,\n\n in_reply_to,\n\n message_id,\n\n },\n\n )(i)\n\n}\n\n\n\n// '(' 1*address ')' | nil\n\npub(crate) fn envelope_addr(i: &[u8]) -> IResult<&[u8], Option<Vec<Address<'_>>>> {\n\n alt((\n\n map(delimited(tag(\"(\"), many1(address), tag(\")\")), Some),\n\n nil,\n\n ))(i)\n\n}\n\n\n\n// address = '(' addr-name SP addr-adl SP addr-mailbox SP addr-host ')'\n\n//\n\n// addr-name = nstring\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 53, "score": 8.907717994742079 
}, { "content": " ListFlag::from(s)\n\n },\n\n )(i)\n\n}\n\n\n\n// tag(rfc) = 1*<any ASTRING-CHAR except '+'>\n\n// tag(this) = ASTRING-CHAR number\n\n// We use our own (tag)[tag::Tag] definition of tag\n\n// with one prefix letter and u32 id\n\npub(crate) fn imap_tag(i: &[u8]) -> IResult<&[u8], Tag> {\n\n map(\n\n tuple((take_while_m_n(1, 1, is_astring_char), number)),\n\n |(letter, index)| {\n\n let prefix = from_u32(letter[0] as u32).unwrap();\n\n Tag::new(prefix, index)\n\n },\n\n )(i)\n\n}\n\n\n\n// resp-cond-state = (\"OK\" | \"NO\" | \"BAD\") SP resp_text;\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 54, "score": 8.889719544080991 }, { "content": "//message-data = nz-number SP (\"EXPUNGE\" | (\"FETCH\" SP msg-att))\n\npub(crate) fn message_data(i: &[u8]) -> IResult<&[u8], MessageData<'_>> {\n\n todo!()\n\n}\n\n\n\n// msg_att = '(' (msg-att-dynamic | msg-att-static) *(SP (msg-att-dynamic | msg-att-static)) ')'\n\npub(crate) fn msg_att(i: &[u8]) -> IResult<&[u8], MsgAtt<'_>> {\n\n todo!()\n\n}\n\n\n\n// msg-att-dynamic = 'FLAGS' SP '(' [flag-fetch *(SP flag-fetch)] ')'\n\n// ; May change for a message\n\npub(crate) fn msg_att_dynamic(i: &[u8]) -> IResult<&[u8], MsgAtt<'_>> {\n\n map(\n\n preceded(\n\n tag_no_case(\"FLAGS \"),\n\n delimited(tag(\"(\"), separated_list1(tag(\" \"), flag_fetch), tag(\")\")),\n\n ),\n\n MsgAtt::Flags,\n\n )(i)\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 55, "score": 8.373020674484083 }, { "content": " alt((\n\n map_res(tag_no_case(\"\\\\Noinferiors\"), std::str::from_utf8),\n\n flag_extension,\n\n )),\n\n ListFlag::from,\n\n )(i)\n\n}\n\n\n\n//mbx-list-sflag = '\\Noselect' | '\\Marked' | '\\Unmarked'\n\n// Selectability flags; only one per LIST response\n\npub(crate) fn mbx_list_sflag(i: &[u8]) -> IResult<&[u8], ListFlag<'_>> {\n\n map(\n\n alt((\n\n tag_no_case(\"\\\\Noselect\"),\n\n tag_no_case(\"\\\\Marked\"),\n\n tag_no_case(\"\\\\Unmarked\"),\n\n )),\n\n |flag| {\n\n // SAFETY: flag is \\Noselect, 
\\Marked or \\Unmarked ASCII texts, so it is valid UTF-8\n\n let s = unsafe { std::str::from_utf8_unchecked(flag) };\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 56, "score": 7.833111215328197 }, { "content": "// ; If non-NIL holds pharse from [RFC-2822] mailbox after removing quoting\n\n// addr-adl = nstring\n\n// ; Holds route from route-addr if non-NIL\n\n// addr-mailbox = nstring\n\n// ; NIL indicates end of group; if non-NIL and addr-host is NIL, holds group name\n\n// ; Otherwise, holds local-part after removing quoting\n\n// addr-host = nstring\n\n// ; NIL indicates group syntax. Otherwise, holds domain name\n\npub(crate) fn address(i: &[u8]) -> IResult<&[u8], Address<'_>> {\n\n map(\n\n delimited(\n\n tag(\"(\"),\n\n tuple((\n\n nstring,\n\n tag(\" \"),\n\n nstring,\n\n tag(\" \"),\n\n nstring,\n\n tag(\" \"),\n\n nstring,\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 57, "score": 7.558531480954739 }, { "content": "}\n\n\n\n// atom = 1*ATOM-CHAR\n\npub(crate) fn atom(i: &[u8]) -> IResult<&[u8], &str> {\n\n map_res(take_while1(is_atom_char), std::str::from_utf8)(i)\n\n}\n\n\n\n// literal = \"{\" number \"}\" CRLF *CHAR8;\n\n// number represents the number of CHAR8s\n\npub(crate) fn literal(i: &[u8]) -> IResult<&[u8], &str> {\n\n let (i, (_, count, _, _)) = tuple((tag(\"{\"), number, tag(\"}\"), crlf))(i)?;\n\n let parser = take_while_m_n(count as usize, count as usize, is_char8);\n\n\n\n map_res(parser, std::str::from_utf8)(i)\n\n}\n\n\n\n// text = 1*TEXT-CHAR\n\n//\n\npub(crate) fn text(i: &[u8]) -> IResult<&[u8], &str> {\n\n map_res(take_while1(is_text_char), std::str::from_utf8)(i)\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 58, "score": 7.481104187495063 }, { "content": "}\n\n\n\n// envelope = '(' env-date SP env-subject SP env-from SP env-sender\n\n// SP env-reply-to SP env-to SP env-cc SP env-bcc SP env-in-reply-to\n\n// SP env-message-id ')'\n\n// env-date, env-subject, env-in-reply-to, env-message-id = 
nstring\n\n// env-from, env-sender, env-reply-to, env-to, env-cc, env-bcc = '(' 1*address ')' | nil\n\npub(crate) fn envelope(i: &[u8]) -> IResult<&[u8], Envelope<'_>> {\n\n map(\n\n delimited(\n\n tag(\"(\"),\n\n tuple((\n\n nstring,\n\n tag(\" \"),\n\n nstring,\n\n tag(\" \"),\n\n envelope_addr,\n\n tag(\" \"),\n\n envelope_addr,\n\n tag(\" \"),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 59, "score": 7.414224337037648 }, { "content": "pub(crate) struct MediaBasic<'a> {\n\n pub(crate) media_type: MediaType<'a>,\n\n pub(crate) subtype: &'a str,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub(crate) enum BodyEnc<'a> {\n\n N7bit,\n\n N8bit,\n\n Binary,\n\n Base64,\n\n QuotedPrintable,\n\n Custom(&'a str),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct BodyFields<'a> {\n\n pub(crate) param: Option<Vec<(&'a str, &'a str)>>,\n\n pub(crate) id: Option<&'a str>,\n\n pub(crate) desc: Option<&'a str>,\n", "file_path": "src/parser/types.rs", "rank": 60, "score": 7.390714252378748 }, { "content": " pub(crate) enc: BodyEnc<'a>,\n\n pub(crate) octets: u32,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct BodyTypeBasic<'a> {\n\n pub(crate) media: MediaBasic<'a>,\n\n pub(crate) fields: BodyFields<'a>,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct BodyTypeMsg<'a> {\n\n pub(crate) fields: BodyFields<'a>,\n\n pub(crate) envelope: Envelope<'a>,\n\n pub(crate) body: Box<Body<'a>>,\n\n pub(crate) lines: u32,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct BodyTypeText<'a> {\n", "file_path": "src/parser/types.rs", "rank": 61, "score": 7.3420149283324925 }, { "content": " opt(base64_terminal),\n\n ))))),\n\n std::str::from_utf8,\n\n )(i)\n\n}\n\n\n\n// numbers\n\n\n\n// number = 1*DIGIT;\n\n// unsigned 32-bit integer\n\npub(crate) fn number(i: &[u8]) -> IResult<&[u8], u32> {\n\n u32(i)\n\n}\n\n\n\n// nz-number = digit-nz *DIGIT;\n\n// non-zero unsigned 32-bit integer\n\npub(crate) fn 
nz_number(i: &[u8]) -> IResult<&[u8], u32> {\n\n let (i, (_, result)) = tuple((not(tag(\"0\")), number))(i)?;\n\n Ok((i, result))\n\n}\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 62, "score": 7.238544951129705 }, { "content": " )(i)\n\n}\n\n\n\n// 'Unseen' SP number\n\npub(crate) fn status_unseen(i: &[u8]) -> IResult<&[u8], StatusInfo> {\n\n map(\n\n preceded(tag_no_case(\"UNSEEN \"), nz_number),\n\n StatusInfo::Unseen,\n\n )(i)\n\n}\n\n\n\n// TODO! Incorrect\n\n// status-att-list = status-att SP number *(SP status-att SP number)\n\n// status-att = 'MESSAGES' | 'RECENT' | 'UIDNEXT' | 'UIDVALIDITY' | 'UNSEEN'\n\npub(crate) fn status_att_list(i: &[u8]) -> IResult<&[u8], Vec<StatusInfo>> {\n\n many1(alt((\n\n status_messages,\n\n status_recent,\n\n status_uidnext,\n\n status_uidvalidity,\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 63, "score": 7.1642252519453145 }, { "content": " map(\n\n delimited(\n\n tag(\"* \"),\n\n alt((\n\n map(resp_cond_auth, |(status, resp_text)| {\n\n if status == \"OK\" {\n\n GreetingStatus::Ok(resp_text)\n\n } else {\n\n GreetingStatus::Preauth(resp_text)\n\n }\n\n }),\n\n map(resp_cond_bye, GreetingStatus::Bye),\n\n )),\n\n crlf,\n\n ),\n\n |status| Greeting { status },\n\n )(i)\n\n}\n\n\n\n// continue-req = '+' SP (resp-text | base64) CRLF\n", "file_path": "src/parser/rfc3501/mod.rs", "rank": 64, "score": 7.097529669074544 }, { "content": "\n\n// 'READ-ONLY'\n\npub(crate) fn rtc_read_only(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(tag_no_case(\"READ-ONLY\"), |_| RespTextCode::ReadOnly)(i)\n\n}\n\n\n\n// 'READ-WRITE'\n\npub(crate) fn rtc_read_write(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(tag_no_case(\"READ-WRITE\"), |_| RespTextCode::ReadWrite)(i)\n\n}\n\n\n\n// 'TRYCREATE'\n\npub(crate) fn rtc_try_create(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(tag_no_case(\"TRYCREATE\"), |_| RespTextCode::TryCreate)(i)\n\n}\n\n\n\n// 'UIDNEXT' SP nz-number\n\npub(crate) fn 
rtc_uidnext(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(\n\n preceded(tag_no_case(\"UIDNEXT \"), nz_number),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 65, "score": 7.086661600378328 }, { "content": " pub(crate) subtype: &'a str,\n\n pub(crate) fields: BodyFields<'a>,\n\n pub(crate) lines: u32,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct BodyExt1Part<'a> {\n\n md5: Option<&'a str>,\n\n}\n", "file_path": "src/parser/types.rs", "rank": 66, "score": 6.964096763757909 }, { "content": "pub(crate) fn status_recent(i: &[u8]) -> IResult<&[u8], StatusInfo> {\n\n map(\n\n preceded(tag_no_case(\"RECENT \"), nz_number),\n\n StatusInfo::Recent,\n\n )(i)\n\n}\n\n\n\n// 'UIDNEXT' SP number\n\npub(crate) fn status_uidnext(i: &[u8]) -> IResult<&[u8], StatusInfo> {\n\n map(\n\n preceded(tag_no_case(\"UIDNEXT \"), nz_number),\n\n StatusInfo::UidNext,\n\n )(i)\n\n}\n\n\n\n// 'UIDVALIDITY' SP number\n\npub(crate) fn status_uidvalidity(i: &[u8]) -> IResult<&[u8], StatusInfo> {\n\n map(\n\n preceded(tag_no_case(\"UIDVALIDITY \"), nz_number),\n\n StatusInfo::UidValidity,\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 67, "score": 6.928742682711048 }, { "content": "//!\n\nuse crate::error::Result;\n\nuse crate::tag::{Tag, TagGenerator};\n\nuse futures::stream::SplitSink;\n\nuse futures::{SinkExt, StreamExt};\n\nuse log::trace;\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse tokio::sync::mpsc::Sender;\n\nuse tokio::{\n\n net::{TcpStream, ToSocketAddrs},\n\n task::JoinHandle,\n\n};\n\nuse tokio_util::codec::{Decoder, Framed, LinesCodec};\n\n\n\n/// An async tcp stream.\n\n/// The `ImapConnection` serves to register a request to the server\n\n/// and receive a response.\n\n///\n\n/// All requests for the server saved as ([Tag](Tag) - Channel) match.\n", "file_path": "src/imapconnection.rs", "rank": 68, "score": 6.87439261298945 }, { "content": " RespTextCode::UidNext,\n\n )(i)\n\n}\n\n\n\n// 'UIDVALIDITY' SP 
nz-number\n\npub(crate) fn rtc_uidvalidity(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(\n\n preceded(tag_no_case(\"UIDVALIDITY \"), nz_number),\n\n RespTextCode::UidValidity,\n\n )(i)\n\n}\n\n\n\n// 'UNSEEN' SP nz-number\n\npub(crate) fn rtc_unseen(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n map(\n\n preceded(tag_no_case(\"UNSEEN \"), nz_number),\n\n RespTextCode::Unseen,\n\n )(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 69, "score": 6.1685133551729106 }, { "content": " |(status, resp_text)| {\n\n // SAFETY: status is \"OK\" either \"PREAUTH\" strings, so it is valid utf-8\n\n let status = unsafe { std::str::from_utf8_unchecked(status) };\n\n (status, resp_text)\n\n },\n\n )(i)\n\n}\n\n\n\n// resp-cond-bye = \"BYE\" SP resp-text\n\npub(crate) fn resp_cond_bye(i: &[u8]) -> IResult<&[u8], ByeResponse<'_>> {\n\n map(preceded(tag_no_case(\"BYE \"), resp_text), |resp| {\n\n ByeResponse { resp }\n\n })(i)\n\n}\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 70, "score": 6.068794519806133 }, { "content": "\n\n// Help function for syntax like mDIGIT\n\npub(crate) fn fixed_num<T: FromStr>(m: usize) -> impl Fn(&[u8]) -> IResult<&[u8], T>\n\nwhere\n\n <T as FromStr>::Err: Debug,\n\n{\n\n move |i: &[u8]| {\n\n // SAFETY: v is slice of DIGIT('0' - '9') so it's valid utf-8\n\n map(take_while_m_n(m, m, is_digit), |v| unsafe {\n\n std::str::from_utf8_unchecked(v).parse().unwrap()\n\n })(i)\n\n }\n\n}\n", "file_path": "src/parser/rfc3501/core.rs", "rank": 71, "score": 5.821950730576054 }, { "content": "\n\n let subscriptions = Arc::new(HashMap::new());\n\n\n\n let future = tokio::spawn(async move {\n\n loop {\n\n let buf = stream.next().await.unwrap();\n\n trace!(\"S: {:?}\", buf);\n\n\n\n // Get in subs sender half by tag\n\n }\n\n });\n\n\n\n Ok(Self {\n\n sink,\n\n subscriptions,\n\n generator: TagGenerator::default(),\n\n cancel: future,\n\n })\n\n }\n\n\n\n async fn send(&mut self, data: String) {\n\n // Save subscription 
by tag to subscriptions map\n\n self.sink.send(data).await.unwrap()\n\n }\n\n}\n", "file_path": "src/imapconnection.rs", "rank": 72, "score": 5.660305819057216 }, { "content": " status_unseen,\n\n )))(i)\n\n}\n\n\n\n//mailbox = 'INBOX' | astring\n\npub(crate) fn mailbox(i: &[u8]) -> IResult<&[u8], &str> {\n\n astring(i)\n\n}\n\n\n\n//mbx-list-flags = *(mbx-list-oflag SP) mbx-list-sflag *(SP mbx-list-oflag) |\n\n// mbx-list-oflag *(SP mbx-list-oflag)\n\npub(crate) fn mbx_list_flags(i: &[u8]) -> IResult<&[u8], Vec<ListFlag<'_>>> {\n\n // Allow multiple mbx-list-sflag for easy writing of parser, it is not lethal\n\n separated_list1(tag(\" \"), alt((mbx_list_oflag, mbx_list_sflag)))(i)\n\n}\n\n\n\n//mbx-list-oflag = '\\Noinferiors' | flag-extension;\n\n// Other flags; multiple possible per LIST response\n\npub(crate) fn mbx_list_oflag(i: &[u8]) -> IResult<&[u8], ListFlag<'_>> {\n\n map(\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 73, "score": 5.367071427760991 }, { "content": " MsgAtt::Envelope,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"INTERNALDATE \"), date_time),\n\n MsgAtt::InternalDate,\n\n ),\n\n map(preceded(tag_no_case(\"RFC822 \"), nstring), MsgAtt::Rfc822),\n\n map(\n\n preceded(tag_no_case(\"RFC822.HEADER \"), nstring),\n\n MsgAtt::Rfc822Header,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"RFC822.TEXT \"), nstring),\n\n MsgAtt::Rfc822Text,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"RFC822.SIZE \"), number),\n\n MsgAtt::Rfc822Size,\n\n ),\n\n ))(i)\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 74, "score": 5.276093757361263 }, { "content": " ),\n\n |(name, status)| {\n\n let status = match status {\n\n Some(v) => v,\n\n None => vec![],\n\n };\n\n StatusResponse { name, status }\n\n },\n\n )(i)\n\n}\n\n\n\n// 'MESSAGES' SP number\n\npub(crate) fn status_messages(i: &[u8]) -> IResult<&[u8], StatusInfo> {\n\n map(\n\n preceded(tag_no_case(\"MESSAGES \"), nz_number),\n\n StatusInfo::Messages,\n\n )(i)\n\n}\n\n\n\n// 'RECENT' SP number\n", 
"file_path": "src/parser/rfc3501/grammar.rs", "rank": 75, "score": 5.258714595369732 }, { "content": " map(\n\n preceded(tag_no_case(\"FLAGS \"), flag_list),\n\n MailBoxData::Flags,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"LIST \"), mailbox_list),\n\n MailBoxData::List,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"LSUB \"), mailbox_list),\n\n MailBoxData::Lsub,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"SEARCH \"), mailbox_data_search),\n\n MailBoxData::Search,\n\n ),\n\n map(\n\n preceded(tag_no_case(\"STATUS \"), mailbox_data_status),\n\n MailBoxData::Status,\n\n ),\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 76, "score": 5.251363999885935 }, { "content": "//!\n\n\n\nuse crate::tag::Tag;\n\nuse std::convert::TryFrom;\n\n\n\nuse crate::error::{create_custom_error, Error};\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum ImapResponse<'a> {\n\n Greeting(Greeting<'a>),\n\n Continue(ContinueReq<'a>),\n\n Response(TaggedResponse<'a>),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub(crate) enum ImapResult {\n\n Ok,\n\n Bad,\n\n No,\n\n}\n", "file_path": "src/parser/types.rs", "rank": 77, "score": 5.097452461173988 }, { "content": "/// If the tagged response was received, we'll get Channel by the `Tag`\n\n/// and send parsed response into it.\n\n///\n\n/// We can wait multiple responses at one time in multiple threads\n\n/// because we can analyze received tag and find the corresponding Channel.\n\npub(crate) struct ImapConnection {\n\n sink: SplitSink<Framed<TcpStream, LinesCodec>, String>,\n\n subscriptions: Arc<HashMap<Tag, Sender<String>>>,\n\n generator: TagGenerator,\n\n // Cancel background listener future\n\n cancel: JoinHandle<()>,\n\n}\n\n\n\nimpl ImapConnection {\n\n async fn connect<A: ToSocketAddrs>(addr: A) -> Result<Self> {\n\n let connection = TcpStream::connect(addr).await?;\n\n\n\n let frame = LinesCodec::default().framed(connection);\n\n\n\n let (sink, mut stream) = frame.split();\n", "file_path": 
"src/imapconnection.rs", "rank": 78, "score": 4.98959282488644 }, { "content": "use crate::{\n\n parser::{\n\n parse,\n\n types::{\n\n ContinueReq, Greeting, GreetingStatus, ImapResponse, ImapResult, RespCond, RespText,\n\n TaggedResponse,\n\n },\n\n },\n\n tag::Tag,\n\n};\n\n\n", "file_path": "src/parser/tests/mod.rs", "rank": 79, "score": 4.965374479703505 }, { "content": "//! Error type\n\n\n\nuse thiserror::Error;\n\n\n\n/// A convenience wrapper around `Result` for [Error][Error]\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// A set of errors\n\n#[non_exhaustive]\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n /// Fail write or read to a network stream\n\n #[error(\"An IO error\")]\n\n Io(#[from] std::io::Error),\n\n /// Error with custom message\n\n #[error(\"An error has occured: {0}\")]\n\n Custom(String),\n\n #[error(\"Parser error [slice: {0:?}, str: {}]\", vec_to_string(.0))]\n\n Parser(Vec<u8>),\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 80, "score": 4.718507492032698 }, { "content": " pub(crate) zone: i16,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum Body<'a> {\n\n Basic(BodyTypeBasic<'a>),\n\n Msg(BodyTypeMsg<'a>),\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub(crate) enum MediaType<'a> {\n\n Application,\n\n Audio,\n\n Image,\n\n Message,\n\n Video,\n\n Custom(&'a str),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n", "file_path": "src/parser/types.rs", "rank": 81, "score": 4.460190273694162 }, { "content": " \"\\\\Seen\" => Ok(Self::Seen),\n\n \"\\\\Answred\" => Ok(Self::Answered),\n\n \"\\\\Flagged\" => Ok(Self::Flagged),\n\n \"\\\\Deleted\" => Ok(Self::Deleted),\n\n \"\\\\Draft\" => Ok(Self::Draft),\n\n \"\\\\Recent\" => Ok(Self::Recent),\n\n _ => Err(create_custom_error(format!(\n\n \"Can not convert {} into DefinedFlag\",\n\n value\n\n ))),\n\n }\n\n }\n\n}\n\n\n\n// TODO: Flag should be without Perm branch\n\n// Perm branch used only for resp_text_code\n\n#[derive(Debug, Eq, 
PartialEq)]\n\npub(crate) enum Flag<'a> {\n\n Defined(DefinedFlag),\n\n Keyword(&'a str),\n", "file_path": "src/parser/types.rs", "rank": 82, "score": 4.109431012905009 }, { "content": " pub(crate) resp: RespText<'a>,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum DefinedFlag {\n\n Seen,\n\n Answered,\n\n Flagged,\n\n Deleted,\n\n Draft,\n\n Recent,\n\n}\n\n\n\n// TODO: It is incorrect, because flags are case-insesitive\n\n// Add function for insesitive cmp\n\nimpl TryFrom<&str> for DefinedFlag {\n\n type Error = Error;\n\n\n\n fn try_from(value: &str) -> Result<Self, Self::Error> {\n\n match value {\n", "file_path": "src/parser/types.rs", "rank": 83, "score": 3.961933284595869 }, { "content": "// resp-text-code = \"ALERT\" | \"BADCHARSET\" [SP \"(\" astring *(SP astring) \")\" ] |\n\n// capability-data | \"PARSE\" | \"PERMANENTFLAGS\" SP \"(\"\n\n// [ flag-perm *(SP flag-perm)] \")\" | \"READ-ONLY\" |\n\n// \"READ-WRITE\" | \"TRYCREATE\" | \"UIDNEXT\" SP nz-number |\n\n// \"UIDVALIDITY\" SP nz-number | \"UNSEEN\" SP nz_number |\n\n// atom [ SP 1*<any TEXT-CHAR except \"]\"> ]\n\npub(crate) fn resp_text_code(i: &[u8]) -> IResult<&[u8], RespTextCode<'_>> {\n\n alt((\n\n rtc_alert,\n\n rtc_bad_charset,\n\n rtc_capability_data,\n\n rtc_parse,\n\n rtc_permanent_flags,\n\n rtc_read_only,\n\n rtc_read_write,\n\n rtc_try_create,\n\n rtc_uidnext,\n\n rtc_uidvalidity,\n\n rtc_unseen,\n\n // TODO: add last branch from rfc3501 resp-text-code\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 84, "score": 3.5727988284849936 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct ListMailBox<'a> {\n\n pub(crate) flags: Vec<ListFlag<'a>>,\n\n pub(crate) delimiter: Option<&'a str>,\n\n pub(crate) name: &'a str,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum Capability<'a> {\n\n // TODO: Create enum for common auth types\n\n Auth(&'a str),\n\n // TODO: Create enum for common other capabilities\n\n Other(&'a 
str),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum RespTextCode<'a> {\n", "file_path": "src/parser/types.rs", "rank": 85, "score": 3.550249516193007 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum ListDefinedFlag {\n\n Noinferiors,\n\n Noselect,\n\n Marked,\n\n Unmarked,\n\n}\n\n\n\n// TODO: It is incorrect, because flags are case-insesitive\n\n// Add function for insesitive cmp\n\nimpl TryFrom<&str> for ListDefinedFlag {\n\n type Error = Error;\n\n\n\n fn try_from(value: &str) -> Result<Self, Self::Error> {\n\n match value {\n\n \"\\\\Noinferiors\" => Ok(Self::Noinferiors),\n\n \"\\\\Noselect\" => Ok(Self::Noselect),\n\n \"\\\\Marked\" => Ok(Self::Marked),\n\n \"\\\\Unmarked\" => Ok(Self::Unmarked),\n\n _ => Err(create_custom_error(format!(\n", "file_path": "src/parser/types.rs", "rank": 86, "score": 3.406052199109396 }, { "content": " envelope_addr,\n\n tag(\" \"),\n\n envelope_addr,\n\n tag(\" \"),\n\n envelope_addr,\n\n tag(\" \"),\n\n envelope_addr,\n\n tag(\" \"),\n\n nstring,\n\n tag(\" \"),\n\n nstring,\n\n )),\n\n tag(\")\"),\n\n ),\n\n |(\n\n date,\n\n _,\n\n subject,\n\n _,\n\n from,\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 87, "score": 3.1811955620990844 }, { "content": " date_month,\n\n tag(\"-\"),\n\n date_year,\n\n tag(\" \"),\n\n time,\n\n tag(\" \"),\n\n zone,\n\n )),\n\n tag(\"\\\"\"),\n\n ),\n\n |(day, _, month, _, year, _, time, _, zone)| DateTime {\n\n day,\n\n month,\n\n year,\n\n time,\n\n zone,\n\n },\n\n )(i)\n\n}\n\n\n", "file_path": "src/parser/rfc3501/grammar.rs", "rank": 88, "score": 2.9044304796054123 }, { "content": " Rfc822Text(Option<&'a str>),\n\n Rfc822Size(u32),\n\n Flags(Vec<MsgFlag<'a>>),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum MessageData<'a> {\n\n Expunge(u32),\n\n Fetch(MsgAtt<'a>),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct Time {\n\n pub(crate) hours: u8,\n\n pub(crate) minutes: u8,\n\n pub(crate) seconds: 
u8,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub(crate) enum Month {\n", "file_path": "src/parser/types.rs", "rank": 89, "score": 2.2953873223969303 }, { "content": "pub(crate) struct TaggedResponse<'a> {\n\n pub(crate) tag: Tag,\n\n pub(crate) resp: RespCond<'a>,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum UntaggedResponse<'a> {\n\n RespCond(RespCond<'a>),\n\n RespBye(ByeResponse<'a>),\n\n MailBox(MailBoxData<'a>),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct RespCond<'a> {\n\n pub(crate) status: ImapResult,\n\n pub(crate) text: RespText<'a>,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct ByeResponse<'a> {\n", "file_path": "src/parser/types.rs", "rank": 90, "score": 2.0220976468160226 }, { "content": " \"Can not convert {} into ListDefinedFlag\",\n\n value\n\n ))),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum ListFlag<'a> {\n\n Defined(ListDefinedFlag),\n\n Extension(&'a str),\n\n}\n\n\n\nimpl<'a> From<&'a str> for ListFlag<'a> {\n\n fn from(s: &'a str) -> Self {\n\n if let Ok(v) = ListDefinedFlag::try_from(s) {\n\n Self::Defined(v)\n\n } else {\n\n Self::Extension(s)\n\n }\n", "file_path": "src/parser/types.rs", "rank": 91, "score": 1.800870473619605 }, { "content": "//! 
IMAP client implementation\n\n\n\n#![warn(rust_2018_idioms, /*missing_docs,*/ missing_debug_implementations)]\n\n#![allow(dead_code)] /* allow on develop stage */\n\n\n\npub mod client;\n\npub mod error;\n\nmod imapconnection;\n\nmod parser;\n\nmod tag;\n", "file_path": "src/lib.rs", "rank": 92, "score": 1.6047679171836138 }, { "content": " Jan,\n\n Feb,\n\n Mar,\n\n Apr,\n\n May,\n\n Jun,\n\n Jul,\n\n Aug,\n\n Sep,\n\n Oct,\n\n Nov,\n\n Dec,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct DateTime {\n\n pub(crate) day: u8,\n\n pub(crate) month: Month,\n\n pub(crate) year: u16,\n\n pub(crate) time: Time,\n", "file_path": "src/parser/types.rs", "rank": 93, "score": 1.547012866919586 }, { "content": " Alert,\n\n BadCharset(Vec<&'a str>),\n\n Capability(Vec<Capability<'a>>),\n\n Parse,\n\n PermanentFlags(Vec<Flag<'a>>),\n\n ReadOnly,\n\n ReadWrite,\n\n TryCreate,\n\n UidNext(u32),\n\n UidValidity(u32),\n\n Unseen(u32),\n\n // TODO: add last branch\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct RespText<'a> {\n\n pub(crate) code: Vec<RespTextCode<'a>>,\n\n pub(crate) text: &'a str,\n\n}\n\n\n", "file_path": "src/parser/types.rs", "rank": 94, "score": 1.4912957511420668 } ]
Rust
src/compaction/value.rs
timothee-haudebourg/json-ld
0b44aa736b681893e75ce282c67511480c992901
use super::{compact_iri, JsonSrc, Options}; use crate::{ context::{self, Inversible, Loader, Local}, syntax::{Container, ContainerType, Keyword, Term, Type}, util::{AsAnyJson, AsJson, JsonFrom}, ContextMut, Error, Id, Loc, Reference, Value, }; pub async fn compact_indexed_value_with< J: JsonSrc, K: JsonFrom<J>, T: Sync + Send + Id, C: ContextMut<T>, L: Loader, M, >( value: &Value<J, T>, index: Option<&str>, active_context: Inversible<T, &C>, active_property: Option<&str>, loader: &mut L, options: Options, meta: M, ) -> Result<K, Error> where C: Sync + Send, C::LocalContext: Send + Sync + From<L::Output>, L: Sync + Send, M: Send + Sync + Clone + Fn(Option<&J::MetaData>) -> K::MetaData, { let mut active_context = active_context.into_borrowed(); if let Some(active_property) = active_property { if let Some(active_property_definition) = active_context.get(active_property) { if let Some(local_context) = &active_property_definition.context { active_context = Inversible::new( local_context .process_with( *active_context.as_ref(), loader, active_property_definition.base_url(), context::ProcessingOptions::from(options).with_override(), ) .await .map_err(Loc::unwrap)? 
.into_inner(), ) .into_owned() } } } let mut result = K::Object::default(); let active_property_definition = match active_property { Some(active_property) => active_context.get(active_property), None => None, }; let language = match active_property_definition { Some(def) => match def.language.as_ref() { Some(lang) => lang.as_ref().map(|l| l.as_ref()).option(), None => active_context.default_language(), }, None => active_context.default_language(), }; let direction = match active_property_definition { Some(def) => match def.direction { Some(dir) => dir.option(), None => active_context.default_base_direction(), }, None => active_context.default_base_direction(), }; let type_mapping: Option<Type<&T>> = match active_property_definition { Some(def) => def.typ.as_ref().map(|t| t.into()), None => None, }; let container_mapping = match active_property_definition { Some(def) => def.container, None => Container::None, }; let remove_index = (index.is_some() && container_mapping.contains(ContainerType::Index)) || index.is_none(); match value { Value::Literal(lit, ty) => { use crate::object::value::Literal; if ty.as_ref().map(Type::Ref) == type_mapping && remove_index { match lit { Literal::Null => return Ok(K::null(meta(None))), Literal::Boolean(b) => return Ok(b.as_json_with(meta(None))), Literal::Number(n) => return Ok(K::number(n.clone().into(), meta(None))), Literal::String(s) => { if ty.is_some() || (language.is_none() && direction.is_none()) { return Ok(s.as_json_with(meta(None))); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), s.as_json_with(meta(None)), ); } } } } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; match lit { Literal::Null => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), 
K::null(meta(None)), ); } Literal::Boolean(b) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), b.as_json_with(meta(None)), ); } Literal::Number(n) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), K::number(n.clone().into(), meta(None)), ); } Literal::String(s) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), s.as_json_with(meta(None)), ); } } if let Some(ty) = ty { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Type), true, false, options, )?; let compact_ty = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Ref(Reference::Id(ty.clone())), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), match compact_ty { Some(s) => K::string(s.as_str().into(), meta(None)), None => K::null(meta(None)), }, ); } } } Value::LangString(ls) => { let ls_language = ls.language(); let ls_direction = ls.direction(); if remove_index && (ls_language.is_none() || language == ls_language) && (ls_direction.is_none() || direction == ls_direction) { return Ok(ls.as_str().as_json_with(meta(None))); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), K::string(ls.as_str().into(), meta(None)), ); if let Some(language) = ls.language() { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Language), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), language.as_json_with(meta(None)), ); } if let Some(direction) = ls.direction() { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Direction), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), 
direction.as_json_with(meta(None)), ); } } } Value::Json(value) => { if type_mapping == Some(Type::Json) && remove_index { return Ok(value.as_json_with(meta)); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), value.as_json_with(meta.clone()), ); let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Type), true, false, options, )?; let compact_ty = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Json), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), match compact_ty { Some(s) => K::string(s.as_str().into(), meta(None)), None => K::null(meta(None)), }, ); } } } if !remove_index { if let Some(index) = index { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Index), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), index.as_json_with(meta(None)), ); } } Ok(K::object(result, meta(None))) }
use super::{compact_iri, JsonSrc, Options}; use crate::{ context::{self, Inversible, Loader, Local}, syntax::{Container, ContainerType, Keyword, Term, Type}, util::{AsAnyJson, AsJson, JsonFrom}, ContextMut, Error, Id, Loc, Reference, Value, }; pub async fn compact_indexed_value_with< J: JsonSrc, K: JsonFrom<J>, T: Sync + Send + Id, C: ContextMut<T>, L: Loader, M, >( value: &Value<J, T>, index: Option<&str>, active_context: Inversible<T, &C>, active_property: Option<&str>, loader: &mut L, options: Options, meta: M, ) -> Result<K, Error> where C: Sync + Send, C::LocalContext: Send + Sync + From<L::Output>, L: Sync + Send, M: Send + Sync + Clone + Fn(Option<&J::MetaData>) -> K::MetaData, { let mut active_context = active_context.into_borrowed(); if let Some(active_property) = active_property { if let Some(active_property_definition) = active_context.get(active_property) { if let Some(local_context) = &active_property_definition.context { active_context = Inversible::new( local_context .process_with( *active_context.as_ref(), loader, active_property_definition.base_url(), context::ProcessingOptions::from(options).with_override(), ) .await .map_err(Loc::unwrap)? .into_inner(), ) .into_owned() } } } let mut result = K::Object::default(); let active_property_definition = match active_property { Some(active_property) => active_context.get(active_property), None => None, }; let language = match active_property_definition { Some(def) =>
, None => active_context.default_language(), }; let direction = match active_property_definition { Some(def) => match def.direction { Some(dir) => dir.option(), None => active_context.default_base_direction(), }, None => active_context.default_base_direction(), }; let type_mapping: Option<Type<&T>> = match active_property_definition { Some(def) => def.typ.as_ref().map(|t| t.into()), None => None, }; let container_mapping = match active_property_definition { Some(def) => def.container, None => Container::None, }; let remove_index = (index.is_some() && container_mapping.contains(ContainerType::Index)) || index.is_none(); match value { Value::Literal(lit, ty) => { use crate::object::value::Literal; if ty.as_ref().map(Type::Ref) == type_mapping && remove_index { match lit { Literal::Null => return Ok(K::null(meta(None))), Literal::Boolean(b) => return Ok(b.as_json_with(meta(None))), Literal::Number(n) => return Ok(K::number(n.clone().into(), meta(None))), Literal::String(s) => { if ty.is_some() || (language.is_none() && direction.is_none()) { return Ok(s.as_json_with(meta(None))); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), s.as_json_with(meta(None)), ); } } } } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; match lit { Literal::Null => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), K::null(meta(None)), ); } Literal::Boolean(b) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), b.as_json_with(meta(None)), ); } Literal::Number(n) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), K::number(n.clone().into(), meta(None)), ); } Literal::String(s) => { result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), 
meta(None)), s.as_json_with(meta(None)), ); } } if let Some(ty) = ty { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Type), true, false, options, )?; let compact_ty = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Ref(Reference::Id(ty.clone())), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), match compact_ty { Some(s) => K::string(s.as_str().into(), meta(None)), None => K::null(meta(None)), }, ); } } } Value::LangString(ls) => { let ls_language = ls.language(); let ls_direction = ls.direction(); if remove_index && (ls_language.is_none() || language == ls_language) && (ls_direction.is_none() || direction == ls_direction) { return Ok(ls.as_str().as_json_with(meta(None))); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), K::string(ls.as_str().into(), meta(None)), ); if let Some(language) = ls.language() { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Language), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), language.as_json_with(meta(None)), ); } if let Some(direction) = ls.direction() { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Direction), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), direction.as_json_with(meta(None)), ); } } } Value::Json(value) => { if type_mapping == Some(Type::Json) && remove_index { return Ok(value.as_json_with(meta)); } else { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Value), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), value.as_json_with(meta.clone()), ); 
let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Type), true, false, options, )?; let compact_ty = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Json), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), match compact_ty { Some(s) => K::string(s.as_str().into(), meta(None)), None => K::null(meta(None)), }, ); } } } if !remove_index { if let Some(index) = index { let compact_key = compact_iri::<J, _, _>( active_context.as_ref(), &Term::Keyword(Keyword::Index), true, false, options, )?; result.insert( K::new_key(compact_key.as_ref().unwrap().as_str(), meta(None)), index.as_json_with(meta(None)), ); } } Ok(K::object(result, meta(None))) }
match def.language.as_ref() { Some(lang) => lang.as_ref().map(|l| l.as_ref()).option(), None => active_context.default_language(), }
if_condition
[ { "content": "/// Get the `@value` field of a value object.\n\nfn value_value<J: JsonClone, K: JsonFrom<J>, T: Id, M>(value: &Value<J, T>, meta: M) -> K\n\nwhere\n\n\tM: Clone + Fn(Option<&J::MetaData>) -> K::MetaData,\n\n{\n\n\tuse crate::object::value::Literal;\n\n\tmatch value {\n\n\t\tValue::Literal(lit, _ty) => match lit {\n\n\t\t\tLiteral::Null => K::null(meta(None)),\n\n\t\t\tLiteral::Boolean(b) => b.as_json_with(meta(None)),\n\n\t\t\tLiteral::Number(n) => K::number(n.clone().into(), meta(None)),\n\n\t\t\tLiteral::String(s) => s.as_json_with(meta(None)),\n\n\t\t},\n\n\t\tValue::LangString(str) => K::string(str.as_str().into(), meta(None)),\n\n\t\tValue::Json(json) => json.as_json_with(meta),\n\n\t}\n\n}\n\n\n", "file_path": "src/compaction/mod.rs", "rank": 0, "score": 333201.18401212303 }, { "content": "/// Convert a term to a node id, if possible.\n\n/// Return `None` if the term is `null`.\n\npub fn node_id_of_term<T: Id>(term: Term<T>) -> Option<Reference<T>> {\n\n\tmatch term {\n\n\t\tTerm::Null => None,\n\n\t\tTerm::Ref(prop) => Some(prop),\n\n\t\tTerm::Keyword(kw) => Some(Reference::Invalid(kw.into_str().to_string())),\n\n\t}\n\n}\n\n\n\n/// Expand a node object.\n\npub(crate) async fn expand_node<\n\n\t'a,\n\n\tJ: JsonExpand,\n\n\tT: 'a + Id + Send + Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tactive_context: &'a C,\n\n\ttype_scoped_context: &'a C,\n\n\tactive_property: ActiveProperty<'a, J>,\n\n\texpanded_entries: Vec<ExpandedEntry<'a, J, Term<T>>>,\n", "file_path": "src/expansion/node.rs", "rank": 1, "score": 298191.2292051495 }, { "content": "/// Expand a literal value.\n\n/// See <https://www.w3.org/TR/json-ld11-api/#value-expansion>.\n\npub fn expand_literal<J: JsonHash + JsonClone, T: Id, C: Context<T>>(\n\n\tsource: Option<loader::Id>,\n\n\tactive_context: &C,\n\n\tactive_property: ActiveProperty<J>,\n\n\tvalue: LiteralValue<J>,\n\n\twarnings: &mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> 
Result<Indexed<Object<J, T>>, Error> {\n\n\tlet active_property_definition = active_context.get_opt(active_property.id());\n\n\n\n\tlet active_property_type = if let Some(active_property_definition) = active_property_definition\n\n\t{\n\n\t\tactive_property_definition.typ.clone()\n\n\t} else {\n\n\t\tNone\n\n\t};\n\n\n\n\tmatch active_property_type {\n\n\t\t// If the `active_property` has a type mapping in `active_context` that is `@id`, and the\n\n\t\t// `value` is a string, return a new map containing a single entry where the key is `@id` and\n\n\t\t// the value is the result of IRI expanding `value` using `true` for `document_relative` and\n", "file_path": "src/expansion/literal.rs", "rank": 2, "score": 256632.74104644492 }, { "content": "fn select_nest_result<'a, K: 'a + JsonBuild + JsonMut + JsonIntoMut, T: Id, C: ContextMut<T>, M>(\n\n\tresult: &'a mut K::Object,\n\n\tactive_context: Inversible<T, &C>,\n\n\titem_active_property: &str,\n\n\tcompact_arrays: bool,\n\n\tmeta: M,\n\n) -> Result<(&'a mut K::Object, Container, bool), Error>\n\nwhere\n\n\tM: Fn() -> K::MetaData,\n\n{\n\n\tlet (nest_result, container) = match active_context.get(item_active_property) {\n\n\t\tSome(term_definition) => {\n\n\t\t\tlet nest_result = match &term_definition.nest {\n\n\t\t\t\tSome(nest_term) => {\n\n\t\t\t\t\t// If nest term is not @nest,\n\n\t\t\t\t\t// or a term in the active context that expands to @nest,\n\n\t\t\t\t\t// an invalid @nest value error has been detected,\n\n\t\t\t\t\t// and processing is aborted.\n\n\t\t\t\t\tif nest_term != \"@nest\" {\n\n\t\t\t\t\t\tmatch active_context.get(nest_term.as_ref()) {\n", "file_path": "src/compaction/property.rs", "rank": 3, "score": 252034.9919326434 }, { "content": "/// Type composed of `J` JSON values that can be converted\n\n/// into a `K` JSON value.\n\npub trait AsJson<J: JsonClone, K: JsonFrom<J>> {\n\n\t/// Converts this value into a `K` JSON value using the given\n\n\t/// `meta` function to convert `J::MetaData` into 
`K::MetaData`.\n\n\tfn as_json_with(&self, meta: impl Clone + Fn(Option<&J::MetaData>) -> K::MetaData) -> K;\n\n\n\n\t/// Converts this value into a `K` JSON value.\n\n\t///\n\n\t/// The `K` value is annotated with the default value of `K::MetaData`.\n\n\tfn as_json(&self) -> K\n\n\twhere\n\n\t\tK::MetaData: Default,\n\n\t{\n\n\t\tself.as_json_with(|_| K::MetaData::default())\n\n\t}\n\n}\n\n\n", "file_path": "src/util/json/build.rs", "rank": 4, "score": 250855.3917039103 }, { "content": "// Default value for `document_relative` is `false` and for `vocab` is `true`.\n\npub fn expand_iri<T: Id, C: Context<T>, M: Clone>(\n\n\tsource: Option<loader::Id>,\n\n\tactive_context: &C,\n\n\tvalue: &str,\n\n\tmetadata: &M,\n\n\tdocument_relative: bool,\n\n\tvocab: bool,\n\n\twarnings: &mut Vec<Loc<Warning, M>>,\n\n) -> Term<T> {\n\n\tif let Ok(keyword) = Keyword::try_from(value) {\n\n\t\tTerm::Keyword(keyword)\n\n\t} else {\n\n\t\t// If value has the form of a keyword, a processor SHOULD generate a warning and return\n\n\t\t// null.\n\n\t\tif is_keyword_like(value) {\n\n\t\t\twarnings.push(Loc::new(\n\n\t\t\t\tWarning::KeywordLikeValue(value.to_string()),\n\n\t\t\t\tsource,\n\n\t\t\t\tmetadata.clone(),\n\n\t\t\t));\n", "file_path": "src/expansion/iri.rs", "rank": 5, "score": 244422.88054142243 }, { "content": "fn optional_string<K: JsonBuild>(s: Option<String>, meta: K::MetaData) -> K {\n\n\tmatch s {\n\n\t\tSome(s) => K::string(s.as_str().into(), meta),\n\n\t\tNone => K::null(meta),\n\n\t}\n\n}\n\n\n\n/// Compaction options.\n\n#[derive(Clone, Copy)]\n\npub struct Options {\n\n\t/// JSON-LD processing mode.\n\n\tpub processing_mode: ProcessingMode,\n\n\n\n\t/// Determines if IRIs are compacted relative to the provided base IRI or document location when compacting.\n\n\tpub compact_to_relative: bool,\n\n\n\n\t/// If set to `true`, arrays with just one element are replaced with that element during compaction.\n\n\t/// If set to `false`, all arrays will remain arrays even if 
they have just one element.\n\n\tpub compact_arrays: bool,\n\n\n", "file_path": "src/compaction/mod.rs", "rank": 6, "score": 240584.95312258805 }, { "content": "/// Converts a JSON value into the same JSON value represented with another type.\n\nfn json_to_json<J: JsonClone, K: JsonFrom<J>>(\n\n\tinput: &J,\n\n\tm: impl Clone + Fn(Option<&J::MetaData>) -> K::MetaData,\n\n) -> K {\n\n\tlet meta: K::MetaData = m(Some(input.metadata()));\n\n\tmatch input.as_value_ref() {\n\n\t\tValueRef::Null => K::null(meta),\n\n\t\tValueRef::Boolean(b) => K::boolean(b, meta),\n\n\t\tValueRef::Number(n) => K::number(n.clone().into(), meta),\n\n\t\tValueRef::String(s) => K::string((&**s).into(), meta),\n\n\t\tValueRef::Array(a) => K::array(\n\n\t\t\ta.iter()\n\n\t\t\t\t.map(|value| json_to_json(&*value, m.clone()))\n\n\t\t\t\t.collect(),\n\n\t\t\tmeta,\n\n\t\t),\n\n\t\tValueRef::Object(o) => K::object(\n\n\t\t\to.iter()\n\n\t\t\t\t.map(|(key, value)| {\n\n\t\t\t\t\t(\n", "file_path": "src/util/json/build.rs", "rank": 7, "score": 229751.04597655736 }, { "content": "/// Checks if the given context has a protected definition.\n\npub fn has_protected_items<T: Id, C: Context<T>>(active_context: &C) -> bool {\n\n\tfor (_, definition) in active_context.definitions() {\n\n\t\tif definition.protected {\n\n\t\t\treturn true;\n\n\t\t}\n\n\t}\n\n\n\n\tfalse\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 8, "score": 223777.66258637048 }, { "content": "pub fn json_ld_eq<J: Json, K: Json>(a: &J, b: &K) -> bool\n\nwhere\n\n\tJ::Number: PartialEq<K::Number>,\n\n{\n\n\tmatch (a.as_value_ref(), b.as_value_ref()) {\n\n\t\t(ValueRef::Array(a), ValueRef::Array(b)) if a.len() == b.len() => {\n\n\t\t\tlet mut selected = Vec::with_capacity(a.len());\n\n\t\t\tselected.resize(a.len(), false);\n\n\n\n\t\t\t'a_items: for item in a.iter() {\n\n\t\t\t\tfor (i, sel) in selected.iter_mut().enumerate() {\n\n\t\t\t\t\tif !*sel && json_ld_eq(&*item, &*b.get(i).unwrap()) {\n\n\t\t\t\t\t\t*sel = 
true;\n\n\t\t\t\t\t\tcontinue 'a_items;\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\n\n\t\t\t\treturn false;\n\n\t\t\t}\n\n\n", "file_path": "src/util/json/build.rs", "rank": 9, "score": 221149.21036386586 }, { "content": "/// Local context used for context expansion.\n\n///\n\n/// Local contexts can be seen as \"abstract contexts\" that can be processed to enrich an\n\n/// existing active context.\n\npub trait Local<T: Id = IriBuf>: JsonSendSync {\n\n\t/// Process the local context with specific options.\n\n\tfn process_full<'a, 's: 'a, C: ContextMut<T> + Send + Sync, L: Loader + Send + Sync>(\n\n\t\t&'s self,\n\n\t\tactive_context: &'a C,\n\n\t\tstack: ProcessingStack,\n\n\t\tloader: &'a mut L,\n\n\t\tbase_url: Option<Iri<'a>>,\n\n\t\toptions: ProcessingOptions,\n\n\t) -> BoxFuture<'a, ProcessingResult<'s, Self, C>>\n\n\twhere\n\n\t\tC::LocalContext: From<L::Output> + From<Self>,\n\n\t\tL::Output: Into<Self>,\n\n\t\tT: Send + Sync;\n\n\n\n\t/// Process the local context with specific options.\n\n\tfn process_with<'a, 's: 'a, C: ContextMut<T> + Send + Sync, L: Loader + Send + Sync>(\n\n\t\t&'s self,\n\n\t\tactive_context: &'a C,\n\n\t\tloader: &'a mut L,\n", "file_path": "src/context/mod.rs", "rank": 10, "score": 220148.93547088493 }, { "content": "/// Type that can be compacted with an index.\n\npub trait CompactIndexed<J: JsonSrc, T: Id> {\n\n\t/// Compact with the given optional index.\n\n\tfn compact_indexed<'a, K: JsonFrom<J>, C: ContextMut<T>, L: Loader, M>(\n\n\t\t&'a self,\n\n\t\tindex: Option<&'a str>,\n\n\t\tactive_context: Inversible<T, &'a C>,\n\n\t\ttype_scoped_context: Inversible<T, &'a C>,\n\n\t\tactive_property: Option<&'a str>,\n\n\t\tloader: &'a mut L,\n\n\t\toptions: Options,\n\n\t\tmeta: M,\n\n\t) -> BoxFuture<'a, Result<K, Error>>\n\n\twhere\n\n\t\tJ: 'a,\n\n\t\tT: 'a,\n\n\t\tC: Sync + Send,\n\n\t\tC::LocalContext: Send + Sync + From<L::Output>,\n\n\t\tL: Sync + Send,\n\n\t\tM: 'a + Send + Sync + Clone + Fn(Option<&J::MetaData>) -> K::MetaData;\n\n}\n", 
"file_path": "src/compaction/mod.rs", "rank": 11, "score": 215402.8616467805 }, { "content": "/// JSON value that can be converted from a `J` value.\n\npub trait JsonFrom<J: Json> = JsonMutSendSync + JsonBuild + JsonIntoMut\n\nwhere <Self as Json>::Number: From<<J as Json>::Number>;\n\n\n", "file_path": "src/util/json/build.rs", "rank": 12, "score": 214278.88358181005 }, { "content": "/// Hash a JSON value.\n\n///\n\n/// This bypasses any implementations of `Hash` for `J`\n\n/// since most JSON implementations (such as `serde_json`) do\n\n/// no provide it.\n\npub fn hash_json<J: JsonHash, H: Hasher>(json: &J, hasher: &mut H) {\n\n\tuse cc_traits::{Iter, MapIter};\n\n\tmatch json.as_value_ref() {\n\n\t\tValueRef::Null => (),\n\n\t\tValueRef::Boolean(b) => b.hash(hasher),\n\n\t\tValueRef::Number(n) => n.hash(hasher),\n\n\t\tValueRef::String(s) => s.hash(hasher),\n\n\t\tValueRef::Array(ary) => {\n\n\t\t\tfor item in ary.iter() {\n\n\t\t\t\thash_json(&*item, hasher)\n\n\t\t\t}\n\n\t\t}\n\n\t\tValueRef::Object(obj) => {\n\n\t\t\t// Elements must be combined with a associative and commutative operation •.\n\n\t\t\t// (u64, •, 0) must form a commutative monoid.\n\n\t\t\t// This is satisfied by • = u64::wrapping_add.\n\n\t\t\tlet mut hash = 0;\n\n\t\t\tfor (key, value) in obj.iter() {\n\n\t\t\t\tlet mut h = DefaultHasher::new();\n\n\t\t\t\t(*key).hash(&mut h);\n\n\t\t\t\thash_json(&*value, &mut h);\n\n\t\t\t\thash = u64::wrapping_add(hash, h.finish());\n\n\t\t\t}\n\n\t\t\thasher.write_u64(hash);\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/util/json.rs", "rank": 13, "score": 199606.56048014964 }, { "content": "/// Type that can be compacted.\n\npub trait Compact<J: JsonSrc, T: Id> {\n\n\t/// Compact a JSON-LD document into a `K` JSON value with the provided\n\n\t/// type scoped context, active property and options.\n\n\t///\n\n\t/// Unless you know what you are doing, you will probably prefer\n\n\t/// to use the [`compact`](Compact::compact) and 
[`compact_with`](Compact::compact_with) functions.\n\n\tfn compact_full<'a, K: JsonFrom<J>, C: ContextMut<T>, L: Loader, M>(\n\n\t\t&'a self,\n\n\t\tactive_context: Inversible<T, &'a C>,\n\n\t\ttype_scoped_context: Inversible<T, &'a C>,\n\n\t\tactive_property: Option<&'a str>,\n\n\t\tloader: &'a mut L,\n\n\t\toptions: Options,\n\n\t\tmeta: M,\n\n\t) -> BoxFuture<'a, Result<K, Error>>\n\n\twhere\n\n\t\tJ: 'a,\n\n\t\tT: 'a,\n\n\t\tC: Sync + Send,\n\n\t\tC::LocalContext: Send + Sync + From<L::Output>,\n", "file_path": "src/compaction/mod.rs", "rank": 14, "score": 186505.79833668892 }, { "content": "/// Hash a [`HashMap`].\n\n///\n\n/// The standard library does not provide (yet) a `Hash` implementation\n\n/// for the [`HashMap`] type. This can be used instead.\n\n///\n\n/// Note that this function not particularly strong and does\n\n/// not protect against DoS attacks.\n\npub fn hash_map<K: Hash, V: Hash, H: Hasher>(map: &HashMap<K, V>, hasher: &mut H) {\n\n\t// See: https://github.com/rust-lang/rust/pull/48366\n\n\t// Elements must be combined with a associative and commutative operation •.\n\n\t// (u64, •, 0) must form a commutative monoid.\n\n\t// This is satisfied by • = u64::wrapping_add.\n\n\tlet mut hash = 0;\n\n\tfor entry in map {\n\n\t\tlet mut h = DefaultHasher::new();\n\n\t\tentry.hash(&mut h);\n\n\t\thash = u64::wrapping_add(hash, h.finish());\n\n\t}\n\n\n\n\thasher.write_u64(hash);\n\n}\n", "file_path": "src/util/mod.rs", "rank": 15, "score": 180620.1758341911 }, { "content": "/// Default value of `as_array` is false.\n\nfn add_value<K: JsonBuild + JsonMut>(\n\n\tmap: &mut K::Object,\n\n\tkey: &str,\n\n\tvalue: K,\n\n\tas_array: bool,\n\n\tmeta: impl Clone + Fn() -> K::MetaData,\n\n) {\n\n\tmatch map.get(key).map(|value| value.is_array()) {\n\n\t\tSome(false) => {\n\n\t\t\tlet value = map.remove(key).unwrap();\n\n\t\t\tmap.insert(\n\n\t\t\t\tK::new_key(key, meta()),\n\n\t\t\t\tK::array(Some(value).into_iter().collect(), 
meta()),\n\n\t\t\t);\n\n\t\t}\n\n\t\tNone if as_array => {\n\n\t\t\tmap.insert(K::new_key(key, meta()), K::empty_array(meta()));\n\n\t\t}\n\n\t\t_ => (),\n\n\t}\n", "file_path": "src/compaction/mod.rs", "rank": 16, "score": 176514.44183118083 }, { "content": "/// Converts the given `json` value into an array\n\n/// if it is not already.\n\n///\n\n/// Returns a tuple providing an iterator over the items\n\n/// of the array, and the size of the array.\n\npub fn as_array<J: Json>(json: &J) -> (AsArray<J>, usize) {\n\n\tuse cc_traits::{Iter, Len};\n\n\tmatch json.as_value_ref() {\n\n\t\tValueRef::Array(ary) => (AsArray::Array(ary.iter()), ary.len()),\n\n\t\t_ => (AsArray::NotArray(Some(json)), 1),\n\n\t}\n\n}\n\n\n", "file_path": "src/util/json.rs", "rank": 17, "score": 175736.35360402166 }, { "content": "fn filter_top_level_item<J: JsonHash, T: Id>(item: &Indexed<Object<J, T>>) -> bool {\n\n\t// Remove dangling values.\n\n\t!matches!(item.inner(), Object::Value(_))\n\n}\n\n\n\npub enum ActiveProperty<'a, J: Json> {\n\n\tNone,\n\n\tSome(&'a str, &'a J::MetaData),\n\n}\n\n\n\nimpl<'a, J: Json> ActiveProperty<'a, J> {\n\n\tpub fn is_none(&self) -> bool {\n\n\t\tmatches!(self, Self::None)\n\n\t}\n\n\n\n\tpub fn is_some(&self) -> bool {\n\n\t\tmatches!(self, Self::Some(_, _))\n\n\t}\n\n\n\n\tpub fn id(&self) -> Option<&'a str> {\n", "file_path": "src/expansion/mod.rs", "rank": 18, "score": 175322.5807561758 }, { "content": "/// Unique identifier types.\n\n///\n\n/// While JSON-LD uses [Internationalized Resource Identifiers (IRIs)](https://en.wikipedia.org/wiki/Internationalized_resource_identifier)\n\n/// to uniquely identify each node,\n\n/// this crate does not imposes the internal representation of identifiers.\n\n///\n\n/// Whatever type you choose, it must implement this trait to usure that:\n\n/// - there is a low cost bijection with IRIs,\n\n/// - it can be cloned ([`Clone`]),\n\n/// - it can be compared ([`PartialEq`], [`Eq`]),\n\n/// - it can be hashed 
([`Hash`]).\n\n///\n\n/// # Using `enum` types\n\n/// If you know in advance which IRIs will be used by your implementation,\n\n/// one possibility is to use a `enum` type as identifier.\n\n/// This can be done throught the use of the [`Lexicon`](`crate::Lexicon`) type along with the\n\n/// [`iref-enum`](https://crates.io/crates/iref-enum) crate:\n\n/// ```\n\n/// use iref_enum::*;\n\n/// use json_ld::Lexicon;\n\n/// use ijson::IValue;\n\n///\n\n/// /// Vocabulary used in the implementation.\n\n/// #[derive(IriEnum, Clone, Copy, PartialEq, Eq, Hash)]\n\n/// #[iri_prefix(\"rdfs\" = \"http://www.w3.org/2000/01/rdf-schema#\")]\n\n/// #[iri_prefix(\"manifest\" = \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#\")]\n\n/// #[iri_prefix(\"vocab\" = \"https://w3c.github.io/json-ld-api/tests/vocab#\")]\n\n/// pub enum Vocab {\n\n/// #[iri(\"rdfs:comment\")] Comment,\n\n///\n\n/// #[iri(\"manifest:name\")] Name,\n\n/// #[iri(\"manifest:entries\")] Entries,\n\n/// #[iri(\"manifest:action\")] Action,\n\n/// #[iri(\"manifest:result\")] Result,\n\n///\n\n/// #[iri(\"vocab:PositiveEvaluationTest\")] PositiveEvalTest,\n\n/// #[iri(\"vocab:NegativeEvaluationTest\")] NegativeEvalTest,\n\n/// #[iri(\"vocab:option\")] Option,\n\n/// #[iri(\"vocab:specVersion\")] SpecVersion,\n\n/// #[iri(\"vocab:processingMode\")] ProcessingMode,\n\n/// #[iri(\"vocab:expandContext\")] ExpandContext,\n\n/// #[iri(\"vocab:base\")] Base\n\n/// }\n\n///\n\n/// /// A fully functional identifier type.\n\n/// pub type Id = Lexicon<Vocab>;\n\n///\n\n/// fn handle_node(node: &json_ld::Node<IValue, Id>) {\n\n/// for name in node.get(Vocab::Name) { // <- note that we can directly use `Vocab` here.\n\n/// println!(\"node name: {}\", name.as_str().unwrap());\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Id: AsIri + Clone + PartialEq + Eq + Hash {\n\n\t/// Create an identifier from its IRI.\n\n\tfn from_iri(iri: Iri) -> Self;\n\n\n\n\t#[inline(always)]\n\n\tfn as_json<K: JsonBuild>(&self, meta: K::MetaData) 
-> K {\n\n\t\tK::string(self.as_iri().as_str().into(), meta)\n\n\t}\n\n}\n\n\n\nimpl Id for IriBuf {\n\n\t#[inline(always)]\n\n\tfn from_iri(iri: Iri) -> IriBuf {\n\n\t\tiri.into()\n\n\t}\n\n}\n\n\n\nimpl<T: Id> TermLike for T {\n\n\t#[inline(always)]\n\n\tfn as_str(&self) -> &str {\n\n\t\tself.as_iri().into_str()\n\n\t}\n\n\n\n\t#[inline(always)]\n\n\tfn as_iri(&self) -> Option<Iri> {\n\n\t\tSome(self.as_iri())\n\n\t}\n\n}\n", "file_path": "src/id.rs", "rank": 19, "score": 171743.32087855318 }, { "content": "/// Build an invalid reference and emit a warning.\n\nfn invalid<T: Id, M: Clone>(\n\n\tvalue: String,\n\n\tsource: Option<loader::Id>,\n\n\tmetadata: &M,\n\n\twarnings: &mut Vec<Loc<Warning, M>>,\n\n) -> Term<T> {\n\n\twarnings.push(Loc::new(\n\n\t\tWarning::MalformedIri(value.clone()),\n\n\t\tsource,\n\n\t\tmetadata.clone(),\n\n\t));\n\n\tReference::Invalid(value).into()\n\n}\n", "file_path": "src/expansion/iri.rs", "rank": 20, "score": 170477.73466020502 }, { "content": "/// Types that can be converted into a borrowed node reference.\n\n///\n\n/// This is a convenient trait is used to simplify the use of references.\n\n/// For instance consider the [`Node::get`](crate::Node::get) method, used to get the objects associated to the\n\n/// given reference property for a given node.\n\n/// It essentially have the following signature:\n\n/// ```ignore\n\n/// fn get(&self, id: &Reference<T>) -> Objects;\n\n/// ```\n\n/// However building a `Reference` by hand can be tedious, especially while using [`Lexicon`](crate::Lexicon) and\n\n/// [`Vocab`](crate::Vocab). 
It can be as verbose as `node.get(&Reference::Id(Lexicon::Id(MyVocab::Term)))`.\n\n/// Thanks to `ToReference` which is implemented by `Lexicon<V>` for any type `V` implementing `Vocab`,\n\n/// it is simplified into `node.get(MyVocab::Term)` (while the first syntax remains correct).\n\n/// The signature of `get` becomes:\n\n/// ```ignore\n\n/// fn get<R: ToReference<T>>(&self, id: R) -> Objects;\n\n/// ```\n\npub trait ToReference<T: Id> {\n\n\t/// The target type of the conversion, which can be borrowed as a `Reference<T>`.\n\n\ttype Reference: Borrow<Reference<T>>;\n\n\n\n\t/// Convert the value into a reference.\n\n\tfn to_ref(&self) -> Self::Reference;\n\n}\n\n\n\nimpl<'a, T: Id> ToReference<T> for &'a Reference<T> {\n\n\ttype Reference = &'a Reference<T>;\n\n\n\n\t#[inline(always)]\n\n\tfn to_ref(&self) -> Self::Reference {\n\n\t\tself\n\n\t}\n\n}\n", "file_path": "src/reference.rs", "rank": 21, "score": 168286.91401530785 }, { "content": "/// Build an invalid reference and emit a warning.\n\nfn invalid_iri<T: Id, M: Clone>(\n\n\tvalue: String,\n\n\tsource: Option<loader::Id>,\n\n\tmetadata: &M,\n\n\twarnings: &mut Vec<Loc<Warning, M>>,\n\n) -> Term<T> {\n\n\twarnings.push(Loc::new(\n\n\t\tWarning::MalformedIri(value.clone()),\n\n\t\tsource,\n\n\t\tmetadata.clone(),\n\n\t));\n\n\tReference::Invalid(value).into()\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 22, "score": 167270.96189073974 }, { "content": "/// JSON type that can be used by the compaction algorithm.\n\npub trait JsonSrc = JsonClone + JsonHash + JsonSendSync;\n\n\n\nmod iri;\n\nmod node;\n\nmod property;\n\nmod value;\n\n\n\npub(crate) use iri::*;\n\nuse node::*;\n\nuse property::*;\n\nuse value::*;\n\n\n", "file_path": "src/compaction/mod.rs", "rank": 23, "score": 166630.49974199955 }, { "content": "/// Hash an optional [`HashSet`].\n\npub fn hash_set_opt<T: Hash, H: Hasher>(set_opt: &Option<HashSet<T>>, hasher: &mut H) {\n\n\tif let Some(set) = set_opt.as_ref() 
{\n\n\t\thash_set(set, hasher)\n\n\t}\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 24, "score": 166137.72068546986 }, { "content": "pub trait Any<J: JsonHash, T: Id> {\n\n\tfn as_ref(&self) -> Ref<J, T>;\n\n\n\n\t#[inline]\n\n\tfn id<'a>(&'a self) -> Option<&Reference<T>>\n\n\twhere\n\n\t\tJ: 'a,\n\n\t{\n\n\t\tmatch self.as_ref() {\n\n\t\t\tRef::Node(n) => n.id.as_ref(),\n\n\t\t\t_ => None,\n\n\t\t}\n\n\t}\n\n\n\n\t#[inline]\n\n\tfn language<'a>(&'a self) -> Option<LenientLanguageTag>\n\n\twhere\n\n\t\tJ: 'a,\n\n\t\tT: 'a,\n\n\t{\n", "file_path": "src/object/mod.rs", "rank": 25, "score": 166017.21812694194 }, { "content": "/// JSON-LD context.\n\n///\n\n/// A context holds all the term definitions used to expand a JSON-LD value.\n\npub trait Context<T: Id = IriBuf>: Clone {\n\n\t// TODO Later\n\n\t// type Definitions<'a>: Iterator<Item = (&'a str, TermDefinition<T, Self>)>;\n\n\n\n\t/// The type of local contexts associated to this type of contexts.\n\n\ttype LocalContext: Local<T>;\n\n\n\n\t/// Create a newly-initialized active context with the given *base IRI*.\n\n\tfn new(base_iri: Option<Iri>) -> Self;\n\n\n\n\t/// Get the definition of a term.\n\n\tfn get(&self, term: &str) -> Option<&TermDefinition<T, Self>>;\n\n\n\n\tfn get_opt(&self, term: Option<&str>) -> Option<&TermDefinition<T, Self>> {\n\n\t\tif let Some(term) = term {\n\n\t\t\tself.get(term)\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n", "file_path": "src/context/mod.rs", "rank": 26, "score": 163539.3699577334 }, { "content": "type Id = Lexicon<Vocab>;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n\tlet mut loader = Loader::<IValue>::new(|s| serde_json::from_str(s));\n\n\n\n\t// The JSON-LD document to expand.\n\n\tlet doc = loader\n\n\t\t.load(iri!(\n\n\t\t\t\"https://w3c.github.io/json-ld-api/tests/expand-manifest.jsonld\"\n\n\t\t))\n\n\t\t.await\n\n\t\t.unwrap();\n\n\n\n\t// Expansion.\n\n\tlet expanded_doc = doc\n\n\t\t.expand::<context::Json<IValue, Id>, _>(&mut 
loader)\n\n\t\t.await\n\n\t\t.unwrap();\n\n\n", "file_path": "examples/reqwest-loader.rs", "rank": 27, "score": 162889.98384171817 }, { "content": "pub fn is_keyword(str: &str) -> bool {\n\n\tKeyword::try_from(str).is_ok()\n\n}\n\n\n", "file_path": "src/syntax/keyword.rs", "rank": 28, "score": 156660.1044323514 }, { "content": "pub fn is_keyword_like(s: &str) -> bool {\n\n\tif s.len() > 1 {\n\n\t\tfor (i, c) in s.chars().enumerate() {\n\n\t\t\tif (i == 0 && c != '@') || (i > 0 && !is_alpha(c)) {\n\n\t\t\t\treturn false;\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\ttrue\n\n\t} else {\n\n\t\tfalse\n\n\t}\n\n}\n", "file_path": "src/syntax/keyword.rs", "rank": 29, "score": 156660.1044323514 }, { "content": "// Checks if the input term is an IRI ending with a gen-delim character, or a blank node identifier.\n\nfn is_gen_delim_or_blank<T: Id>(t: &Term<T>) -> bool {\n\n\tmatch t {\n\n\t\tTerm::Ref(Reference::Blank(_)) => true,\n\n\t\tTerm::Ref(Reference::Id(id)) => {\n\n\t\t\tif let Some(c) = id.as_iri().as_str().chars().last() {\n\n\t\t\t\tis_gen_delim(c)\n\n\t\t\t} else {\n\n\t\t\t\tfalse\n\n\t\t\t}\n\n\t\t}\n\n\t\t_ => false,\n\n\t}\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 30, "score": 155886.15333693865 }, { "content": "/// Mutable JSON-LD context.\n\npub trait ContextMut<T: Id = IriBuf>: Context<T> {\n\n\t/// Defines the given term.\n\n\tfn set(\n\n\t\t&mut self,\n\n\t\tterm: &str,\n\n\t\tdefinition: Option<TermDefinition<T, Self>>,\n\n\t) -> Option<TermDefinition<T, Self>>;\n\n\n\n\t/// Sets the base IRI of the context.\n\n\tfn set_base_iri(&mut self, iri: Option<Iri>);\n\n\n\n\t/// Sets the vocabulary.\n\n\tfn set_vocabulary(&mut self, vocab: Option<Term<T>>);\n\n\n\n\t/// Sets the default language.\n\n\tfn set_default_language(&mut self, lang: Option<LenientLanguageTagBuf>);\n\n\n\n\t/// Sets de default language base direction.\n\n\tfn set_default_base_direction(&mut self, dir: Option<Direction>);\n\n\n\n\t/// Sets the previous context.\n\n\tfn 
set_previous_context(&mut self, previous: Self);\n\n}\n\n\n", "file_path": "src/context/mod.rs", "rank": 31, "score": 155403.9141040239 }, { "content": "type LangDir = Nullable<(Option<LenientLanguageTagBuf>, Option<Direction>)>;\n\n\n", "file_path": "src/context/inverse.rs", "rank": 32, "score": 155231.99265009956 }, { "content": "/// Result of the `expand_node_entries` function.\n\ntype NodeEntriesExpensionResult<J, T> =\n\n\tResult<ExpandedNode<J, T>, Loc<Error, <J as Json>::MetaData>>;\n\n\n", "file_path": "src/expansion/node.rs", "rank": 33, "score": 154524.2454478619 }, { "content": "/// Type returned by the `expand_node_entries` function.\n\n///\n\n/// It is a tuple containing both the node being expanded\n\n/// and a boolean flag set to `true` if the node contains\n\n/// value object entries (in practice, if it has a `@language` entry).\n\ntype ExpandedNode<J, T> = (Indexed<Node<J, T>>, bool);\n\n\n", "file_path": "src/expansion/node.rs", "rank": 34, "score": 153371.23600625206 }, { "content": "pub trait JsonContext = JsonSendSync + JsonClone;\n\n\n\n/// Options of the Context Processing Algorithm.\n\n#[derive(Clone, Copy, PartialEq, Eq)]\n\npub struct ProcessingOptions {\n\n\t/// The processing mode\n\n\tpub processing_mode: ProcessingMode,\n\n\n\n\t/// Override protected definitions.\n\n\tpub override_protected: bool,\n\n\n\n\t/// Propagate the processed context.\n\n\tpub propagate: bool,\n\n}\n\n\n\nimpl ProcessingOptions {\n\n\t/// Return the same set of options, but with `override_protected` set to `true`.\n\n\tpub fn with_override(&self) -> ProcessingOptions {\n\n\t\tlet mut opt = *self;\n\n\t\topt.override_protected = true;\n", "file_path": "src/context/mod.rs", "rank": 35, "score": 150083.0570826471 }, { "content": "/// JSON document loader.\n\n///\n\n/// Each document is uniquely identified by the loader by a `u32`.\n\npub trait Loader {\n\n\t/// The type of documents that can be loaded.\n\n\ttype Document: Json;\n\n\n\n\t/// Returns the unique 
identifier associated to the given IRI, if any.\n\n\tfn id(&self, iri: Iri<'_>) -> Option<Id>;\n\n\n\n\t/// Returns the unique identifier associated to the given IRI, if any.\n\n\t///\n\n\t/// Returns `None` if the input `iri` is `None`.\n\n\t#[inline(always)]\n\n\tfn id_opt(&self, iri: Option<Iri<'_>>) -> Option<Id> {\n\n\t\tiri.map(|iri| self.id(iri)).flatten()\n\n\t}\n\n\n\n\t/// Returns the IRI with the given identifier, if any.\n\n\tfn iri(&self, id: Id) -> Option<Iri<'_>>;\n\n\n\n\t/// Loads the document behind the given IRI.\n\n\tfn load<'a>(\n", "file_path": "src/loader.rs", "rank": 36, "score": 143972.33304379546 }, { "content": "pub trait Loader {\n\n\ttype Output;\n\n\n\n\t/// Returns the unique identifier associated to the given IRI, if any.\n\n\tfn id(&self, iri: Iri<'_>) -> Option<crate::loader::Id>;\n\n\n\n\t/// Returns the unique identifier associated to the given IRI, if any.\n\n\t///\n\n\t/// Returns `None` if the input `iri` is `None`.\n\n\t#[inline(always)]\n\n\tfn id_opt(&self, iri: Option<Iri<'_>>) -> Option<crate::loader::Id> {\n\n\t\tiri.map(|iri| self.id(iri)).flatten()\n\n\t}\n\n\n\n\t/// Returns the IRI with the given identifier, if any.\n\n\tfn iri(&self, id: crate::loader::Id) -> Option<Iri<'_>>;\n\n\n\n\tfn load_context<'a>(\n\n\t\t&'a mut self,\n\n\t\turl: Iri,\n", "file_path": "src/context/loader.rs", "rank": 37, "score": 141459.24707990457 }, { "content": "pub fn is_json_media_type(ty: &str) -> bool {\n\n\tty == \"application/json\" || ty == \"application/ld+json\"\n\n}\n\n\n\npub async fn load_remote_json_ld_document<J, P>(\n\n\turl: Iri<'_>,\n\n\tparser: &mut P,\n\n) -> Result<J, Error>\n\nwhere\n\n\tP: Send + Sync + FnMut(&str) -> Result<J, Error>,\n\n{\n\n\tlog::info!(\"loading remote document `{}'\", url);\n\n\tuse reqwest::header::*;\n\n\n\n\tlet client = reqwest::Client::new();\n\n\tlet request = client\n\n\t\t.get(url.as_str())\n\n\t\t.header(ACCEPT, \"application/ld+json, application/json\");\n\n\tlet response = 
request.send().await?;\n\n\n", "file_path": "src/reqwest/mod.rs", "rank": 38, "score": 139598.96823707287 }, { "content": "fn generate_test(entry: &Node<IValue, Id>) {\n\n\tlet name = entry.get(Vocab::Name).next().unwrap().as_str().unwrap();\n\n\tlet url = entry.get(Vocab::Action).next().unwrap().as_iri().unwrap();\n\n\tlet mut base_url = url;\n\n\tlet func_name = func_name(url.path().file_name().unwrap());\n\n\n\n\tlet mut processing_mode = ProcessingMode::JsonLd1_1;\n\n\tlet mut context_url = \"None\".to_string();\n\n\n\n\tfor option in entry.get(Vocab::Option) {\n\n\t\tif let Object::Node(option) = option.as_ref() {\n\n\t\t\tfor normative in option.get(Vocab::Normative) {\n\n\t\t\t\tif let Some(false) = normative.inner().as_bool() {\n\n\t\t\t\t\tinfo!(\"skipping test {} (non normative)\", url);\n\n\t\t\t\t\treturn;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tfor spec_version in option.get(Vocab::SpecVersion) {\n\n\t\t\t\tif let Some(spec_version) = spec_version.as_str() {\n", "file_path": "examples/generate-expand-tests.rs", "rank": 39, "score": 139586.9855861585 }, { "content": "fn generate_test(entry: &Node<IValue, Id>) {\n\n\tlet name = entry.get(Vocab::Name).next().unwrap().as_str().unwrap();\n\n\tlet url = entry.get(Vocab::Action).next().unwrap().as_iri().unwrap();\n\n\tlet mut base_url = url;\n\n\n\n\tif url == \"https://w3c.github.io/json-ld-api/tests/compact/p004-in.jsonld\" {\n\n\t\twarn!(\n\n\t\t\t\"skipping invalid test {} (https://github.com/w3c/json-ld-api/issues/517)\",\n\n\t\t\turl\n\n\t\t);\n\n\t\treturn;\n\n\t}\n\n\n\n\tlet func_name = func_name(url.path().file_name().unwrap());\n\n\n\n\tlet mut processing_mode = ProcessingMode::JsonLd1_1;\n\n\tlet mut compact_arrays = true;\n\n\tlet mut context_url = \"None\".to_string();\n\n\n\n\tfor context in entry.get(Vocab::Context) {\n", "file_path": "examples/generate-compact-tests.rs", "rank": 40, "score": 139586.9855861585 }, { "content": "/// Trait for types that are or wrap a mutable context.\n\n///\n\n/// 
This trait is used by the [`Document::compact`](crate::Document::compact)\n\n/// function to accept either a context or a wrapper to a context.\n\npub trait ContextMutProxy<T: Id = IriBuf> {\n\n\ttype Target: ContextMut<T>;\n\n\n\n\t/// Returns a reference to the mutable context.\n\n\tfn deref(&self) -> &Self::Target;\n\n}\n\n\n\n/// Context processing result.\n\npub type ProcessingResult<'s, J, C> =\n\n\tResult<Processed<'s, J, C>, Loc<Error, <J as generic_json::Json>::MetaData>>;\n\n\n", "file_path": "src/context/mod.rs", "rank": 41, "score": 139427.98776343325 }, { "content": "type Id = Lexicon<Foaf>;\n\n\n\n#[async_std::main]\n\nasync fn main() {\n\n\t// The JSON-LD document to expand.\n\n\tlet doc: IValue = serde_json::from_str(\n\n\t\tr#\"\n\n\t\t{\n\n\t\t\t\"@context\": {\n\n\t\t\t\t\"name\": \"http://xmlns.com/foaf/0.1/name\",\n\n\t\t\t\t\"email\": \"http://xmlns.com/foaf/0.1/mbox\"\n\n\t\t\t},\n\n\t\t\t\"@id\": \"timothee.haudebourg.net\",\n\n\t\t\t\"name\": \"Timothée Haudebourg\",\n\n\t\t\t\"email\": \"author@haudebourg.net\"\n\n\t\t}\n\n\t\"#,\n\n\t)\n\n\t.unwrap();\n\n\n", "file_path": "examples/custom-ids.rs", "rank": 42, "score": 136247.2458117928 }, { "content": "type Id = Lexicon<Foaf>;\n\n\n\n#[async_std::main]\n\nasync fn main() {\n\n\t// The JSON-LD document to expand.\n\n\tlet doc: IValue = serde_json::from_str(\n\n\t\tr#\"\n\n\t\t{\n\n\t\t\t\"@context\": {\n\n\t\t\t\t\"name\": \"http://xmlns.com/foaf/0.1/name\",\n\n\t\t\t\t\"email\": \"http://xmlns.com/foaf/0.1/mbox\"\n\n\t\t\t},\n\n\t\t\t\"@id\": \"timothee.haudebourg.net\",\n\n\t\t\t\"name\": \"Timothée Haudebourg\",\n\n\t\t\t\"email\": \"author@haudebourg.net\"\n\n\t\t}\n\n\t\"#,\n\n\t)\n\n\t.unwrap();\n\n\n", "file_path": "examples/custom-ids-iref-enum.rs", "rank": 43, "score": 131897.79725483432 }, { "content": "/// JSON document that can be expanded.\n\npub trait JsonExpand = JsonSendSync + JsonHash + JsonClone + JsonLft<'static>;\n\n\n\n/// Expansion options.\n\n#[derive(Clone, Copy, 
Default)]\n\npub struct Options {\n\n\t/// Sets the processing mode.\n\n\tpub processing_mode: ProcessingMode,\n\n\n\n\t/// Term expansion policy.\n\n\t///\n\n\t/// Default is `Policy::Standard`.\n\n\tpub policy: Policy,\n\n\n\n\t/// If set to true, input document entries are processed lexicographically.\n\n\t/// If false, order is not considered in processing.\n\n\tpub ordered: bool,\n\n}\n\n\n\n/// Key expansion policy.\n\n///\n", "file_path": "src/expansion/mod.rs", "rank": 44, "score": 130942.96413379215 }, { "content": "/// Hash a [`HashSet`].\n\n///\n\n/// The standard library does not provide (yet) a `Hash` implementation\n\n/// for the [`HashSet`] type. This can be used instead.\n\n///\n\n/// Note that this function not particularly strong and does\n\n/// not protect against DoS attacks.\n\npub fn hash_set<T: Hash, H: Hasher>(set: &HashSet<T>, hasher: &mut H) {\n\n\t// See: https://github.com/rust-lang/rust/pull/48366\n\n\t// Elements must be combined with a associative and commutative operation •.\n\n\t// (u64, •, 0) must form a commutative monoid.\n\n\t// This is satisfied by • = u64::wrapping_add.\n\n\tlet mut hash = 0;\n\n\tfor item in set {\n\n\t\tlet mut h = DefaultHasher::new();\n\n\t\titem.hash(&mut h);\n\n\t\thash = u64::wrapping_add(hash, h.finish());\n\n\t}\n\n\n\n\thasher.write_u64(hash);\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 45, "score": 130617.99678782682 }, { "content": "/// Checks if the the given character is included in the given string anywhere but at the first position.\n\nfn contains_after_first(id: &str, c: char) -> bool {\n\n\tif let Some(i) = id.find(c) {\n\n\t\ti > 0\n\n\t} else {\n\n\t\tfalse\n\n\t}\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 46, "score": 130429.11011091601 }, { "content": "/// Checks if the the given character is included in the given string anywhere but at the first or last position.\n\nfn contains_between_boundaries(id: &str, c: char) -> bool {\n\n\tif let Some(i) = id.find(c) 
{\n\n\t\tlet j = id.rfind(c).unwrap();\n\n\t\ti > 0 && j < id.len() - 1\n\n\t} else {\n\n\t\tfalse\n\n\t}\n\n}\n\n\n\n// fn define<'a>(&mut self, env: &mut DefinitionEnvironment<'a>, term: &str, value: &JsonValue) -> Result<(), Self::Error> {\n\n\n", "file_path": "src/context/processing.rs", "rank": 47, "score": 130429.11011091601 }, { "content": "enum TypeLangValue<'a, T: Id> {\n\n\tType(TypeSelection<T>),\n\n\tLang(LangSelection<'a>),\n\n}\n\n\n", "file_path": "src/compaction/mod.rs", "rank": 48, "score": 127374.06960842901 }, { "content": "/// Follows the `https://www.w3.org/TR/json-ld11-api/#create-term-definition` algorithm.\n\n/// Default value for `base_url` is `None`. Default values for `protected` and `override_protected` are `false`.\n\npub fn define<\n\n\t'a,\n\n\tJ: JsonContext,\n\n\tT: Id + Send + Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tactive_context: &'a mut C,\n\n\tlocal_context: &'a LocalContextObject<'a, J::Object>,\n\n\tterm: &'a str,\n\n\tterm_metadata: &'a J::MetaData,\n\n\tdefined: &'a mut HashMap<String, bool>,\n\n\tremote_contexts: ProcessingStack,\n\n\tloader: &'a mut L,\n\n\tbase_url: Option<Iri<'a>>,\n\n\tprotected: bool,\n\n\toptions: ProcessingOptions,\n\n\twarnings: &'a mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> BoxFuture<'a, Result<(), Error>>\n\nwhere\n", "file_path": "src/context/processing.rs", "rank": 49, "score": 125352.30180086978 }, { "content": "/// Expand an element.\n\n///\n\n/// See <https://www.w3.org/TR/json-ld11-api/#expansion-algorithm>.\n\n/// The default specified value for `ordered` and `from_map` is `false`.\n\npub fn expand_element<\n\n\t'a,\n\n\tJ: JsonExpand,\n\n\tT: 'a + Id + Send + Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tactive_context: &'a C,\n\n\tactive_property: ActiveProperty<'a, J>,\n\n\telement: &'a J,\n\n\tbase_url: Option<Iri<'a>>,\n\n\tloader: &'a mut L,\n\n\toptions: Options,\n\n\tfrom_map: bool,\n\n\twarnings: 
&'a mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> BoxFuture<'a, ElementExpansionResult<T, J>>\n\nwhere\n\n\tC::LocalContext: From<L::Output> + From<J> + Send + Sync,\n\n\tL::Output: Into<J>,\n\n{\n", "file_path": "src/expansion/element.rs", "rank": 50, "score": 122742.60828076521 }, { "content": "/// JSON-LD document.\n\n///\n\n/// This trait represent a JSON-LD document that can be expanded into an [`ExpandedDocument`]\n\n/// or compacted. It is the main entry point to the JSON-LD API.\n\n/// It is notably implemented for any type implementing the [generic_json::Json] trait.\n\npub trait Document<T: Id> {\n\n\ttype Json: Json;\n\n\n\n\t/// Document location, if any.\n\n\tfn base_url(&self) -> Option<Iri>;\n\n\n\n\t/// Expand the document with a custom base URL, initial context, document loader and\n\n\t/// expansion options.\n\n\t///\n\n\t/// If you do not wish to set the base URL and expansion options yourself, the\n\n\t/// [`expand`](`Document::expand`) method is more appropriate.\n\n\t///\n\n\t/// This is an asynchronous method since expanding the context may require loading remote\n\n\t/// ressources. 
It returns a boxed [`Future`](`std::future::Future`) to the result.\n\n\tfn expand_with<'a, C: 'a + ContextMut<T>, L: 'a + Loader>(\n\n\t\t&'a self,\n\n\t\tbase_url: Option<Iri>,\n\n\t\tcontext: &'a C,\n\n\t\tloader: &'a mut L,\n\n\t\toptions: expansion::Options,\n", "file_path": "src/document.rs", "rank": 51, "score": 120645.01346474749 }, { "content": "fn is_alpha(c: char) -> bool {\n\n\tlet c = c as u32;\n\n\t(0x41..=0x5a).contains(&c) || (0x61..=0x7a).contains(&c)\n\n}\n\n\n", "file_path": "src/syntax/keyword.rs", "rank": 52, "score": 120285.75884813514 }, { "content": "fn negative_test(options: Options, input_url: Iri, base_url: Iri, error_code: ErrorCode) {{\n\n\tlet mut loader = FsLoader::<IValue>::new(|s| serde_json::from_str(s));\n\n\tloader.mount(iri!(\"https://w3c.github.io/json-ld-api\"), \"json-ld-api\");\n\n\n\n\tlet input = task::block_on(loader.load(input_url)).unwrap();\n\n\tlet mut input_context: context::Json<IValue, IriBuf> = context::Json::new(Some(base_url));\n\n\n\n\tif let Some(context_url) = options.context {{\n\n\t\tlet local_context = task::block_on(loader.load_context(context_url)).unwrap().into_context();\n\n\t\tinput_context = task::block_on(local_context.process_with(&input_context, &mut loader, Some(base_url), options.into())).unwrap().into_inner();\n\n\t}}\n\n\n\n\tlet result = task::block_on(input.expand_with(Some(base_url), &input_context, &mut loader, options.into()));\n\n\n\n\tmatch result {{\n\n\t\tOk(output) => {{\n\n\t\t\tlet output_json: IValue = output.as_json();\n\n\t\t\tprintln!(\"output=\\n{{}}\", serde_json::to_string_pretty(&output_json).unwrap());\n\n\t\t\tpanic!(\"expansion succeeded where it should have failed with code: {{}}\", error_code)\n\n\t\t}},\n\n\t\tErr(e) => {{\n\n\t\t\tassert_eq!(e.code(), error_code)\n\n\t\t}}\n\n\t}}\n\n}}\n", "file_path": "tests/templates/expand-header.rs", "rank": 53, "score": 118185.44112114781 }, { "content": "fn negative_test(options: Options, input_url: Iri, base_url: Iri, 
error_code: ErrorCode) {{\n\n\tlet mut loader = FsLoader::<IValue>::new(|s| serde_json::from_str(s));\n\n\tloader.mount(iri!(\"https://w3c.github.io/json-ld-api\"), \"json-ld-api\");\n\n\n\n\tlet input = task::block_on(loader.load(input_url)).unwrap();\n\n\tlet base_json_context = base_json_context(base_url);\n\n\tlet mut input_context: ProcessedOwned<IValue, context::Json<IValue, IriBuf>> =\n\n\t\tProcessedOwned::new(base_json_context, context::Json::new(Some(base_url)));\n\n\n\n\tif let Some(context_url) = options.context {{\n\n\t\tlet local_context = task::block_on(loader.load_context(context_url)).unwrap().into_context();\n\n\t\tinput_context = match task::block_on(local_context.process_with(input_context.as_ref(), &mut loader, Some(base_url), options.into())) {{\n\n\t\t\tOk(context) => context.owned(),\n\n\t\t\tErr(e) => {{\n\n\t\t\t\tassert_eq!(e.code(), error_code);\n\n\t\t\t\treturn\n\n\t\t\t}}\n\n\t\t}};\n\n\t}}\n\n\n", "file_path": "tests/templates/compact-header.rs", "rank": 54, "score": 118185.44112114781 }, { "content": "/// Type that can be converted into a `K` JSON value.\n\npub trait AsAnyJson<K: JsonBuild> {\n\n\t/// Converts this value into a `K` JSON value using the\n\n\t/// given `meta` value as metadata.\n\n\tfn as_json_with(&self, meta: K::MetaData) -> K;\n\n\n\n\t/// Converts this value into a `K` JSON value using the\n\n\t/// default metadata value.\n\n\tfn as_json(&self) -> K\n\n\twhere\n\n\t\tK::MetaData: Default,\n\n\t{\n\n\t\tself.as_json_with(K::MetaData::default())\n\n\t}\n\n}\n\n\n", "file_path": "src/util/json/build.rs", "rank": 55, "score": 115926.41240689947 }, { "content": "fn no_metadata<M>(_: Option<&M>) -> () {{\n\n\t()\n\n}}\n\n\n", "file_path": "tests/templates/compact-header.rs", "rank": 56, "score": 113491.2087715014 }, { "content": "/// Resolve `iri_ref` against the given base IRI.\n\nfn resolve_iri(iri_ref: IriRef, base_iri: Option<Iri>) -> Option<IriBuf> {\n\n\tmatch base_iri {\n\n\t\tSome(base_iri) => 
Some(iri_ref.resolved(base_iri)),\n\n\t\tNone => match iri_ref.into_iri() {\n\n\t\t\tOk(iri) => Some(iri.into()),\n\n\t\t\tErr(_) => None,\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 57, "score": 112763.18360057272 }, { "content": "fn func_name(id: &str) -> String {\n\n\tlet mut name = \"compact_\".to_string();\n\n\n\n\tfor c in id.chars() {\n\n\t\tmatch c {\n\n\t\t\t'.' | '-' => break,\n\n\t\t\t_ => name.push(c),\n\n\t\t}\n\n\t}\n\n\n\n\tname\n\n}\n\n\n", "file_path": "examples/generate-compact-tests.rs", "rank": 58, "score": 111268.8361599289 }, { "content": "fn func_name(id: &str) -> String {\n\n\tlet mut name = \"expand_\".to_string();\n\n\n\n\tfor c in id.chars() {\n\n\t\tmatch c {\n\n\t\t\t'.' | '-' => break,\n\n\t\t\t_ => name.push(c),\n\n\t\t}\n\n\t}\n\n\n\n\tname\n\n}\n\n\n", "file_path": "examples/generate-expand-tests.rs", "rank": 59, "score": 111268.8361599289 }, { "content": "pub trait TermLike {\n\n\tfn as_iri(&self) -> Option<Iri>;\n\n\n\n\tfn as_str(&self) -> &str;\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub enum Term<T: AsIri> {\n\n\tNull,\n\n\tRef(Reference<T>),\n\n\tKeyword(Keyword),\n\n}\n\n\n\nimpl<T: AsIri> Term<T> {\n\n\tpub fn is_null(&self) -> bool {\n\n\t\tmatches!(self, Term::Null)\n\n\t}\n\n\n\n\tpub fn into_id(self) -> Result<T, Self> {\n\n\t\tmatch self {\n", "file_path": "src/syntax/term.rs", "rank": 60, "score": 110414.28213758089 }, { "content": "/// Vocabulary type.\n\n///\n\n/// Directly using [`IriBuf`] to identify each node\n\n/// can be very expensive.\n\n/// When you know in advance the set of IRIs will be used\n\n/// in your application, it is more effective to use\n\n/// an `enum` type where each variant represents an IRI.\n\n/// In this case, storing and comparing IRIs become very cheap.\n\n///\n\n/// In this setting, such `enum` type can implement the\n\n/// `Vocab` trait, automatically implemented in most cases.\n\n/// It can then be wrapped around the [`Vocab`] type to 
handle unexpected\n\n/// IRIs, and be used as identifier type instead of `IriBuf`.\n\npub trait Vocab: AsIri + Clone + PartialEq + Eq + Hash {\n\n\tfn from_iri(iri: Iri) -> Option<Self>;\n\n}\n\n\n\nimpl<T: AsIri + Clone + PartialEq + Eq + Hash> Vocab for T\n\nwhere\n\n\tfor<'a> T: TryFrom<Iri<'a>>,\n\n{\n\n\t#[inline]\n\n\tfn from_iri(iri: Iri) -> Option<Self> {\n\n\t\tmatch T::try_from(iri) {\n\n\t\t\tOk(t) => Some(t),\n\n\t\t\tErr(_) => None,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<V: Vocab> ToReference<Lexicon<V>> for V {\n\n\ttype Reference = Reference<Lexicon<V>>;\n\n\n", "file_path": "src/vocab.rs", "rank": 61, "score": 107721.49633706709 }, { "content": "/// Compact the given list of types into the given `result` compacted object.\n\nfn compact_types<\n\n\tJ: JsonSrc,\n\n\tK: JsonFrom<J>,\n\n\tT: Sync + Send + Id,\n\n\tC: ContextMut<T>,\n\n\tM: Clone + Fn(Option<&J::MetaData>) -> K::MetaData,\n\n>(\n\n\tresult: &mut K::Object,\n\n\ttypes: &[Reference<T>],\n\n\tactive_context: Inversible<T, &C>,\n\n\ttype_scoped_context: Inversible<T, &C>,\n\n\toptions: Options,\n\n\tmeta: M,\n\n) -> Result<(), Error> {\n\n\t// If expanded property is @type:\n\n\tif !types.is_empty() {\n\n\t\t// If expanded value is a string,\n\n\t\t// then initialize compacted value by IRI compacting expanded value using\n\n\t\t// type-scoped context for active context.\n\n\t\tlet compacted_value = if types.len() == 1 {\n", "file_path": "src/compaction/node.rs", "rank": 62, "score": 101090.78438681859 }, { "content": "struct InverseType<T: Id> {\n\n\treverse: Option<String>,\n\n\tany: Option<String>,\n\n\tmap: HashMap<Type<T>, String>,\n\n}\n\n\n\nimpl<T: Id> InverseType<T> {\n\n\tfn select(&self, selection: TypeSelection<T>) -> Option<&str> {\n\n\t\tmatch selection {\n\n\t\t\tTypeSelection::Reverse => self.reverse.as_ref(),\n\n\t\t\tTypeSelection::Any => self.any.as_ref(),\n\n\t\t\tTypeSelection::Type(ty) => self.map.get(&ty),\n\n\t\t}\n\n\t\t.map(|v| v.as_str())\n\n\t}\n\n\n\n\tfn set_any(&mut 
self, term: &str) {\n\n\t\tif self.any.is_none() {\n\n\t\t\tself.any = Some(term.to_string())\n\n\t\t}\n", "file_path": "src/context/inverse.rs", "rank": 63, "score": 96392.56480981573 }, { "content": "fn positive_test(options: Options, input_url: Iri, base_url: Iri, output_url: Iri) {\n\n\tlet mut loader = FsLoader::<IValue>::new(|s| serde_json::from_str(s));\n\n\tloader.mount(iri!(\"file://crate/tests\"), \"tests\");\n\n\n\n\tlet input = task::block_on(loader.load(input_url)).unwrap();\n\n\tlet output = task::block_on(loader.load(output_url)).unwrap();\n\n\tlet mut input_context: context::Json<IValue, IriBuf> = context::Json::new(Some(base_url));\n\n\n\n\tif let Some(context_url) = options.context {\n\n\t\tlet local_context = task::block_on(loader.load_context(context_url))\n\n\t\t\t.unwrap()\n\n\t\t\t.into_context();\n\n\t\tinput_context = task::block_on(local_context.process_with(\n\n\t\t\t&input_context,\n\n\t\t\t&mut loader,\n\n\t\t\tSome(base_url),\n\n\t\t\toptions.into(),\n\n\t\t))\n\n\t\t.unwrap()\n\n\t\t.into_inner();\n", "file_path": "tests/custom.rs", "rank": 64, "score": 84632.62336804457 }, { "content": "fn base_json_context(base_url: Iri) -> IValue {{\n\n\tlet mut object = ijson::IObject::new();\n\n\tobject.insert(\"@base\", IValue::from(base_url.as_str()));\n\n\tobject.into()\n\n}}\n\n\n", "file_path": "tests/templates/compact-header.rs", "rank": 65, "score": 83788.55595186341 }, { "content": "fn is_gen_delim(c: char) -> bool {\n\n\tmatches!(c, ':' | '/' | '?' 
| '#' | '[' | ']' | '@')\n\n}\n\n\n", "file_path": "src/context/processing.rs", "rank": 66, "score": 81949.26448331005 }, { "content": "fn positive_test(options: Options, input_url: Iri, base_url: Iri, output_url: Iri) {{\n\n\tlet mut loader = FsLoader::<IValue>::new(|s| serde_json::from_str(s));\n\n\tloader.mount(iri!(\"https://w3c.github.io/json-ld-api\"), \"json-ld-api\");\n\n\n\n\tlet input = task::block_on(loader.load(input_url)).unwrap();\n\n\tlet expected_output = task::block_on(loader.load(output_url)).unwrap();\n\n\tlet mut input_context: context::Json<IValue, IriBuf> = context::Json::new(Some(base_url));\n\n\n\n\tif let Some(context_url) = options.context {{\n\n\t\tlet local_context = task::block_on(loader.load_context(context_url)).unwrap().into_context();\n\n\t\tinput_context = task::block_on(local_context.process_with(&input_context, &mut loader, Some(base_url), options.into())).unwrap().into_inner();\n\n\t}}\n\n\n\n\tlet output = task::block_on(input.expand_with(Some(base_url), &input_context, &mut loader, options.into())).unwrap();\n\n\tlet output_json: IValue = output.as_json();\n\n\n\n\tlet success = json_ld_eq(&output_json, &*expected_output);\n\n\n\n\tif !success {{\n\n\t\tprintln!(\n", "file_path": "tests/templates/expand-header.rs", "rank": 67, "score": 81835.72533462285 }, { "content": "fn positive_test(options: Options, input_url: Iri, base_url: Iri, output_url: Iri) {{\n\n\tlet mut loader = FsLoader::<IValue>::new(|s| serde_json::from_str(s));\n\n\tloader.mount(iri!(\"https://w3c.github.io/json-ld-api\"), \"json-ld-api\");\n\n\n\n\tlet input = task::block_on(loader.load(input_url)).unwrap();\n\n\tlet expected_output = task::block_on(loader.load(output_url)).unwrap();\n\n\tlet base_json_context = base_json_context(base_url);\n\n\tlet mut input_context: ProcessedOwned<IValue, context::Json<IValue, IriBuf>> =\n\n\t\tProcessedOwned::new(base_json_context, context::Json::new(Some(base_url)));\n\n\n\n\tif let Some(context_url) = options.context 
{{\n\n\t\tlet local_context = task::block_on(loader.load_context(context_url))\n\n\t\t\t.unwrap()\n\n\t\t\t.into_context();\n\n\t\tinput_context = task::block_on(local_context.process_with(\n\n\t\t\tinput_context.as_ref(),\n\n\t\t\t&mut loader,\n\n\t\t\tSome(base_url),\n\n\t\t\toptions.into(),\n\n\t\t))\n", "file_path": "tests/templates/compact-header.rs", "rank": 68, "score": 81835.72533462285 }, { "content": "#[test]\n\nfn custom_0124() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/0124-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/0124-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/0124-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 69, "score": 65517.07672935798 }, { "content": "#[test]\n\nfn custom_0125() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/0125-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/0125-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/0125-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n", "file_path": "tests/custom.rs", "rank": 70, "score": 65517.07672935798 }, { "content": "#[test]\n\nfn custom_0120() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/0120-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/0120-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/0120-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 71, "score": 
65517.07672935798 }, { "content": "#[test]\n\nfn custom_0122() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/0122-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/0122-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/0122-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 72, "score": 65517.07672935798 }, { "content": "#[test]\n\nfn custom_0123() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/0123-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/0123-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/0123-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 73, "score": 65517.07672935798 }, { "content": "#[test]\n\nfn custom_e112() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/e112-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/e112-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/e112-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n\n// See w3c/json-ld-api#480\n\n// #[test]\n\n// fn custom_e122() {\n\n// \tlet input_url = iri!(\"file://crate/tests/custom/e122-in.jsonld\");\n\n// \tlet base_url = iri!(\"file://crate/tests/custom/e122-in.jsonld\");\n", "file_path": "tests/custom.rs", "rank": 74, "score": 64018.63726351402 }, { "content": "#[test]\n\nfn custom_e111() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/e111-in.jsonld\");\n\n\tlet base_url = 
iri!(\"file://crate/tests/custom/e111-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/e111-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 75, "score": 64018.63726351402 }, { "content": "#[test]\n\nfn custom_c037() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/c037-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/c037-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/c037-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 76, "score": 64018.63726351402 }, { "content": "#[test]\n\nfn custom_c038() {\n\n\tlet input_url = iri!(\"file://crate/tests/custom/c038-in.jsonld\");\n\n\tlet base_url = iri!(\"file://crate/tests/custom/c038-in.jsonld\");\n\n\tlet output_url = iri!(\"file://crate/tests/custom/c038-out.jsonld\");\n\n\tpositive_test(\n\n\t\tOptions {\n\n\t\t\tprocessing_mode: ProcessingMode::JsonLd1_1,\n\n\t\t\tcontext: None,\n\n\t\t},\n\n\t\tinput_url,\n\n\t\tbase_url,\n\n\t\toutput_url,\n\n\t)\n\n}\n\n\n", "file_path": "tests/custom.rs", "rank": 77, "score": 64018.63726351402 }, { "content": "/// Default values for `document_relative` and `vocab` should be `false` and `true`.\n\nfn expand_iri<\n\n\t'a,\n\n\tJ: JsonContext,\n\n\tT: Id + Send + Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tactive_context: &'a mut C,\n\n\tvalue: &str,\n\n\tsource: Option<loader::Id>,\n\n\tmetadata: &'a J::MetaData,\n\n\tdocument_relative: bool,\n\n\tvocab: bool,\n\n\tlocal_context: &'a LocalContextObject<'a, J::Object>,\n\n\tdefined: &'a mut HashMap<String, 
bool>,\n\n\tremote_contexts: ProcessingStack,\n\n\tloader: &'a mut L,\n\n\toptions: ProcessingOptions,\n\n\twarnings: &'a mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> impl 'a + Send + Future<Output = Result<Term<T>, Error>>\n", "file_path": "src/context/processing.rs", "rank": 78, "score": 62630.10074348562 }, { "content": "// This function tries to follow the recommended context proessing algorithm.\n\n// See `https://www.w3.org/TR/json-ld11-api/#context-processing-algorithm`.\n\n//\n\n// The recommended default value for `remote_contexts` is the empty set,\n\n// `false` for `override_protected`, and `true` for `propagate`.\n\nfn process_context<\n\n\t'a,\n\n\tJ: JsonContext,\n\n\tT: Id + Send + Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tactive_context: &'a C,\n\n\tlocal_context: &'a J,\n\n\tmut remote_contexts: ProcessingStack,\n\n\tloader: &'a mut L,\n\n\tbase_url: Option<Iri>,\n\n\tmut options: ProcessingOptions,\n\n\twarnings: &'a mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> BoxFuture<'a, Result<C, Loc<Error, J::MetaData>>>\n\nwhere\n\n\tC::LocalContext: From<L::Output> + From<J>,\n\n\tL::Output: Into<J>,\n\n{\n\n\tlet source = loader.id_opt(base_url);\n", "file_path": "src/context/processing.rs", "rank": 79, "score": 62629.43209494069 }, { "content": "fn compact_collection_with<\n\n\t'a,\n\n\tJ: 'a + JsonSrc,\n\n\tK: JsonFrom<J>,\n\n\tT: 'a + Sync + Send + Id,\n\n\tO: 'a + Send + Iterator<Item = &'a Indexed<Object<J, T>>>,\n\n\tC: ContextMut<T>,\n\n\tL: Loader,\n\n\tM: 'a,\n\n>(\n\n\titems: O,\n\n\tactive_context: Inversible<T, &'a C>,\n\n\ttype_scoped_context: Inversible<T, &'a C>,\n\n\tactive_property: Option<&'a str>,\n\n\tloader: &'a mut L,\n\n\toptions: Options,\n\n\tmeta: M,\n\n) -> BoxFuture<'a, Result<K, Error>>\n\nwhere\n\n\tC: Sync + Send,\n", "file_path": "src/compaction/mod.rs", "rank": 80, "score": 62625.85728322022 }, { "content": "fn expand_node_entries<\n\n\t'a,\n\n\tJ: JsonExpand,\n\n\tT: 'a + Id + Send 
+ Sync,\n\n\tC: ContextMut<T> + Send + Sync,\n\n\tL: Loader + Send + Sync,\n\n>(\n\n\tmut result: Indexed<Node<J, T>>,\n\n\tmut has_value_object_entries: bool,\n\n\tactive_context: &'a C,\n\n\ttype_scoped_context: &'a C,\n\n\tactive_property: ActiveProperty<'a, J>,\n\n\texpanded_entries: Vec<ExpandedEntry<'a, J, Term<T>>>,\n\n\tbase_url: Option<Iri<'a>>,\n\n\tloader: &'a mut L,\n\n\toptions: Options,\n\n\twarnings: &'a mut Vec<Loc<Warning, J::MetaData>>,\n\n) -> BoxFuture<'a, NodeEntriesExpensionResult<J, T>>\n\nwhere\n\n\tC::LocalContext: From<L::Output> + From<J> + Send + Sync,\n", "file_path": "src/expansion/node.rs", "rank": 81, "score": 61327.94182765542 }, { "content": "struct InverseContainer<T: Id> {\n\n\tlanguage: InverseLang,\n\n\ttyp: InverseType<T>,\n\n\tany: Any,\n\n}\n\n\n", "file_path": "src/context/inverse.rs", "rank": 82, "score": 60469.75033752795 }, { "content": "use crate::loader;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\n/// Value located behind an IRI reference.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Loc<T, M> {\n\n\t/// The value.\n\n\tvalue: T,\n\n\n\n\t/// Source document.\n\n\tsource: Option<loader::Id>,\n\n\n\n\t/// Metadata.\n\n\tmetadata: M,\n\n}\n\n\n\nimpl<T, M> Loc<T, M> {\n\n\t/// Creates a new value from the given `source` attached to the given `metadata`.\n\n\tpub fn new(value: T, source: Option<loader::Id>, metadata: M) -> Self {\n\n\t\tSelf {\n", "file_path": "src/loc.rs", "rank": 83, "score": 43091.23070244042 }, { "content": "\t\t\tvalue,\n\n\t\t\tsource,\n\n\t\t\tmetadata,\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn value(&self) -> &T {\n\n\t\t&self.value\n\n\t}\n\n\n\n\tpub fn source(&self) -> Option<loader::Id> {\n\n\t\tself.source\n\n\t}\n\n\n\n\t/// Returns a reference to the metadata associated to this value.\n\n\tpub fn metadata(&self) -> &M {\n\n\t\t&self.metadata\n\n\t}\n\n\n\n\tpub fn into_parts(self) -> (T, Option<loader::Id>, M) {\n", "file_path": "src/loc.rs", "rank": 84, "score": 43075.87312154348 }, { "content": 
"\t\tF: FnOnce(M) -> N,\n\n\t{\n\n\t\tLoc::new(self.value, self.source, f(self.metadata))\n\n\t}\n\n}\n\n\n\nimpl<T, M> Deref for Loc<T, M> {\n\n\ttype Target = T;\n\n\n\n\tfn deref(&self) -> &T {\n\n\t\t&self.value\n\n\t}\n\n}\n\n\n\nimpl<T, M> DerefMut for Loc<T, M> {\n\n\tfn deref_mut(&mut self) -> &mut T {\n\n\t\t&mut self.value\n\n\t}\n\n}\n\n\n", "file_path": "src/loc.rs", "rank": 85, "score": 43066.482676149906 }, { "content": "\t\t(self.value, self.source, self.metadata)\n\n\t}\n\n\n\n\tpub fn unwrap(self) -> T {\n\n\t\tself.value\n\n\t}\n\n\n\n\tpub fn cast_metadata<N>(self) -> Loc<T, N>\n\n\twhere\n\n\t\tN: From<M>,\n\n\t{\n\n\t\tLoc::new(self.value, self.source, self.metadata.into())\n\n\t}\n\n\n\n\tpub fn with_metadata<N>(self, metadata: N) -> Loc<T, N> {\n\n\t\tLoc::new(self.value, self.source, metadata)\n\n\t}\n\n\n\n\tpub fn map_metadata<N, F>(self, f: F) -> Loc<T, N>\n\n\twhere\n", "file_path": "src/loc.rs", "rank": 86, "score": 43062.931528317495 }, { "content": "impl<T, M> AsRef<T> for Loc<T, M> {\n\n\tfn as_ref(&self) -> &T {\n\n\t\t&self.value\n\n\t}\n\n}\n\n\n\nimpl<T, M> AsMut<T> for Loc<T, M> {\n\n\tfn as_mut(&mut self) -> &mut T {\n\n\t\t&mut self.value\n\n\t}\n\n}\n", "file_path": "src/loc.rs", "rank": 87, "score": 43062.28078671656 }, { "content": "impl<J: JsonClone, K: JsonFrom<J>, T: AsJson<J, K>> AsJson<J, K> for Indexed<T> {\n\n\tfn as_json_with(&self, meta: impl Clone + Fn(Option<&J::MetaData>) -> K::MetaData) -> K {\n\n\t\tlet mut json = self.value.as_json_with(meta.clone());\n\n\n\n\t\tif let Some(obj) = json.as_object_mut() {\n\n\t\t\tif let Some(index) = &self.index {\n\n\t\t\t\tobj.insert(\n\n\t\t\t\t\tK::new_key(Keyword::Index.into_str(), meta(None)),\n\n\t\t\t\t\tindex.as_json_with(meta(None)),\n\n\t\t\t\t);\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tjson\n\n\t}\n\n}\n", "file_path": "src/indexed.rs", "rank": 88, "score": 43044.23284679195 }, { "content": "\n\n\t#[inline]\n\n\tfn try_from(term: Term<T>) -> Result<Reference<T>, Term<T>> 
{\n\n\t\tmatch term {\n\n\t\t\tTerm::Ref(prop) => Ok(prop),\n\n\t\t\tterm => Err(term),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T: AsIri> From<BlankId> for Reference<T> {\n\n\t#[inline(always)]\n\n\tfn from(blank: BlankId) -> Reference<T> {\n\n\t\tReference::Blank(blank)\n\n\t}\n\n}\n\n\n\nimpl<J: JsonClone, K: util::JsonFrom<J>, T: Id> util::AsJson<J, K> for Reference<T> {\n\n\t#[inline]\n\n\tfn as_json_with(&self, meta: impl Clone + Fn(Option<&J::MetaData>) -> K::MetaData) -> K {\n", "file_path": "src/reference.rs", "rank": 89, "score": 43034.452957026595 }, { "content": "use crate::{\n\n\tsyntax::Keyword,\n\n\tutil::{AsAnyJson, AsJson, JsonFrom},\n\n};\n\nuse generic_json::JsonClone;\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::hash::{Hash, Hasher};\n\nuse std::ops::{Deref, DerefMut};\n\n\n\n/// Indexed objects.\n\n///\n\n/// Nodes and value objects may be indexed by a string in JSON-LD.\n\n/// This type is a wrapper around any kind of indexable data.\n\n///\n\n/// It is a pointer type that `Deref` into the underlying value.\n\npub struct Indexed<T> {\n\n\t/// Index.\n\n\tindex: Option<String>,\n\n\n\n\t/// Value.\n", "file_path": "src/indexed.rs", "rank": 90, "score": 43031.36486732287 }, { "content": "\t}\n\n\n\n\t/// Get the index, if any.\n\n\t#[inline(always)]\n\n\tpub fn index(&self) -> Option<&str> {\n\n\t\tmatch &self.index {\n\n\t\t\tSome(index) => Some(index.as_str()),\n\n\t\t\tNone => None,\n\n\t\t}\n\n\t}\n\n\n\n\t/// Set the value index.\n\n\t#[inline(always)]\n\n\tpub fn set_index(&mut self, index: Option<String>) {\n\n\t\tself.index = index\n\n\t}\n\n\n\n\t/// Turn this indexed value into its components: inner value and index.\n\n\t#[inline(always)]\n\n\tpub fn into_parts(self) -> (T, Option<String>) {\n", "file_path": "src/indexed.rs", "rank": 91, "score": 43027.982444060406 }, { "content": "\t\t(self.value, self.index)\n\n\t}\n\n\n\n\t/// Cast the inner value.\n\n\t#[inline(always)]\n\n\tpub fn cast<U: From<T>>(self) -> Indexed<U> 
{\n\n\t\tIndexed::new(self.value.into(), self.index)\n\n\t}\n\n\n\n\t/// Try to cast the inner value.\n\n\t#[inline(always)]\n\n\tpub fn try_cast<U: TryFrom<T>>(self) -> Result<Indexed<U>, Indexed<U::Error>> {\n\n\t\tmatch self.value.try_into() {\n\n\t\t\tOk(value) => Ok(Indexed::new(value, self.index)),\n\n\t\t\tErr(e) => Err(Indexed::new(e, self.index)),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T: Hash> Hash for Indexed<T> {\n", "file_path": "src/indexed.rs", "rank": 92, "score": 43021.31904785937 }, { "content": "\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n\t\tmatch self {\n\n\t\t\tReference::Id(id) => write!(f, \"Reference::Id({})\", id.as_iri()),\n\n\t\t\tReference::Blank(b) => write!(f, \"Reference::Blank({})\", b),\n\n\t\t\tReference::Invalid(id) => write!(f, \"Reference::Invalid({})\", id),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// Types that can be converted into a borrowed node reference.\n\n///\n\n/// This is a convenient trait is used to simplify the use of references.\n\n/// For instance consider the [`Node::get`](crate::Node::get) method, used to get the objects associated to the\n\n/// given reference property for a given node.\n\n/// It essentially have the following signature:\n\n/// ```ignore\n\n/// fn get(&self, id: &Reference<T>) -> Objects;\n\n/// ```\n\n/// However building a `Reference` by hand can be tedious, especially while using [`Lexicon`](crate::Lexicon) and\n\n/// [`Vocab`](crate::Vocab). 
It can be as verbose as `node.get(&Reference::Id(Lexicon::Id(MyVocab::Term)))`.\n\n/// Thanks to `ToReference` which is implemented by `Lexicon<V>` for any type `V` implementing `Vocab`,\n\n/// it is simplified into `node.get(MyVocab::Term)` (while the first syntax remains correct).\n\n/// The signature of `get` becomes:\n\n/// ```ignore\n\n/// fn get<R: ToReference<T>>(&self, id: R) -> Objects;\n\n/// ```\n", "file_path": "src/reference.rs", "rank": 93, "score": 43020.72427231729 }, { "content": "\t\tmatch self {\n\n\t\t\tReference::Id(id) => id.as_json(meta(None)),\n\n\t\t\tReference::Blank(b) => b.as_json_with(meta(None)),\n\n\t\t\tReference::Invalid(id) => id.as_json_with(meta(None)),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T: AsIri> fmt::Display for Reference<T> {\n\n\t#[inline]\n\n\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n\t\tmatch self {\n\n\t\t\tReference::Id(id) => id.as_iri().fmt(f),\n\n\t\t\tReference::Blank(b) => b.fmt(f),\n\n\t\t\tReference::Invalid(id) => id.fmt(f),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T: AsIri> fmt::Debug for Reference<T> {\n", "file_path": "src/reference.rs", "rank": 94, "score": 43019.389676637285 }, { "content": "use crate::{\n\n\tsyntax::{Term, TermLike},\n\n\tutil::{self, AsAnyJson},\n\n\tBlankId, Id,\n\n};\n\nuse generic_json::JsonClone;\n\nuse iref::{AsIri, Iri, IriBuf};\n\nuse std::borrow::Borrow;\n\nuse std::convert::TryFrom;\n\nuse std::fmt;\n\n\n\n/// Node reference.\n\n///\n\n/// Used to reference a node across a document or to a remote document.\n\n/// It can be an identifier (IRI), a blank node identifier for local blank nodes\n\n/// or an invalid reference (a string that is neither an IRI nor blank node identifier).\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub enum Reference<T: AsIri = IriBuf> {\n\n\t/// Node identifier, essentially an IRI.\n\n\tId(T),\n", "file_path": "src/reference.rs", "rank": 95, "score": 43019.35212517835 }, { "content": "\tvalue: T,\n\n}\n\n\n\nimpl<T> Indexed<T> {\n\n\t/// Create a new 
(maybe) indexed value.\n\n\t#[inline(always)]\n\n\tpub fn new(value: T, index: Option<String>) -> Indexed<T> {\n\n\t\tIndexed { value, index }\n\n\t}\n\n\n\n\t/// Get a reference to the inner value.\n\n\t#[inline(always)]\n\n\tpub fn inner(&self) -> &T {\n\n\t\t&self.value\n\n\t}\n\n\n\n\t/// Drop the index and return the underlying value.\n\n\t#[inline(always)]\n\n\tpub fn into_inner(self) -> T {\n\n\t\tself.value\n", "file_path": "src/indexed.rs", "rank": 96, "score": 43018.92421824805 }, { "content": "\t#[inline(always)]\n\n\tfn hash<H: Hasher>(&self, h: &mut H) {\n\n\t\tself.value.hash(h);\n\n\t\tself.index.hash(h)\n\n\t}\n\n}\n\n\n\nimpl<T: PartialEq> PartialEq for Indexed<T> {\n\n\t#[inline(always)]\n\n\tfn eq(&self, other: &Self) -> bool {\n\n\t\tself.index == other.index && self.value == other.value\n\n\t}\n\n}\n\n\n\nimpl<T: Eq> Eq for Indexed<T> {}\n\n\n\nimpl<T: Clone> Clone for Indexed<T> {\n\n\t#[inline(always)]\n\n\tfn clone(&self) -> Self {\n\n\t\tIndexed::new(self.value.clone(), self.index.clone())\n", "file_path": "src/indexed.rs", "rank": 97, "score": 43015.16186052932 }, { "content": "\t/// This will either return a string slice of an IRI, or a blank node identifier.\n\n\t#[inline(always)]\n\n\tpub fn as_str(&self) -> &str {\n\n\t\tmatch self {\n\n\t\t\tReference::Id(id) => id.as_iri().into_str(),\n\n\t\t\tReference::Blank(id) => id.as_str(),\n\n\t\t\tReference::Invalid(id) => id.as_str(),\n\n\t\t}\n\n\t}\n\n\n\n\t/// If the renference is a node identifier, returns the node IRI.\n\n\t///\n\n\t/// Returns `None` if it is a blank node reference.\n\n\t#[inline(always)]\n\n\tpub fn as_iri(&self) -> Option<Iri> {\n\n\t\tmatch self {\n\n\t\t\tReference::Id(k) => Some(k.as_iri()),\n\n\t\t\t_ => None,\n\n\t\t}\n\n\t}\n", "file_path": "src/reference.rs", "rank": 98, "score": 43014.94099667911 }, { "content": "\t}\n\n}\n\n\n\nimpl<T> From<T> for Indexed<T> {\n\n\t#[inline(always)]\n\n\tfn from(value: T) -> Indexed<T> {\n\n\t\tIndexed::new(value, 
None)\n\n\t}\n\n}\n\n\n\nimpl<T> Deref for Indexed<T> {\n\n\ttype Target = T;\n\n\n\n\t#[inline(always)]\n\n\tfn deref(&self) -> &T {\n\n\t\t&self.value\n\n\t}\n\n}\n\n\n\nimpl<T> DerefMut for Indexed<T> {\n", "file_path": "src/indexed.rs", "rank": 99, "score": 43013.79078321117 } ]
Rust
whisper/src/aggregation.rs
GiantPlantsSociety/graphite-rs
d2657ae3ddf110023417ec255f5192ac8fa83bfc
use serde::*; use std::cmp; use std::convert::Into; use std::fmt; use std::str::FromStr; #[allow(clippy::trivially_copy_pass_by_ref)] fn cmp_f64(a: &f64, b: &f64) -> cmp::Ordering { a.partial_cmp(b).unwrap_or(::std::cmp::Ordering::Equal) } #[allow(clippy::trivially_copy_pass_by_ref)] fn cmp_f64_abs(a: &f64, b: &f64) -> cmp::Ordering { cmp_f64(&a.abs(), &b.abs()) } #[derive(Clone, Copy, Debug, PartialEq, Deserialize)] #[serde(rename_all = "lowercase")] pub enum AggregationMethod { Average, Sum, Last, Max, Min, AvgZero, AbsMax, AbsMin, } impl AggregationMethod { pub fn from_type(aggregation_type: u32) -> Option<Self> { match aggregation_type { 1 => Some(AggregationMethod::Average), 2 => Some(AggregationMethod::Sum), 3 => Some(AggregationMethod::Last), 4 => Some(AggregationMethod::Max), 5 => Some(AggregationMethod::Min), 6 => Some(AggregationMethod::AvgZero), 7 => Some(AggregationMethod::AbsMax), 8 => Some(AggregationMethod::AbsMin), _ => None, } } pub fn to_type(self) -> u32 { match self { AggregationMethod::Average => 1, AggregationMethod::Sum => 2, AggregationMethod::Last => 3, AggregationMethod::Max => 4, AggregationMethod::Min => 5, AggregationMethod::AvgZero => 6, AggregationMethod::AbsMax => 7, AggregationMethod::AbsMin => 8, } } pub fn aggregate(self, values: &[Option<f64>]) -> Result<f64, &'static str> { match self { AggregationMethod::Average => { let sum: f64 = values.iter().filter_map(|v| *v).sum(); let count = values.iter().filter_map(|v| *v).count(); Ok(sum / count as f64) } AggregationMethod::Sum => { let sum: f64 = values.iter().filter_map(|v| *v).sum(); Ok(sum) } AggregationMethod::Last => { if let Some(Some(v)) = values.iter().rev().find(|v| v.is_some()) { Ok(*v) } else { Err("Empty list of values") } } AggregationMethod::Max => values .iter() .filter_map(|v| *v) .max_by(cmp_f64) .ok_or("Empty list of values"), AggregationMethod::Min => values .iter() .filter_map(|v| *v) .min_by(cmp_f64) .ok_or("Empty list of values"), AggregationMethod::AvgZero => { 
let sum: f64 = values.iter().filter_map(|v| *v).sum(); let len = values.len(); Ok(sum / len as f64) } AggregationMethod::AbsMax => values .iter() .filter_map(|v| *v) .max_by(cmp_f64_abs) .ok_or("Empty list of values"), AggregationMethod::AbsMin => values .iter() .filter_map(|v| *v) .min_by(cmp_f64_abs) .ok_or("Empty list of values"), } } } impl ::std::default::Default for AggregationMethod { fn default() -> Self { AggregationMethod::Average } } impl FromStr for AggregationMethod { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "average" => Ok(AggregationMethod::Average), "sum" => Ok(AggregationMethod::Sum), "last" => Ok(AggregationMethod::Last), "max" => Ok(AggregationMethod::Max), "min" => Ok(AggregationMethod::Min), "avg_zero" => Ok(AggregationMethod::AvgZero), "absmax" => Ok(AggregationMethod::AbsMax), "absmin" => Ok(AggregationMethod::AbsMin), _ => Err(format!("Unsupported aggregation method '{}'.", s)), } } } impl Into<&'static str> for AggregationMethod { fn into(self) -> &'static str { match self { AggregationMethod::Average => "average", AggregationMethod::Sum => "sum", AggregationMethod::Last => "last", AggregationMethod::Max => "max", AggregationMethod::Min => "min", AggregationMethod::AvgZero => "avg_zero", AggregationMethod::AbsMax => "absmax", AggregationMethod::AbsMin => "absmin", } } } impl fmt::Display for AggregationMethod { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let s: &str = (*self).into(); write!(f, "{}", s) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_display() { assert_eq!(AggregationMethod::Average.to_string(), "average"); assert_eq!(AggregationMethod::Sum.to_string(), "sum"); assert_eq!(AggregationMethod::Last.to_string(), "last"); assert_eq!(AggregationMethod::Max.to_string(), "max"); assert_eq!(AggregationMethod::Min.to_string(), "min"); assert_eq!(AggregationMethod::AvgZero.to_string(), "avg_zero"); assert_eq!(AggregationMethod::AbsMax.to_string(), "absmax"); 
assert_eq!(AggregationMethod::AbsMin.to_string(), "absmin"); assert_eq!(AggregationMethod::default().to_string(), "average"); } #[test] fn test_convert() { assert_eq!( AggregationMethod::from_str(&AggregationMethod::Average.to_string()), Ok(AggregationMethod::Average) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Sum.to_string()), Ok(AggregationMethod::Sum) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Last.to_string()), Ok(AggregationMethod::Last) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Max.to_string()), Ok(AggregationMethod::Max) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Min.to_string()), Ok(AggregationMethod::Min) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AvgZero.to_string()), Ok(AggregationMethod::AvgZero) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AbsMax.to_string()), Ok(AggregationMethod::AbsMax) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AbsMin.to_string()), Ok(AggregationMethod::AbsMin) ); assert!(AggregationMethod::from_str("test").is_err()); } #[test] fn test_aggregate() { assert_eq!( AggregationMethod::Average.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(2.5) ); assert_eq!( AggregationMethod::Min.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(1.0) ); assert_eq!( AggregationMethod::Max.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(4.0) ); assert_eq!( AggregationMethod::Last.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(4.0) ); assert_eq!( AggregationMethod::Last.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, Some(4.0), None ]), Ok(4.0) ); assert_eq!( AggregationMethod::Sum.aggregate(&[ Some(10.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(19.0) ); assert_eq!( AggregationMethod::AvgZero.aggregate(&[ Some(1.0), Some(2.0), 
Some(3.0), Some(4.0), None, None, None, None ]), Ok(1.25) ); assert_eq!( AggregationMethod::AbsMax.aggregate(&[Some(-3.0), Some(-2.0), Some(1.0), Some(2.0)]), Ok(-3.0) ); assert_eq!( AggregationMethod::AbsMax.aggregate(&[Some(-2.0), Some(-1.0), Some(2.0), Some(3.0)]), Ok(3.0) ); assert_eq!( AggregationMethod::AbsMin.aggregate(&[Some(-3.0), Some(-2.0), Some(1.0), Some(2.0)]), Ok(1.0) ); assert_eq!( AggregationMethod::AbsMin.aggregate(&[Some(-2.0), Some(-1.0), Some(2.0), Some(3.0)]), Ok(-1.0) ); assert!(AggregationMethod::Last.aggregate(&[]).is_err()); } #[test] fn test_from_to_type() { for i in 1..9 { let method = AggregationMethod::from_type(i).unwrap(); assert_eq!(AggregationMethod::to_type(method), i); } assert_eq!(AggregationMethod::from_type(9), None); } }
use serde::*; use std::cmp; use std::convert::Into; use std::fmt; use std::str::FromStr; #[allow(clippy::trivially_copy_pass_by_ref)] fn cmp_f64(a: &f64, b: &f64) -> cmp::Ordering { a.partial_cmp(b).unwrap_or(::std::cmp::Ordering::Equal) } #[allow(clippy::trivially_copy_pass_by_ref)] fn cmp_f64_abs(a: &f64, b: &f64) -> cmp::Ordering { cmp_f64(&a.abs(), &b.abs()) } #[derive(Clone, Copy, Debug, PartialEq, Deserialize)] #[serde(rename_all = "lowercase")] pub enum AggregationMethod { Average, Sum,
} } pub fn aggregate(self, values: &[Option<f64>]) -> Result<f64, &'static str> { match self { AggregationMethod::Average => { let sum: f64 = values.iter().filter_map(|v| *v).sum(); let count = values.iter().filter_map(|v| *v).count(); Ok(sum / count as f64) } AggregationMethod::Sum => { let sum: f64 = values.iter().filter_map(|v| *v).sum(); Ok(sum) } AggregationMethod::Last => { if let Some(Some(v)) = values.iter().rev().find(|v| v.is_some()) { Ok(*v) } else { Err("Empty list of values") } } AggregationMethod::Max => values .iter() .filter_map(|v| *v) .max_by(cmp_f64) .ok_or("Empty list of values"), AggregationMethod::Min => values .iter() .filter_map(|v| *v) .min_by(cmp_f64) .ok_or("Empty list of values"), AggregationMethod::AvgZero => { let sum: f64 = values.iter().filter_map(|v| *v).sum(); let len = values.len(); Ok(sum / len as f64) } AggregationMethod::AbsMax => values .iter() .filter_map(|v| *v) .max_by(cmp_f64_abs) .ok_or("Empty list of values"), AggregationMethod::AbsMin => values .iter() .filter_map(|v| *v) .min_by(cmp_f64_abs) .ok_or("Empty list of values"), } } } impl ::std::default::Default for AggregationMethod { fn default() -> Self { AggregationMethod::Average } } impl FromStr for AggregationMethod { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "average" => Ok(AggregationMethod::Average), "sum" => Ok(AggregationMethod::Sum), "last" => Ok(AggregationMethod::Last), "max" => Ok(AggregationMethod::Max), "min" => Ok(AggregationMethod::Min), "avg_zero" => Ok(AggregationMethod::AvgZero), "absmax" => Ok(AggregationMethod::AbsMax), "absmin" => Ok(AggregationMethod::AbsMin), _ => Err(format!("Unsupported aggregation method '{}'.", s)), } } } impl Into<&'static str> for AggregationMethod { fn into(self) -> &'static str { match self { AggregationMethod::Average => "average", AggregationMethod::Sum => "sum", AggregationMethod::Last => "last", AggregationMethod::Max => "max", AggregationMethod::Min => "min", 
AggregationMethod::AvgZero => "avg_zero", AggregationMethod::AbsMax => "absmax", AggregationMethod::AbsMin => "absmin", } } } impl fmt::Display for AggregationMethod { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let s: &str = (*self).into(); write!(f, "{}", s) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_display() { assert_eq!(AggregationMethod::Average.to_string(), "average"); assert_eq!(AggregationMethod::Sum.to_string(), "sum"); assert_eq!(AggregationMethod::Last.to_string(), "last"); assert_eq!(AggregationMethod::Max.to_string(), "max"); assert_eq!(AggregationMethod::Min.to_string(), "min"); assert_eq!(AggregationMethod::AvgZero.to_string(), "avg_zero"); assert_eq!(AggregationMethod::AbsMax.to_string(), "absmax"); assert_eq!(AggregationMethod::AbsMin.to_string(), "absmin"); assert_eq!(AggregationMethod::default().to_string(), "average"); } #[test] fn test_convert() { assert_eq!( AggregationMethod::from_str(&AggregationMethod::Average.to_string()), Ok(AggregationMethod::Average) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Sum.to_string()), Ok(AggregationMethod::Sum) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Last.to_string()), Ok(AggregationMethod::Last) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Max.to_string()), Ok(AggregationMethod::Max) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::Min.to_string()), Ok(AggregationMethod::Min) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AvgZero.to_string()), Ok(AggregationMethod::AvgZero) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AbsMax.to_string()), Ok(AggregationMethod::AbsMax) ); assert_eq!( AggregationMethod::from_str(&AggregationMethod::AbsMin.to_string()), Ok(AggregationMethod::AbsMin) ); assert!(AggregationMethod::from_str("test").is_err()); } #[test] fn test_aggregate() { assert_eq!( AggregationMethod::Average.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, 
None, Some(4.0) ]), Ok(2.5) ); assert_eq!( AggregationMethod::Min.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(1.0) ); assert_eq!( AggregationMethod::Max.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(4.0) ); assert_eq!( AggregationMethod::Last.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(4.0) ); assert_eq!( AggregationMethod::Last.aggregate(&[ Some(1.0), None, Some(2.0), None, Some(3.0), None, Some(4.0), None ]), Ok(4.0) ); assert_eq!( AggregationMethod::Sum.aggregate(&[ Some(10.0), None, Some(2.0), None, Some(3.0), None, None, Some(4.0) ]), Ok(19.0) ); assert_eq!( AggregationMethod::AvgZero.aggregate(&[ Some(1.0), Some(2.0), Some(3.0), Some(4.0), None, None, None, None ]), Ok(1.25) ); assert_eq!( AggregationMethod::AbsMax.aggregate(&[Some(-3.0), Some(-2.0), Some(1.0), Some(2.0)]), Ok(-3.0) ); assert_eq!( AggregationMethod::AbsMax.aggregate(&[Some(-2.0), Some(-1.0), Some(2.0), Some(3.0)]), Ok(3.0) ); assert_eq!( AggregationMethod::AbsMin.aggregate(&[Some(-3.0), Some(-2.0), Some(1.0), Some(2.0)]), Ok(1.0) ); assert_eq!( AggregationMethod::AbsMin.aggregate(&[Some(-2.0), Some(-1.0), Some(2.0), Some(3.0)]), Ok(-1.0) ); assert!(AggregationMethod::Last.aggregate(&[]).is_err()); } #[test] fn test_from_to_type() { for i in 1..9 { let method = AggregationMethod::from_type(i).unwrap(); assert_eq!(AggregationMethod::to_type(method), i); } assert_eq!(AggregationMethod::from_type(9), None); } }
Last, Max, Min, AvgZero, AbsMax, AbsMin, } impl AggregationMethod { pub fn from_type(aggregation_type: u32) -> Option<Self> { match aggregation_type { 1 => Some(AggregationMethod::Average), 2 => Some(AggregationMethod::Sum), 3 => Some(AggregationMethod::Last), 4 => Some(AggregationMethod::Max), 5 => Some(AggregationMethod::Min), 6 => Some(AggregationMethod::AvgZero), 7 => Some(AggregationMethod::AbsMax), 8 => Some(AggregationMethod::AbsMin), _ => None, } } pub fn to_type(self) -> u32 { match self { AggregationMethod::Average => 1, AggregationMethod::Sum => 2, AggregationMethod::Last => 3, AggregationMethod::Max => 4, AggregationMethod::Min => 5, AggregationMethod::AvgZero => 6, AggregationMethod::AbsMax => 7, AggregationMethod::AbsMin => 8,
random
[ { "content": "pub fn diff(\n\n path1: &Path,\n\n path2: &Path,\n\n ignore_empty: bool,\n\n mut until_time: u32,\n\n now: u32,\n\n) -> Result<Vec<DiffArchive>, io::Error> {\n\n let mut file1 = WhisperFile::open(path1)?;\n\n let mut file2 = WhisperFile::open(path2)?;\n\n\n\n if file1.info().archives != file2.info().archives {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Archive configurations are unalike. Resize the input before diffing\",\n\n ));\n\n }\n\n\n\n let mut archives = file1.info().archives.clone();\n\n archives.sort_by_key(|a| a.retention());\n\n\n", "file_path": "whisper/src/diff.rs", "rank": 2, "score": 114835.86518919212 }, { "content": "pub fn fetch(\n\n filename: &Path,\n\n aggregation: AggregationMethod,\n\n resolution: Option<u32>,\n\n start: u64,\n\n end: u64,\n\n) -> Result<Data, Error> {\n\n let c_filename = CString::new(filename.to_str().unwrap().as_bytes()).unwrap();\n\n let c_aggregation =\n\n CString::new(str::to_ascii_uppercase(aggregation.into()).as_bytes()).unwrap();\n\n\n\n let mut start = start as c_long;\n\n let mut end = end as c_long;\n\n let mut step = u64::from(resolution.unwrap_or(1)) as c_ulong;\n\n let mut ds_cnt = 0;\n\n let mut ds_namv = ptr::null_mut();\n\n let mut data = ptr::null_mut();\n\n let status = unsafe {\n\n rrd_fetch_r(\n\n c_filename.as_ptr(),\n", "file_path": "rrd/src/lib.rs", "rank": 3, "score": 114835.86518919212 }, { "content": "pub fn merge(\n\n path_src: &Path,\n\n path_dst: &Path,\n\n time_from: u32,\n\n time_to: u32,\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n // if now is None:\n\n // now = int(time.time())\n\n\n\n // if (time_to is not None):\n\n // untilTime = time_to\n\n // else:\n\n // untilTime = now\n\n\n\n // if (time_from is not None):\n\n // fromTime = time_from\n\n // else:\n\n // fromTime = 0\n\n\n", "file_path": "whisper/src/merge.rs", "rank": 4, "score": 114835.86518919212 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn resize(\n\n path_src: 
&Path,\n\n path_new: Option<&Path>,\n\n retentions: &[Retention],\n\n x_files_factor: f32,\n\n aggregation_method: AggregationMethod,\n\n aggregate: bool,\n\n nobackup: bool,\n\n now: u32,\n\n) -> Result<(), Error> {\n\n let path_dst = match path_new {\n\n None => {\n\n let tmpfile = PathBuf::from(format!(\"{}.tmp\", path_src.display()));\n\n if tmpfile.is_file() {\n\n println!(\n\n \"Removing previous temporary database file: {}\",\n\n tmpfile.display()\n\n );\n\n remove_file(&tmpfile)?;\n\n }\n", "file_path": "whisper/src/resize.rs", "rank": 5, "score": 114835.86518919212 }, { "content": "pub fn copy_test_file(temp_dir: &TempDir, filename: &str) -> PathBuf {\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n let tmp_file_path = temp_dir.path().join(filename);\n\n\n\n fs::copy(&file_path, &tmp_file_path).unwrap();\n\n\n\n tmp_file_path\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 6, "score": 109909.69735819429 }, { "content": "pub fn create_and_update_points(\n\n path: &PathBuf,\n\n points: &[Point],\n\n now: u32,\n\n) -> Result<WhisperFile, Box<dyn Error>> {\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 60,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n file.update_many(&points, now)?;\n\n\n\n Ok(file)\n\n}\n", "file_path": "whisper_tests/src/lib.rs", "rank": 7, "score": 107820.42367963331 }, { "content": "pub fn create_and_update_many(\n\n path: &PathBuf,\n\n timestamps: &[u32],\n\n now: u32,\n\n) -> Result<WhisperFile, Box<dyn Error>> {\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 60,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n let points: Vec<Point> = timestamps\n\n .iter()\n\n .map(|interval| Point {\n\n interval: *interval,\n\n value: rand::random(),\n\n })\n\n .collect();\n\n\n\n file.update_many(&points, now)?;\n\n\n\n Ok(file)\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 8, "score": 
107820.42367963331 }, { "content": "pub fn de_time_parse<'de, D>(deserializer: D) -> Result<u32, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n time_parse(String::deserialize(deserializer)?).map_err(serde::de::Error::custom)\n\n}\n\n\n", "file_path": "diamond-api/src/parse/time.rs", "rank": 9, "score": 103851.42968592388 }, { "content": "pub fn get_temp_dir() -> TempDir {\n\n Builder::new()\n\n .prefix(\"whisper\")\n\n .tempdir()\n\n .expect(\"Temp dir created\")\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 10, "score": 101462.9537054646 }, { "content": "fn format_none(float: Option<f64>) -> String {\n\n match float {\n\n Some(x) => format!(\"{:.1}\", x),\n\n None => \"None\".to_string(),\n\n }\n\n}\n\n\n\nimpl fmt::Display for DiffArchiveInfo {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for archive in &self.archives {\n\n if f.alternate() {\n\n writeln!(\n\n f,\n\n \"Archive {} ({} of {} datapoints differ)\",\n\n archive.index, archive.points, archive.total\n\n )?;\n\n writeln!(\n\n f,\n\n \"{:>7} {:>11} {:>13} {:>13}\",\n\n \"\", \"timestamp\", \"value_a\", \"value_b\"\n", "file_path": "whisper/src/diff.rs", "rank": 11, "score": 99953.44945209098 }, { "content": "pub fn app_config(ctx: Context) -> impl Fn(&mut ServiceConfig) {\n\n move |config: &mut ServiceConfig| {\n\n config\n\n .app_data(ctx.clone())\n\n .service(resource(\"/render\").to(render_handler))\n\n .service(resource(\"/metrics/find\").to(find_handler))\n\n .service(resource(\"/metrics\").to(find_handler));\n\n }\n\n}\n", "file_path": "diamond-api/src/application.rs", "rank": 12, "score": 99567.44648528646 }, { "content": "#[inline]\n\npub fn line_update<P: AsRef<Path>>(\n\n message: &str,\n\n dir: P,\n\n config: &WhisperConfig,\n\n now: u32,\n\n) -> Result<(), Box<dyn Error>> {\n\n let metric: MetricPoint = message.parse()?;\n\n let metric_path: MetricPath = metric.name.parse()?;\n\n\n\n let file_path = dir.as_ref().join(metric_path.0);\n\n\n\n let mut 
file = if file_path.exists() {\n\n WhisperFile::open(&file_path)?\n\n } else {\n\n let dir_path = file_path.parent().unwrap();\n\n fs::create_dir_all(&dir_path)?;\n\n\n\n WhisperBuilder::default()\n\n .add_retentions(&config.retentions)\n\n .x_files_factor(config.x_files_factor)\n\n .aggregation_method(config.aggregation_method)\n\n .build(&file_path)?\n\n };\n\n\n\n file.update(&metric.point, now)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "diamond/src/lib.rs", "rank": 13, "score": 99224.08309380221 }, { "content": "pub fn random_string(len: usize) -> String {\n\n Alphanumeric.sample_string(&mut thread_rng(), len)\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 14, "score": 97319.71507534919 }, { "content": "#[inline]\n\npub fn update_silently(line: &str, conf: &Settings) {\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs() as u32;\n\n\n\n line_update(&line, &conf.db_path, &conf.whisper, now).unwrap_or_else(|e| eprintln!(\"{}\", e));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use settings::{Net, WhisperConfig};\n\n use std::convert::From;\n\n use std::io;\n\n use std::net::IpAddr::V4;\n\n use std::path::{Path, PathBuf};\n\n use tempfile::Builder;\n\n use whisper::aggregation::AggregationMethod;\n\n use whisper::retention::Retention;\n", "file_path": "diamond/src/lib.rs", "rank": 15, "score": 95532.7581596298 }, { "content": "pub fn parse_duration(s: &str) -> Result<u32, String> {\n\n lazy_static! 
{\n\n static ref RETENTION_DEF_RE: Regex = Regex::new(r#\"(?i)^\\s*(\\d+)([a-z]*)\\s*$\"#).unwrap();\n\n }\n\n\n\n let captures = RETENTION_DEF_RE\n\n .captures(s)\n\n .ok_or_else(|| format!(\"Invalid duration definition '{}'\", s))?;\n\n\n\n let mut precision = u32::from_str_radix(captures.get(1).unwrap().as_str(), 10).unwrap();\n\n if !captures.get(2).unwrap().as_str().is_empty() {\n\n precision *= get_unit_multiplier(captures.get(2).unwrap().as_str())?;\n\n }\n\n\n\n if precision == 0 {\n\n Err(\"Precision cannot be zero\".to_owned())\n\n } else {\n\n Ok(precision)\n\n }\n\n}\n", "file_path": "whisper/src/retention.rs", "rank": 16, "score": 93902.32954636353 }, { "content": "pub fn time_parse(s: String) -> Result<u32, ParseError> {\n\n if s.starts_with('-') {\n\n // Relative time\n\n let (multi, count) = match &s.chars().last().unwrap() {\n\n 's' => (1, 1),\n\n 'h' => (3600, 1),\n\n 'd' => (3600 * 24, 1),\n\n 'w' => (3600 * 24 * 7, 1),\n\n 'y' => (3600 * 24 * 365, 1),\n\n 'n' if s.ends_with(\"min\") => (60, 3),\n\n 'n' if s.ends_with(\"mon\") => (3600 * 24 * 30, 3),\n\n _ => return Err(ParseError::Time),\n\n };\n\n\n\n let s2 = &s[1..s.len() - count];\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as u32;\n\n\n\n let v = now - s2.parse::<u32>()? 
* multi;\n\n Ok(v)\n\n } else {\n", "file_path": "diamond-api/src/parse/time.rs", "rank": 17, "score": 88849.37404166203 }, { "content": "#[cfg(not(target_os = \"unix\"))]\n\npub fn fallocate(fd: &mut File, offset: usize, len: usize) -> Result<()> {\n\n use std::io::{Seek, SeekFrom, Write};\n\n\n\n fd.seek(SeekFrom::Start(offset as u64))?;\n\n let zeroes = [0u8; 16384];\n\n let mut remaining = len;\n\n while remaining > zeroes.len() {\n\n fd.write_all(&zeroes)?;\n\n remaining -= zeroes.len();\n\n }\n\n fd.write_all(&zeroes[0..remaining])?;\n\n Ok(())\n\n}\n", "file_path": "whisper/src/fallocate.rs", "rank": 18, "score": 84715.47651211075 }, { "content": "pub fn get_file_path(temp_dir: &TempDir, prefix: &str) -> PathBuf {\n\n let file_name = format!(\"{}_{}.wsp\", prefix, random_string(10));\n\n let mut path = temp_dir.path().to_path_buf();\n\n path.push(file_name);\n\n path\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 19, "score": 84602.37508034328 }, { "content": "fn points_to_values(points: &[Point], start: u32, step: u32) -> Vec<Option<f64>> {\n\n let mut values = Vec::with_capacity(points.len());\n\n for (i, point) in points.iter().enumerate() {\n\n if point.interval == start + (i as u32) * step {\n\n values.push(Some(point.value));\n\n } else {\n\n values.push(None);\n\n }\n\n }\n\n values\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 20, "score": 83816.8413023235 }, { "content": "pub fn info(filename: &Path, daemon: Option<&Path>, noflush: bool) -> Result<Info, Error> {\n\n let mut c_args = Vec::<*const c_char>::new();\n\n\n\n let info_str = CString::new(\"info\").unwrap();\n\n c_args.push(info_str.as_ptr());\n\n\n\n let c_filename = CString::new(filename.to_str().unwrap().as_bytes()).unwrap();\n\n c_args.push(c_filename.as_ptr());\n\n\n\n if let Some(daemon_path) = daemon {\n\n let daemon_str = CString::new(\"--daemon\").unwrap();\n\n c_args.push(daemon_str.as_ptr());\n\n let c_daemon_path = 
CString::new(daemon_path.to_str().unwrap().as_bytes()).unwrap();\n\n c_args.push(c_daemon_path.as_ptr());\n\n }\n\n\n\n if noflush {\n\n let noflush_str = CString::new(\"--noflush\").unwrap();\n\n c_args.push(noflush_str.as_ptr());\n\n }\n", "file_path": "rrd/src/lib.rs", "rank": 21, "score": 79545.98394601604 }, { "content": "pub fn fill(src: &Path, dst: &Path, from: u32, now: u32) -> Result<(), io::Error> {\n\n let mut start_from = from;\n\n let mut file_dst = WhisperFile::open(dst)?;\n\n\n\n let mut archives = file_dst.info().archives.clone();\n\n archives.sort_by_key(|a| a.retention());\n\n\n\n for archive in &archives {\n\n let from_time = now - archive.retention();\n\n\n\n if from_time >= start_from {\n\n continue;\n\n }\n\n\n\n let interval = Interval::new(from_time, start_from).unwrap();\n\n let data_dst = file_dst.fetch(archive.seconds_per_point, interval, now)?;\n\n\n\n let mut start = data_dst.from_interval;\n\n let end = data_dst.until_interval;\n\n let step = data_dst.step;\n", "file_path": "whisper/src/fill.rs", "rank": 22, "score": 78533.18504043702 }, { "content": "pub trait Storage {\n\n fn find(\n\n &self,\n\n path_expression: &PathExpression,\n\n ) -> Result<Vec<MetricResponseLeaf>, ResponseError>;\n\n\n\n fn query(\n\n &self,\n\n path_expression: &PathExpression,\n\n interval: Interval,\n\n now: u64,\n\n ) -> Result<Vec<StorageResponse>, ResponseError>;\n\n}\n", "file_path": "diamond-api/src/storage/storage.rs", "rank": 23, "score": 65337.58809623959 }, { "content": "#[test]\n\nfn calling_with_sum_and_xfactor() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"sum\", \"0.2\"])\n\n .assert()\n\n .success()\n\n 
.stdout(predicate::str::contains(\"Updated aggregation method\").from_utf8())\n\n .stdout(predicate::str::contains(\"(average -> sum)\").from_utf8())\n\n .stdout(predicate::str::contains(\"0.2\").not().from_utf8());\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-set-aggregation-method.rs", "rank": 24, "score": 63308.4484557553 }, { "content": "fn path_element_enum(input: &str) -> IResult<&str, Vec<String>, VerboseError<&str>> {\n\n delimited(\n\n c('{'),\n\n separated_list1(tag(\",\"), partial_path_element),\n\n c('}'),\n\n )(input)\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 25, "score": 56231.67486242715 }, { "content": "fn match_group_single(input: &str) -> IResult<&str, BTreeSet<char>, VerboseError<&str>> {\n\n let (input, single) = none_of(\"]\")(input)?;\n\n let group_single = [single].iter().cloned().collect();\n\n Ok((input, group_single))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 26, "score": 53974.08273540145 }, { "content": "fn path_element_group(input: &str) -> IResult<&str, BTreeSet<char>, VerboseError<&str>> {\n\n let (input, _) = c('[')(input)?;\n\n let (input, start_dash) = opt(c('-'))(input)?;\n\n\n\n let (input, mut chars) = fold_many1(\n\n alt((match_group_range, match_group_single)),\n\n BTreeSet::new,\n\n |mut acc: BTreeSet<char>, chars: BTreeSet<char>| {\n\n acc.extend(chars);\n\n acc\n\n },\n\n )(input)?;\n\n\n\n let (input, end_dash) = opt(c('-'))(input)?;\n\n let (input, _) = c(']')(input)?;\n\n\n\n if start_dash.is_some() || end_dash.is_some() {\n\n chars.insert('-');\n\n }\n\n\n\n Ok((input, chars))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 27, "score": 53974.08273540145 }, { "content": "fn match_group_range(input: &str) -> IResult<&str, BTreeSet<char>, VerboseError<&str>> {\n\n let (input, from_char) = none_of(\"]\")(input)?;\n\n let (input, _) = c('-')(input)?;\n\n let (input, to_char) = none_of(\"]\")(input)?;\n\n\n\n 
let range = ((from_char as u8)..=(to_char as u8))\n\n .map(|c| c as char)\n\n .collect();\n\n\n\n Ok((input, range))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 28, "score": 53974.08273540145 }, { "content": "fn main() {\n\n #[cfg(target_os = \"freebsd\")]\n\n println!(\"cargo:rustc-link-search=native=/usr/local/lib\");\n\n println!(\"cargo:rustc-link-lib=rrd\");\n\n}\n", "file_path": "rrd-sys/build.rs", "rank": 29, "score": 49119.92117937884 }, { "content": "fn main() {\n\n let args = Args::from_args();\n\n if let Err(err) = run(&args) {\n\n eprintln!(\"{}\", err);\n\n exit(1);\n\n }\n\n}\n", "file_path": "rrd2whisper/src/main.rs", "rank": 30, "score": 49119.92117937884 }, { "content": "fn fill_interval(\n\n src: &Path,\n\n dst: &Path,\n\n tstart: u32,\n\n tsuntil: u32,\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n let mut tstop = tsuntil;\n\n\n\n let mut file_src = WhisperFile::open(src)?;\n\n let mut file_dst = WhisperFile::open(dst)?;\n\n\n\n let mut archives = file_src.info().archives.clone();\n\n archives.sort_by_key(|a| a.retention());\n\n\n\n // find oldest point in time, stored by both files\n\n let src_time = now - file_src.info().max_retention;\n\n\n\n if (tstart < src_time) && (tstop < src_time) {\n\n return Ok(());\n", "file_path": "whisper/src/fill.rs", "rank": 31, "score": 47804.13866519788 }, { "content": "fn migrate_points(\n\n path_src: &Path,\n\n path_dst: &Path,\n\n aggregate: bool,\n\n now: u32,\n\n) -> Result<(), Error> {\n\n if !path_src.is_file() {\n\n return Err(Error::FileNotExist(path_src.to_owned()).into());\n\n }\n\n\n\n if aggregate {\n\n println!(\"Migrating data with aggregation...\");\n\n migrate_aggregate(path_src, path_dst, now)?;\n\n } else {\n\n println!(\"Migrating data without aggregation...\");\n\n migrate_nonaggregate(path_src, path_dst, now)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/src/resize.rs", "rank": 32, "score": 47804.13866519788 }, { "content": "fn 
file_update(\n\n fh: &mut fs::File,\n\n header: &WhisperMetadata,\n\n point: &Point,\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n let timestamp = point.interval;\n\n\n\n if now >= timestamp + header.max_retention || now < timestamp {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Timestamp not covered by any archives in this database.\",\n\n ));\n\n }\n\n\n\n // Find the highest-precision archive that covers timestamp\n\n let archive_index = header\n\n .archives\n\n .iter()\n\n .position(|a| timestamp + a.retention() >= now)\n", "file_path": "whisper/src/lib.rs", "rank": 33, "score": 47804.13866519788 }, { "content": "fn points_to_data(\n\n points: &Option<Vec<Point>>,\n\n interval: Interval,\n\n seconds_per_point: u32,\n\n) -> ArchiveData {\n\n let values = match points {\n\n None => {\n\n let count = (interval.until() - interval.from()) / seconds_per_point;\n\n vec![None; count as usize]\n\n }\n\n Some(points) => points_to_values(&points, interval.from(), seconds_per_point),\n\n };\n\n\n\n ArchiveData {\n\n from_interval: interval.from(),\n\n until_interval: interval.until(),\n\n step: seconds_per_point,\n\n values,\n\n }\n\n}\n", "file_path": "whisper/src/lib.rs", "rank": 34, "score": 47804.13866519788 }, { "content": "fn file_update_many(\n\n fh: &mut fs::File,\n\n header: &WhisperMetadata,\n\n points: &[Point],\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n let mut archive_index = 0;\n\n let mut current_points = vec![];\n\n\n\n for point in points {\n\n while point.interval + header.archives[archive_index].retention() < now {\n\n // We can't fit any more points in this archive\n\n if !current_points.is_empty() {\n\n // Commit all the points we've found that it can fit\n\n current_points.reverse(); // Put points in chronological order\n\n __archive_update_many(fh, &header, archive_index, &current_points)?;\n\n current_points.clear();\n\n }\n\n archive_index += 1;\n\n if archive_index >= header.archives.len() {\n", "file_path": 
"whisper/src/lib.rs", "rank": 35, "score": 46583.64373679648 }, { "content": "fn get_and_clear_error() -> Error {\n\n unsafe {\n\n let c_err = rrd_get_error();\n\n let error = Error(\n\n CStr::from_ptr(c_err)\n\n .to_str()\n\n .map_err(|e| Error(e.to_string()))\n\n .unwrap()\n\n .to_owned(),\n\n );\n\n rrd_clear_error();\n\n error\n\n }\n\n}\n\n\n", "file_path": "rrd/src/lib.rs", "rank": 36, "score": 44657.88359273298 }, { "content": "fn current_time() -> u32 {\n\n let since_epoch = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time\");\n\n since_epoch.as_secs() as u32\n\n}\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 37, "score": 44657.88359273298 }, { "content": "fn parse_template(\n\n source: Source,\n\n all_args: Option<Vec<(Option<String>, LiteralValue)>>,\n\n) -> Result<Template, String> {\n\n let (args, named_args) = split_args(all_args.unwrap_or_default()).ok_or_else(|| {\n\n format!(\n\n \"Bad call of template {:?}: positional argument after named one.\",\n\n source\n\n )\n\n })?;\n\n Ok(Template {\n\n source,\n\n args,\n\n named_args,\n\n })\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 38, "score": 44389.90881478431 }, { "content": "fn call_args(\n\n input: &str,\n\n) -> IResult<&str, (String, Vec<(Option<String>, Arg)>), VerboseError<&str>> {\n\n let (input, function) = ident(input)?;\n\n let (input, _) = c('(')(input)?;\n\n let (input, all_args) = separated_list0(c(','), call_arg)(input)?;\n\n let (input, _) = c(')')(input)?;\n\n\n\n Ok((input, (function, all_args)))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 39, "score": 44389.90881478431 }, { "content": "fn template_internal(\n\n input: &str,\n\n) -> IResult<&str, (Source, Option<Vec<(Option<String>, LiteralValue)>>), VerboseError<&str>> {\n\n let (input, _) = tag(\"template\")(input)?;\n\n let (input, _) = tag(\"(\")(input)?;\n\n let (input, source) = source(input)?;\n\n\n\n let (input, all_args) =\n\n 
opt(preceded(tag(\",\"), separated_list1(tag(\",\"), template_arg)))(input)?;\n\n let (input, _) = tag(\")\")(input)?;\n\n\n\n Ok((input, (source, all_args)))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 40, "score": 44389.90881478431 }, { "content": "fn walk_tree(\n\n dir: &Path,\n\n path_prefix: &MetricName,\n\n path_words: &[PathWord],\n\n acc: &mut Vec<(MetricName, PathBuf)>,\n\n) -> Result<(), ResponseError> {\n\n match path_words.len() {\n\n 0 => {}\n\n 1 => {\n\n let regex = path_words[0].to_regex().map_err(|_| ResponseError::Path)?;\n\n for entry in fs::read_dir(&dir)? {\n\n let path = entry?.path();\n\n if let Some(file_name) = file_name(&path) {\n\n if regex.is_match(&file_name) {\n\n let storage_path = path_prefix.join(file_name);\n\n acc.push((storage_path, path));\n\n }\n\n }\n\n }\n\n }\n", "file_path": "diamond-api/src/storage/whisper_fs.rs", "rank": 41, "score": 44389.90881478431 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_infoval() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_infoval>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(rrd_infoval))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_infoval>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(rrd_infoval))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_infoval>())).u_cnt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_infoval),\n\n \"::\",\n\n stringify!(u_cnt)\n\n )\n", "file_path": "rrd-sys/src/tests.rs", "rank": 42, "score": 43400.51738399056 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_info_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_info_t>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(rrd_info_t))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_info_t>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(rrd_info_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_info_t>())).key as *const _ as usize },\n\n 
0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_info_t),\n\n \"::\",\n\n stringify!(key)\n\n )\n", "file_path": "rrd-sys/src/tests.rs", "rank": 43, "score": 43400.51738399056 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_blob_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_blob_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(rrd_blob_t))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_blob_t>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(rrd_blob_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_blob_t>())).size as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_blob_t),\n\n \"::\",\n\n stringify!(size)\n\n )\n", "file_path": "rrd-sys/src/tests.rs", "rank": 44, "score": 43400.51738399056 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_file_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_file_t>(),\n\n 40usize,\n\n \"Size of rrd_file_t\"\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_file_t>(),\n\n 8usize,\n\n \"Alignment of rrd_file_t\"\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_file_t>())).header_len as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_file_t),\n\n \"::\",\n\n stringify!(header_len)\n\n )\n", "file_path": "rrd-sys/src/tests.rs", "rank": 45, "score": 43400.51738399056 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_context() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_context>(),\n\n 4352usize,\n\n concat!(\"Size of: \", stringify!(rrd_context))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_context>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(rrd_context))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_context>())).lib_errstr as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_context),\n\n \"::\",\n\n stringify!(lib_errstr)\n\n )\n", "file_path": 
"rrd-sys/src/tests.rs", "rank": 46, "score": 43400.51738399056 }, { "content": "fn get_temp_dir() -> TempDir {\n\n Builder::new()\n\n .prefix(\"rrd\")\n\n .tempdir()\n\n .expect(\"Temp dir created\")\n\n}\n\n\n", "file_path": "rrd_tests/tests/fetch.rs", "rank": 47, "score": 42609.95381770953 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_simple_file_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_simple_file_t>(),\n\n 4usize,\n\n \"Size of rrd_simple_file_t\"\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_simple_file_t>(),\n\n 4usize,\n\n \"Alignment of rrd_simple_file_t\"\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_simple_file_t>())).fd as *const _ as usize },\n\n 0usize,\n\n \"Offset of field rrd_simple_file_t::fd\"\n\n );\n\n}\n\n\n", "file_path": "rrd-sys/src/tests.rs", "rank": 48, "score": 42473.71183284797 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_time_value() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_time_value>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(rrd_time_value))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_time_value>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(rrd_time_value))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_time_value>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_time_value),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "rrd-sys/src/tests.rs", "rank": 49, "score": 42473.71183284797 }, { "content": "fn write_archive<F: Write + Seek>(\n\n fh: &mut F,\n\n archive: &ArchiveInfo,\n\n points: &[Point],\n\n base_interval: u32,\n\n) -> Result<(), io::Error> {\n\n let point_size = 12;\n\n\n\n let first_interval = points[0].interval;\n\n\n\n let offset = instant_offset(archive, base_interval, first_interval);\n\n\n\n let available_tail_space = (archive.points - offset) as usize;\n\n\n\n if available_tail_space < points.len() {\n\n let (tail, head) = 
points.split_at(available_tail_space);\n\n\n\n fh.seek(io::SeekFrom::Start(\n\n (archive.offset + offset * point_size).into(),\n\n ))?;\n", "file_path": "whisper/src/lib.rs", "rank": 50, "score": 42257.39168814495 }, { "content": "fn read_archive<R: Read + Seek>(\n\n fh: &mut R,\n\n archive: &ArchiveInfo,\n\n from_index: u32,\n\n until_index: u32,\n\n) -> Result<Vec<Point>, io::Error> {\n\n let from_index = from_index % archive.points;\n\n let until_index = until_index % archive.points;\n\n\n\n let mut series =\n\n Vec::with_capacity(((archive.points + until_index - from_index) % archive.points) as usize);\n\n\n\n let point_size = 12;\n\n let from_offset = archive.offset + from_index * point_size;\n\n\n\n fh.seek(io::SeekFrom::Start(from_offset.into()))?;\n\n if from_index < until_index {\n\n // If we don't wrap around the archive\n\n for _i in from_index..until_index {\n\n series.push(Point::read(fh)?);\n", "file_path": "whisper/src/lib.rs", "rank": 51, "score": 42257.39168814495 }, { "content": "fn __propagate<F: Read + Write + Seek>(\n\n fh: &mut F,\n\n header: &WhisperMetadata,\n\n timestamp: u32,\n\n higher: &ArchiveInfo,\n\n lower: &ArchiveInfo,\n\n) -> Result<bool, io::Error> {\n\n let lower_interval_start = timestamp - (timestamp % lower.seconds_per_point);\n\n\n\n fh.seek(io::SeekFrom::Start(higher.offset.into()))?;\n\n let higher_base = Point::read(fh)?;\n\n\n\n let higher_first_index = instant_offset(higher, higher_base.interval, lower_interval_start);\n\n\n\n let higher_last_index = {\n\n let higher_points = lower.seconds_per_point / higher.seconds_per_point;\n\n (higher_first_index + higher_points) % higher.points\n\n };\n\n\n\n let series = read_archive(fh, higher, higher_first_index, higher_last_index)?;\n", "file_path": "whisper/src/lib.rs", "rank": 52, "score": 41683.0399814578 }, { "content": "fn archive_fetch_interval<R: Read + Seek>(\n\n fh: &mut R,\n\n archive: &ArchiveInfo,\n\n interval: Interval,\n\n) -> Result<Option<Vec<Point>>, io::Error> 
{\n\n let base = archive.read_base(fh)?;\n\n if base.interval == 0 {\n\n Ok(None)\n\n } else {\n\n let from_index = instant_offset(archive, base.interval, interval.from());\n\n let until_index = instant_offset(archive, base.interval, interval.until());\n\n let points = read_archive(fh, &archive, from_index, until_index)?;\n\n Ok(Some(points))\n\n }\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 53, "score": 41268.0002573512 }, { "content": "fn random_string(len: usize) -> String {\n\n Alphanumeric.sample_string(&mut thread_rng(), len)\n\n}\n\n\n", "file_path": "rrd_tests/tests/fetch.rs", "rank": 54, "score": 41268.0002573512 }, { "content": "fn test_create(bench: &mut Bencher) {\n\n let temp_dir = get_temp_dir();\n\n let mut index = 1;\n\n let i = &mut index;\n\n bench.iter(|| {\n\n let path = get_file_path(&temp_dir, \"whisper_create\");\n\n create_file(&path).expect(\"creating\");\n\n *i += 1;\n\n });\n\n}\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 55, "score": 41268.0002573512 }, { "content": "fn test_update(bench: &mut Bencher) {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"whisper_update\");\n\n let mut file = create_file(&path).expect(\"Create file for update\");\n\n\n\n let mut current_value = 0.5;\n\n let i = &mut current_value;\n\n let now = current_time();\n\n\n\n bench.iter(|| {\n\n for j in 0..SECONDS_AGO {\n\n file.update(\n\n &Point {\n\n interval: now - SECONDS_AGO + j,\n\n value: *i,\n\n },\n\n now,\n\n )\n\n .expect(\"update\");\n\n *i += VALUE_STEP;\n\n }\n\n });\n\n}\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 56, "score": 41268.0002573512 }, { "content": "fn test_fetch(bench: &mut Bencher) {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"whisper_fetch\");\n\n let mut file = create_file(&path).expect(\"Create file for fetching\");\n\n\n\n let mut current_value = 0.5;\n\n let now = current_time();\n\n\n\n for j in 0..SECONDS_AGO {\n\n 
file.update(\n\n &Point {\n\n interval: now - SECONDS_AGO + j,\n\n value: current_value,\n\n },\n\n now,\n\n )\n\n .expect(\"update\");\n\n current_value += VALUE_STEP;\n\n }\n\n\n\n let from_time = now - SECONDS_AGO;\n\n let until_time = from_time + 1000;\n\n let interval = Interval::new(from_time, until_time).expect(\"interval\");\n\n bench.iter(|| {\n\n let seconds_per_point = file.suggest_archive(interval, now).expect(\"Archive\");\n\n file.fetch(seconds_per_point, interval, now).expect(\"fetch\");\n\n });\n\n}\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 57, "score": 41268.0002573512 }, { "content": "fn test_update_fetch(bench: &mut Bencher) {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"whisper_update\");\n\n let mut file = create_file(&path).expect(\"Create file for update\");\n\n\n\n let mut current_value = 0.5;\n\n let i = &mut current_value;\n\n let now = current_time();\n\n\n\n let from_time = now - SECONDS_AGO;\n\n let until_time = from_time + 1000;\n\n let interval = Interval::new(from_time, until_time).expect(\"interval\");\n\n bench.iter(|| {\n\n for j in 0..SECONDS_AGO {\n\n file.update(\n\n &Point {\n\n interval: now - SECONDS_AGO + j,\n\n value: *i,\n\n },\n\n now,\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 58, "score": 40341.19470620861 }, { "content": "fn write_archive_point<F: Read + Write + Seek>(\n\n fh: &mut F,\n\n archive: &ArchiveInfo,\n\n point: &Point,\n\n) -> Result<(), io::Error> {\n\n let base = archive.read_base(fh)?;\n\n let index = instant_offset(archive, base.interval, point.interval);\n\n fh.seek(io::SeekFrom::Start(\n\n (archive.offset + index * POINT_SIZE as u32).into(),\n\n ))?;\n\n point.write(fh)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 59, "score": 39766.84299952147 }, { "content": "#[test]\n\n#[allow(clippy::unreadable_literal)]\n\nfn issue8_many() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = 
get_file_path(&temp_dir, \"issue8\");\n\n\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 60,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n file.update_many(\n\n &[Point {\n\n interval: 1528240818,\n\n value: 123.0,\n\n }],\n\n 1528240900,\n\n )?;\n\n\n\n let points = file.dump(60)?;\n\n assert_eq!(points[0].interval, 1528240800);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper_tests/tests/issue8.rs", "rank": 60, "score": 39766.84299952147 }, { "content": "#[test]\n\nfn issue22_original() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"issue22\");\n\n\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 1,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n let now = 1000;\n\n\n\n file.update(\n\n &Point {\n\n interval: now - 1,\n\n value: 100.0,\n\n },\n\n now,\n\n )?;\n", "file_path": "whisper_tests/tests/issue22.rs", "rank": 61, "score": 39766.84299952147 }, { "content": "#[test]\n\n#[allow(clippy::unreadable_literal)]\n\nfn issue8_single() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"issue8\");\n\n\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 60,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n file.update(\n\n &Point {\n\n interval: 1528240818,\n\n value: 123.0,\n\n },\n\n 1528240900,\n\n )?;\n\n\n\n let points = file.dump(60)?;\n\n assert_eq!(points[0].interval, 1528240800);\n\n\n\n Ok(())\n\n}\n", "file_path": "whisper_tests/tests/issue8.rs", "rank": 62, "score": 39766.84299952147 }, { "content": "fn __archive_update_many<F: Read + Write + Seek>(\n\n fh: &mut F,\n\n header: &WhisperMetadata,\n\n archive_index: usize,\n\n points: &[Point],\n\n) -> Result<(), io::Error> {\n\n let archive = &header.archives[archive_index];\n\n\n\n let aligned_points: Vec<Point> = points\n\n .iter()\n\n 
.map(|p| p.align(archive.seconds_per_point))\n\n .collect();\n\n\n\n let chunks: Vec<Vec<Point>> = pack_points(&aligned_points, archive.seconds_per_point);\n\n\n\n // Read base point and determine where our writes will start\n\n let base = archive.read_base(fh)?;\n\n\n\n let base_interval = if base.interval == 0 {\n\n // This file's first update\n", "file_path": "whisper/src/lib.rs", "rank": 63, "score": 39766.84299952147 }, { "content": "fn adjust_instant_up(instant: u32, step: u32) -> u32 {\n\n (instant + step - 1) / step * step\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 64, "score": 39245.105957990614 }, { "content": "fn get_unit_multiplier(s: &str) -> Result<u32, String> {\n\n if s.is_empty() || \"seconds\".starts_with(s) {\n\n Ok(1)\n\n } else if \"minutes\".starts_with(s) {\n\n Ok(60)\n\n } else if \"hours\".starts_with(s) {\n\n Ok(3600)\n\n } else if \"days\".starts_with(s) {\n\n Ok(86400)\n\n } else if \"weeks\".starts_with(s) {\n\n Ok(86400 * 7)\n\n } else if \"years\".starts_with(s) {\n\n Ok(86400 * 365)\n\n } else {\n\n Err(format!(\"Invalid unit '{}'\", s))\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub struct Retention {\n", "file_path": "whisper/src/retention.rs", "rank": 65, "score": 39245.105957990614 }, { "content": "fn adjust_instant(instant: u32, step: u32) -> u32 {\n\n instant - (instant % step)\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 66, "score": 39245.105957990614 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-fetch.rs", "rank": 67, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_as_json() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n 
.suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"--json\"])\n\n .assert()\n\n .success()\n\n .stdout(\n\n predicate::str::contains(\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 68, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 69, "score": 38896.8669074688 }, { "content": "#[test]\n\n#[allow(clippy::unreadable_literal)]\n\nfn test_update_snapshot() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"update_snapshot\");\n\n\n\n {\n\n WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 1,\n\n points: 10,\n\n })\n\n .build(path.clone())?;\n\n }\n\n\n\n let header = Vec::new()\n\n .u32(1) // aggregation method\n\n .u32(10) // max retention\n\n .f32(0.5) // x files factor\n\n .u32(1) // archives\n\n // archive info\n\n .u32(28) // offset\n", "file_path": "whisper_tests/tests/update.rs", "rank": 70, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_as_plain() -> Result<(), Box<dyn Error>> {\n\n let filename = \"dump.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap()])\n\n .assert()\n\n .success()\n\n .stdout(\n\n predicate::str::contains(\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 71, 
"score": 38896.8669074688 }, { "content": "#[test]\n\nfn test_read_rrd() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"read_rrd\");\n\n\n\n Command::new(\"rrdtool\")\n\n .arg(\"create\")\n\n .arg(&path)\n\n .args(&[\"--step\", \"300\"])\n\n .arg(\"DS:temp:GAUGE:600:-273:5000\")\n\n .arg(\"RRA:AVERAGE:0.5:1:1200\")\n\n .arg(\"RRA:MIN:0.6:12:2400\")\n\n .arg(\"RRA:MAX:0.7:12:2400\")\n\n .arg(\"RRA:AVERAGE:0.8:12:2400\")\n\n .status()?;\n\n\n\n let rrd_info = rrd::info(&path, None, false)?;\n\n\n\n let info: HashMap<String, rrd::Value> = rrd_info.iter().collect();\n\n let seconds_per_pdp = info[\"step\"].as_long().unwrap();\n\n assert_eq!(seconds_per_pdp, 300);\n", "file_path": "rrd_tests/tests/fetch.rs", "rank": 72, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 73, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_as_pretty() -> Result<(), Box<dyn Error>> {\n\n let filename = \"dump.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"--time-format\", \"%c\"])\n\n .assert()\n\n .success()\n\n .stdout(\n\n predicate::str::contains(\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 74, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n 
.stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-resize.rs", "rank": 75, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn issue22_many_archives() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"issue22_many\");\n\n\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 2,\n\n points: 10,\n\n })\n\n .add_retention(Retention {\n\n seconds_per_point: 4,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n let now = 1000;\n\n\n\n for item in (2..18).step_by(2) {\n\n let delta = 18 - item;\n\n file.update(\n", "file_path": "whisper_tests/tests/issue22.rs", "rank": 76, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn issue22_many_archives_once() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n let path = get_file_path(&temp_dir, \"issue22_many\");\n\n\n\n let mut file = WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 2,\n\n points: 10,\n\n })\n\n .add_retention(Retention {\n\n seconds_per_point: 4,\n\n points: 10,\n\n })\n\n .build(path)?;\n\n\n\n let now = 1000;\n\n\n\n file.update_many(\n\n &[\n\n Point {\n", "file_path": "whisper_tests/tests/issue22.rs", "rank": 77, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_as_plain() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap()])\n\n .assert()\n\n .success()\n\n .stdout(\n\n predicate::str::contains(\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 78, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_help() 
-> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 79, "score": 38896.8669074688 }, { "content": "fn validate_archive_list(archives: &[Retention]) -> Result<(), BuilderError> {\n\n for (i, pair) in archives.windows(2).enumerate() {\n\n let archive = &pair[0];\n\n let next_archive = &pair[1];\n\n\n\n if archive.seconds_per_point >= next_archive.seconds_per_point {\n\n return Err(BuilderError::SamePrecision(i, *archive, *next_archive));\n\n }\n\n\n\n if next_archive.seconds_per_point % archive.seconds_per_point != 0 {\n\n return Err(BuilderError::UndividablePrecision(\n\n i,\n\n *archive,\n\n *next_archive,\n\n ));\n\n }\n\n\n\n let retention = archive.retention();\n\n let next_retention = next_archive.retention();\n\n\n", "file_path": "whisper/src/builder.rs", "rank": 80, "score": 38896.8669074688 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 81, "score": 38896.8669074688 }, { "content": "fn run(args: &Args) -> Result<(), Box<dyn Error>> {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs();\n\n\n\n let rrd_info = rrd::info(&args.rrd_path, None, false).unwrap();\n\n\n\n let info: HashMap<String, rrd::Value> = rrd_info.iter().collect();\n\n\n\n let seconds_per_pdp = &info[\"step\"].as_long().unwrap();\n\n\n\n let rras = rrd_info.rras();\n\n\n\n let datasources = rrd_info.datasources();\n\n\n\n // Grab the archive configuration\n\n let relevant_rras: Vec<_> = rras\n\n .iter()\n\n .filter(|rra| rra.cf == args.aggregation_method)\n\n 
.collect();\n\n\n\n if relevant_rras.is_empty() {\n", "file_path": "rrd2whisper/src/main.rs", "rank": 82, "score": 38769.07041755324 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 83, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_creating_simple() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"60:1440\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"Created: \").from_utf8())\n\n .stdout(predicate::str::contains(path.to_str().unwrap()).from_utf8())\n\n .stdout(predicate::str::contains(\"(17308 bytes)\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 84, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_value() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\", \"1:value\"])\n\n .assert()\n\n .code(1);\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 85, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_as_plain_for_unknown() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n let error = \"Unknown field \\\"unknown\\\". 
Valid fields are maxRetention, xFilesFactor, aggregationMethod, archives, fileSize\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"unknown\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error).from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 86, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n #[cfg(unix)]\n\n let error_msg = \"No such file or directory (os error 2)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The system cannot find the file specified. (os error 2)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 87, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n #[cfg(unix)]\n\n let error_msg = \"No such file or directory (os error 2)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The system cannot find the file specified. 
(os error 2)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 88, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 89, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-fetch.rs", "rank": 90, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-resize.rs", "rank": 91, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n #[cfg(unix)]\n\n let error_msg = \"No such file or directory (os error 2)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The system cannot find the file specified. 
(os error 2)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\", \"1:1\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 92, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 93, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_timestamp() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\", \"nottimestamp:1\"])\n\n .assert()\n\n .code(1);\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 94, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n #[cfg(unix)]\n\n let error_msg = \"No such file or directory (os error 2)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The system cannot find the path specified. 
(os error 3)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid/path\", \"60:1440\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 95, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_method() -> Result<(), Box<dyn Error>> {\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(\"info.wsp\")\n\n .tempfile()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let error =\n\n \"error: Invalid value for '--aggregationMethod <aggregation-method>': Unsupported aggregation method 'unknown'\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\n\n path.to_str().unwrap(),\n\n \"60:1440\",\n\n \"--aggregationMethod\",\n\n \"unknown\",\n\n ])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error).from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 96, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_with_invalid_xfactor() -> Result<(), Box<dyn Error>> {\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(\"info.wsp\")\n\n .tempfile()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let error = \"error: Invalid value for '--xFilesFactor <x-files-factor>': invalid float literal\";\n\n\n\n // TODO: validate nan as invalid value\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"60:1440\", \"--xFilesFactor\", \"nano\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error).from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 97, "score": 38078.64879389722 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", 
"rank": 98, "score": 38078.64879389722 }, { "content": "use super::*;\n\nuse crate::interval::Interval;\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::path::Path;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct DiffPoint {\n\n #[serde(rename = \"timestamp\")]\n\n pub interval: u32,\n\n #[serde(rename = \"value_a\")]\n\n pub value1: Option<f64>,\n\n #[serde(rename = \"value_b\")]\n\n pub value2: Option<f64>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct DiffArchive {\n\n #[serde(rename = \"archive\")]\n\n pub index: usize,\n", "file_path": "whisper/src/diff.rs", "rank": 99, "score": 21.811678259990707 } ]
Rust
src/algorithms/leaky_bucket.rs
jbg/ratelimit_meter
df4d7a3f9b26dffe4468ad2b05a512589296dd64
use crate::lib::*; use crate::thread_safety::ThreadsafeWrapper; use crate::{ algorithms::{Algorithm, RateLimitState, RateLimitStateWithClock}, instant, InconsistentCapacity, NegativeMultiDecision, NonConformance, }; #[derive(Debug, Clone, Eq, PartialEq)] pub struct LeakyBucket<P: instant::Relative = instant::TimeSource> { full: Duration, token_interval: Duration, point: PhantomData<P>, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct State<P: instant::Relative>(ThreadsafeWrapper<BucketState<P>>); impl<P: instant::Relative> Default for State<P> { fn default() -> Self { State(Default::default()) } } impl<P: instant::Relative> RateLimitState<LeakyBucket<P>, P> for State<P> {} impl<P: instant::Absolute> RateLimitStateWithClock<LeakyBucket<P>, P> for State<P> { fn last_touched(&self, _params: &LeakyBucket<P>) -> P { let data = self.0.snapshot(); data.last_update.unwrap_or_else(P::now) + data.level } } #[cfg(feature = "std")] mod std { use crate::instant::Relative; use evmap::ShallowCopy; impl<P: Relative> ShallowCopy for super::State<P> { unsafe fn shallow_copy(&mut self) -> Self { super::State(self.0.shallow_copy()) } } } #[derive(Debug, PartialEq)] pub struct TooEarly<P: instant::Relative>(P, Duration); impl<P: instant::Relative> fmt::Display for TooEarly<P> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "rate-limited until {:?}", self.0 + self.1) } } impl<P: instant::Relative> NonConformance<P> for TooEarly<P> { #[inline] fn earliest_possible(&self) -> P { self.0 + self.1 } } #[derive(Debug, Clone, PartialEq, Eq)] struct BucketState<P: instant::Relative> { level: Duration, last_update: Option<P>, } impl<P: instant::Relative> Default for BucketState<P> { fn default() -> Self { BucketState { level: Duration::new(0, 0), last_update: None, } } } impl<P: instant::Relative> Algorithm<P> for LeakyBucket<P> { type BucketState = State<P>; type NegativeDecision = TooEarly<P>; fn construct( capacity: NonZeroU32, cell_weight: NonZeroU32, 
per_time_unit: Duration, ) -> Result<Self, InconsistentCapacity> { if capacity < cell_weight { return Err(InconsistentCapacity::new(capacity, cell_weight)); } let token_interval = (per_time_unit * cell_weight.get()) / capacity.get(); Ok(LeakyBucket { full: per_time_unit, token_interval, point: PhantomData, }) } fn test_n_and_update( &self, state: &Self::BucketState, n: u32, t0: P, ) -> Result<(), NegativeMultiDecision<TooEarly<P>>> { let full = self.full; let weight = self.token_interval * n; if weight > self.full { return Err(NegativeMultiDecision::InsufficientCapacity(n)); } state.0.measure_and_replace(|state| { let mut new = BucketState { last_update: Some(t0), level: Duration::new(0, 0), }; let last = state.last_update.unwrap_or(t0); let t0 = cmp::max(t0, last); new.level = state.level - cmp::min(t0.duration_since(last), state.level); if weight + new.level <= full { new.level += weight; (Ok(()), Some(new)) } else { let wait_period = (weight + new.level) - full; ( Err(NegativeMultiDecision::BatchNonConforming( n, TooEarly(t0, wait_period), )), None, ) } }) } }
use crate::lib::*; use crate::thread_safety::ThreadsafeWrapper; use crate::{ algorithms::{Algorithm, RateLimitState, RateLimitStateWithClock}, instant, InconsistentCapacity, NegativeMultiDecision, NonConformance, }; #[derive(Debug, Clone, Eq, PartialEq)] pub struct LeakyBucket<P: instant::Relative = instant::TimeSource> { full: Duration, token_interval: Duration, point: PhantomData<P>, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct State<P: instant::Relative>(ThreadsafeWrapper<BucketState<P>>); impl<P: instant::Relative> Default for State<P> { fn default() -> Self { State(Default::default()) } } impl<P: instant::Relative> RateLimitState<LeakyBucket<P>, P> for State<P> {} impl<P: instant::Absolute> RateLimitStateWithClock<LeakyBucket<P>, P> for State<P> { fn last_touched(&self, _params: &LeakyBucket<P>) -> P { let data = self.0.snapshot(); data.last_update.unwrap_or_else(P::now) + data.level } } #[cfg(feature = "std")] mod std { use crate::instant::Relative; use evmap::ShallowCopy; impl<P: Relative> ShallowCopy for super::State<P> { unsafe fn shallow_copy(&mut self) -> Self { super::State(self.0.shallow_copy()) } } } #[derive(Debug, PartialEq)] pub struct TooEarly<P: instant::Relative>(P, Duration); impl<P: instant::Relative> fmt::Display for TooEarly<P> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "rate-limited until {:?}", self.0 + self.1) } } impl<P: instant::Relative> NonConformance<P> for TooEarly<P> { #[inline] fn earliest_possible(&self) -> P { self.0 + self.1 } } #[derive(Debug, Clone, PartialEq, Eq)] struct BucketState<P: instant::Relative> { level: Duration, last_update: Option<P>, } impl<P: instant::R
(per_time_unit * cell_weight.get()) / capacity.get(); Ok(LeakyBucket { full: per_time_unit, token_interval, point: PhantomData, }) } fn test_n_and_update( &self, state: &Self::BucketState, n: u32, t0: P, ) -> Result<(), NegativeMultiDecision<TooEarly<P>>> { let full = self.full; let weight = self.token_interval * n; if weight > self.full { return Err(NegativeMultiDecision::InsufficientCapacity(n)); } state.0.measure_and_replace(|state| { let mut new = BucketState { last_update: Some(t0), level: Duration::new(0, 0), }; let last = state.last_update.unwrap_or(t0); let t0 = cmp::max(t0, last); new.level = state.level - cmp::min(t0.duration_since(last), state.level); if weight + new.level <= full { new.level += weight; (Ok(()), Some(new)) } else { let wait_period = (weight + new.level) - full; ( Err(NegativeMultiDecision::BatchNonConforming( n, TooEarly(t0, wait_period), )), None, ) } }) } }
elative> Default for BucketState<P> { fn default() -> Self { BucketState { level: Duration::new(0, 0), last_update: None, } } } impl<P: instant::Relative> Algorithm<P> for LeakyBucket<P> { type BucketState = State<P>; type NegativeDecision = TooEarly<P>; fn construct( capacity: NonZeroU32, cell_weight: NonZeroU32, per_time_unit: Duration, ) -> Result<Self, InconsistentCapacity> { if capacity < cell_weight { return Err(InconsistentCapacity::new(capacity, cell_weight)); } let token_interval =
random
[ { "content": "/// Trait that all rate limit states have to implement around\n\n/// housekeeping in keyed rate limiters.\n\npub trait RateLimitState<P, I: instant::Relative>: Default + Send + Sync + Eq + fmt::Debug {}\n\n\n", "file_path": "src/algorithms.rs", "rank": 0, "score": 139496.12516807122 }, { "content": "#[derive(Debug, Eq, PartialEq, Clone)]\n\nstruct Tat<P: instant::Relative>(Option<P>);\n\n\n\nimpl<P: instant::Relative> Default for Tat<P> {\n\n fn default() -> Self {\n\n Tat(None)\n\n }\n\n}\n\n\n\n/// The GCRA's state about a single rate limiting history.\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct State<P: instant::Relative>(ThreadsafeWrapper<Tat<P>>);\n\n\n\nimpl<P: instant::Relative> Default for State<P> {\n\n fn default() -> Self {\n\n State(Default::default())\n\n }\n\n}\n\n\n\nimpl<P: instant::Relative> RateLimitState<GCRA<P>, P> for State<P> {}\n\n\n", "file_path": "src/algorithms/gcra.rs", "rank": 1, "score": 124157.37552766272 }, { "content": "/// The trait that implementations of metered rate-limiter algorithms\n\n/// have to implement.\n\n///\n\n/// Implementing structures are expected to represent the \"parameters\"\n\n/// (e.g., the allowed requests/s), and keep the information necessary\n\n/// to make a decision, e.g. concrete usage statistics for an\n\n/// in-memory rate limiter, in the associated structure\n\n/// [`BucketState`](#associatedtype.BucketState).\n\npub trait Algorithm<P: instant::Relative = instant::TimeSource>:\n\n Send + Sync + Sized + fmt::Debug\n\n{\n\n /// The state of a single rate limiting bucket.\n\n ///\n\n /// Every new rate limiting state is initialized as `Default`. The\n\n /// states must be safe to share across threads (this crate uses a\n\n /// `parking_lot` Mutex to allow that).\n\n type BucketState: RateLimitState<Self, P>;\n\n\n\n /// The type returned when a rate limiting decision for a single\n\n /// cell is negative. 
Each rate limiting algorithm can decide to\n\n /// return the type that suits it best, but most algorithms'\n\n /// decisions also implement\n\n /// [`NonConformance`](trait.NonConformance.html), to ease\n\n /// handling of how long to wait.\n\n type NegativeDecision: PartialEq + fmt::Display + fmt::Debug + Send + Sync;\n\n\n\n /// Constructs a rate limiter with the given parameters:\n\n /// `capacity` is the number of cells to allow, weighing\n", "file_path": "src/algorithms.rs", "rank": 2, "score": 117515.92745916202 }, { "content": "/// Trait that all rate limit states implement if there is a real-time\n\n/// clock available.\n\npub trait RateLimitStateWithClock<P, I: instant::Absolute>: RateLimitState<P, I> {\n\n /// Returns the last time instant that the state had any relevance\n\n /// (i.e. the rate limiter would behave exactly as if it was a new\n\n /// rate limiter after this time).\n\n ///\n\n /// If the state has not been touched for a given amount of time,\n\n /// the keyed rate limiter will expire it.\n\n ///\n\n /// # Thread safety\n\n /// This uses a bucket state snapshot to determine eligibility;\n\n /// race conditions can occur.\n\n fn last_touched(&self, params: &P) -> I;\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nmod std {\n\n use crate::instant;\n\n use evmap::ShallowCopy;\n\n\n", "file_path": "src/algorithms.rs", "rank": 3, "score": 116724.11369812317 }, { "content": "/// Provides additional information about non-conforming cells, most\n\n/// importantly the earliest time until the next cell could be\n\n/// considered conforming.\n\n///\n\n/// Since this does not account for effects like thundering herds,\n\n/// users should always add random jitter to the times given.\n\npub trait NonConformance<P: instant::Relative = instant::TimeSource> {\n\n /// Returns the earliest time at which a decision could be\n\n /// conforming (excluding conforming decisions made by the Decider\n\n /// that are made in the meantime).\n\n fn earliest_possible(&self) -> 
P;\n\n\n\n /// Returns the minimum amount of time from the time that the\n\n /// decision was made (relative to the `at` argument in a\n\n /// `Decider`'s `check_at` method) that must pass before a\n\n /// decision can be conforming. Since Durations can not be\n\n /// negative, a zero duration is returned if `from` is already\n\n /// after that duration.\n\n fn wait_time_from(&self, from: P) -> Duration {\n\n let earliest = self.earliest_possible();\n\n earliest.duration_since(earliest.min(from))\n\n }\n\n}\n\n\n", "file_path": "src/algorithms.rs", "rank": 5, "score": 115208.94038735001 }, { "content": "pub fn bench_all(c: &mut Criterion) {\n\n let id = \"algorithm/no_op\";\n\n\n\n let bm = Benchmark::new(id, move |b| {\n\n let algo = AlgorithmForTest::<Allower, Always>::default();\n\n let now = Always::now();\n\n let ms = Duration::from_millis(20);\n\n let state = algo.state();\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(algo.check(&state, now + (ms * i)).is_ok());\n\n });\n\n })\n\n .throughput(Throughput::Elements(1));\n\n c.bench(id, bm);\n\n}\n", "file_path": "benches/no_op.rs", "rank": 6, "score": 114628.35866274267 }, { "content": "pub fn bench_all(c: &mut Criterion) {\n\n bench_plain_algorithm_1elem(c);\n\n bench_plain_algorithm_multi(c);\n\n}\n\n\n", "file_path": "benches/algorithms.rs", "rank": 7, "score": 114628.35866274267 }, { "content": "pub trait NonConformanceExt<P: instant::Absolute>: NonConformance<P> {\n\n /// Returns the minimum amount of time (down to 0) that needs to\n\n /// pass from the current instant for the Decider to consider a\n\n /// cell conforming again.\n\n fn wait_time(&self) -> Duration {\n\n self.wait_time_from(P::now())\n\n }\n\n}\n\n\n\nimpl<P: instant::Absolute, T> NonConformanceExt<P> for T where T: NonConformance<P> {}\n\n\n", "file_path": "src/algorithms.rs", "rank": 8, "score": 113106.80373277623 }, { "content": "pub fn bench_all(c: &mut Criterion) {\n\n bench_direct(c);\n\n bench_keyed(c);\n\n}\n\n\n", 
"file_path": "benches/single_threaded.rs", "rank": 9, "score": 111481.45522854674 }, { "content": "pub fn bench_all(c: &mut Criterion) {\n\n bench_direct(c);\n\n bench_keyed(c);\n\n}\n\n\n", "file_path": "benches/multi_threaded.rs", "rank": 10, "score": 111481.45522854674 }, { "content": "/// Returns a \"current\" moment that's suitable for tests.\n\npub fn current_moment() -> instant::TimeSource {\n\n #[cfg(feature = \"std\")]\n\n return Instant::now();\n\n\n\n #[cfg(not(feature = \"std\"))]\n\n return Duration::from_secs(90);\n\n}\n", "file_path": "src/test_utilities.rs", "rank": 11, "score": 109358.07062296395 }, { "content": " /// Trait implemented by all rate limit states that are compatible\n\n /// with the KeyedRateLimiters.\n\n pub trait KeyableRateLimitState<P, I: instant::Absolute>:\n\n super::RateLimitStateWithClock<P, I> + ShallowCopy\n\n {\n\n }\n\n\n\n #[cfg(feature = \"std\")]\n\n impl<T, P, I> KeyableRateLimitState<P, I> for T\n\n where\n\n T: super::RateLimitStateWithClock<P, I> + ShallowCopy,\n\n I: instant::Absolute,\n\n {\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\npub use self::std::*;\n", "file_path": "src/algorithms.rs", "rank": 12, "score": 105479.9741761017 }, { "content": "fn bench_keyed(c: &mut Criterion) {\n\n let id = \"multi_threaded/keyed\";\n\n\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n |b, ref v| {\n\n bench_with_variants!(v, lim: KeyedBucket, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let mut children = vec![];\n\n\n\n for _i in 0..19 {\n\n let mut lim = lim.clone();\n\n let mut b = b.clone();\n\n children.push(thread::spawn(move || {\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(lim.check_at(i % 100, now + (ms * i)).is_ok());\n\n });\n", "file_path": "benches/multi_threaded.rs", "rank": 13, "score": 75033.94699496083 }, { "content": "fn bench_keyed(c: &mut Criterion) {\n\n let id = \"single_threaded/keyed\";\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n move |b, 
ref v| {\n\n bench_with_variants!(v, rl: KeyedBucket, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(rl.check_at(i % 100, now + (ms * i)).is_ok());\n\n });\n\n });\n\n },\n\n Variant::ALL,\n\n )\n\n .throughput(|_s| Throughput::Elements(1));\n\n c.bench(id, bm);\n\n}\n", "file_path": "benches/single_threaded.rs", "rank": 14, "score": 75033.94699496083 }, { "content": "fn bench_direct(c: &mut Criterion) {\n\n let id = \"single_threaded/direct\";\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n move |b, ref v| {\n\n bench_with_variants!(v, rl: DirectBucket, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(rl.check_at(now + (ms * i)).is_ok());\n\n });\n\n });\n\n },\n\n Variant::ALL,\n\n )\n\n .throughput(|_s| Throughput::Elements(1));\n\n c.bench(id, bm);\n\n}\n\n\n", "file_path": "benches/single_threaded.rs", "rank": 15, "score": 75033.94699496083 }, { "content": "fn bench_direct(c: &mut Criterion) {\n\n let id = \"multi_threaded/direct\";\n\n\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n |b, ref v| {\n\n bench_with_variants!(v, lim: DirectBucket, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let mut children = vec![];\n\n\n\n for _i in 0..19 {\n\n let mut lim = lim.clone();\n\n let mut b = b.clone();\n\n children.push(thread::spawn(move || {\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(lim.check_at(now + (ms * i)).is_ok());\n\n });\n", "file_path": "benches/multi_threaded.rs", "rank": 16, "score": 75033.94699496083 }, { "content": "/// A point in time that is used as a reference for measuring a rate\n\n/// limit. 
On the clock, it has meaning only relative to some other point in time.\n\n///\n\n/// When using `no_std`, users of this crate are expected to provide\n\n/// an impl of `Relative` that corresponds to their system's time source.\n\npub trait Relative:\n\n Sized\n\n + Sub<Duration, Output = Self>\n\n + Add<Duration, Output = Self>\n\n + PartialEq\n\n + Eq\n\n + Ord\n\n + Copy\n\n + Clone\n\n + Send\n\n + Sync\n\n + Debug\n\n{\n\n /// Returns the amount of time elapsed from an earlier point in time.\n\n fn duration_since(&self, earlier: Self) -> Duration;\n\n}\n\n\n", "file_path": "src/instant.rs", "rank": 17, "score": 73695.49256833472 }, { "content": "fn bench_plain_algorithm_1elem(c: &mut Criterion) {\n\n let id = \"algorithm/1\";\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n move |b, ref v| {\n\n bench_with_algorithm_variants!(v, algo, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let state = algo.state();\n\n\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(algo.check(&state, now + (ms * i)).is_ok());\n\n });\n\n });\n\n },\n\n Variant::ALL,\n\n )\n\n .throughput(|_s| Throughput::Elements(1));\n\n c.bench(id, bm);\n\n}\n\n\n", "file_path": "benches/algorithms.rs", "rank": 18, "score": 73118.24639956064 }, { "content": "fn bench_plain_algorithm_multi(c: &mut Criterion) {\n\n let id = \"algorithm/multi\";\n\n let elements: u32 = 10;\n\n let bm = ParameterizedBenchmark::new(\n\n id,\n\n move |b, ref v| {\n\n bench_with_algorithm_variants!(v, algo, {\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(20);\n\n let state = algo.state();\n\n\n\n let mut i = 0;\n\n b.iter(|| {\n\n i += 1;\n\n black_box(algo.check_n(&state, elements, now + (ms * i)).is_ok());\n\n });\n\n });\n\n },\n\n Variant::ALL,\n\n )\n\n .throughput(|_s| Throughput::Elements(1));\n\n c.bench(id, bm);\n\n}\n", "file_path": "benches/algorithms.rs", "rank": 19, "score": 73118.24639956064 }, { "content": "/// A point in time as given by a 
source of time. It is assumed to be\n\n/// monotonically moving forward.\n\npub trait Absolute: Relative {\n\n /// Returns the current moment in time, as given by the time\n\n /// source.\n\n fn now() -> Self;\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nmod std {\n\n use std::time::{Duration, Instant};\n\n\n\n impl super::Relative for Instant {\n\n fn duration_since(&self, earlier: Self) -> Duration {\n\n self.duration_since(earlier)\n\n }\n\n }\n\n\n\n impl super::Absolute for Instant {\n\n #[inline]\n\n fn now() -> Self {\n\n Instant::now()\n", "file_path": "src/instant.rs", "rank": 20, "score": 69378.26417382841 }, { "content": "#[test]\n\nfn nonconformance_wait_time_from() {\n\n let gcra = GCRA::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n gcra.test_and_update(&state, now).unwrap();\n\n gcra.test_and_update(&state, now).unwrap();\n\n if let Err(failure) = gcra.test_and_update(&state, now) {\n\n assert_eq!(ms * 2000, failure.wait_time_from(now));\n\n assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2000));\n\n assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2001));\n\n } else {\n\n assert!(false, \"Second attempt should fail\");\n\n }\n\n}\n", "file_path": "tests/gcra.rs", "rank": 21, "score": 67048.10666363464 }, { "content": "struct LeakCheck {\n\n usage_before: i64,\n\n n_iter: usize,\n\n}\n\n\n\nimpl Drop for LeakCheck {\n\n fn drop(&mut self) {\n\n let usage_after = resident_memsize();\n\n assert!(\n\n usage_after <= self.usage_before + LEAK_TOLERANCE,\n\n \"Plausible memory leak!\\nAfter {} iterations, usage before: {}, usage after: {}\",\n\n self.n_iter,\n\n self.usage_before,\n\n usage_after\n\n );\n\n }\n\n}\n\n\n\nimpl LeakCheck {\n\n fn new(n_iter: usize) -> Self {\n\n LeakCheck {\n\n n_iter,\n\n usage_before: resident_memsize(),\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "tests/memory.rs", "rank": 22, "score": 50558.59703728661 }, { "content": "#[test]\n\nfn expiration() {\n\n let ms = Duration::from_millis(1);\n\n let now = Instant::now();\n\n let then = now + ms * 2000; // two seconds later\n\n\n\n fn make_bucket<'a>() -> KeyedRateLimiter<&'a str> {\n\n let ms = Duration::from_millis(1);\n\n let now = Instant::now();\n\n let mut lim = KeyedRateLimiter::<&str>::new(nonzero!(1u32), Duration::from_secs(1));\n\n lim.check_at(\"foo\", now).unwrap();\n\n lim.check_at(\"bar\", now + ms * 200).unwrap();\n\n lim.check_at(\"baz\", now + ms * 800).unwrap();\n\n lim\n\n }\n\n\n\n // clean up all keys that are indistinguishable from unoccupied keys:\n\n let mut lim = make_bucket();\n\n let mut removed = lim.cleanup_at(None, then);\n\n removed.sort();\n\n assert_eq!(vec![\"bar\", \"baz\", \"foo\"], removed);\n", "file_path": "tests/keyed.rs", "rank": 23, "score": 43383.07841204845 }, { "content": "#[test]\n\nfn actual_threadsafety() {\n\n let gcra = GCRA::construct(nonzero!(20u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();\n\n\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n let mut children = vec![];\n\n\n\n gcra.test_and_update(&state, now).unwrap();\n\n for _i in 0..20 {\n\n let state = state.clone();\n\n let gcra = gcra.clone();\n\n children.push(thread::spawn(move || {\n\n gcra.test_and_update(&state, now).unwrap();\n\n }));\n\n }\n\n for child in children {\n\n child.join().unwrap();\n\n }\n\n assert_ne!(Ok(()), gcra.test_and_update(&state, now + ms * 2));\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now + ms * 1000));\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 24, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn allows_after_interval() {\n\n let gcra = GCRA::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();;\n\n let now 
= current_moment();\n\n let ms = Duration::from_millis(1);\n\n gcra.test_and_update(&state, now).unwrap();\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now + ms));\n\n assert_ne!(Ok(()), gcra.test_and_update(&state, now + ms * 2));\n\n // should be ok again in 1s:\n\n let next = now + Duration::from_secs(1);\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, next));\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 25, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn actual_threadsafety() {\n\n let mut lim = KeyedRateLimiter::<&str, GCRA>::new(nonzero!(20u32), Duration::from_secs(1));\n\n let now = Instant::now();\n\n let ms = Duration::from_millis(1);\n\n let mut children = vec![];\n\n\n\n lim.check_at(\"foo\", now).unwrap();\n\n for _i in 0..20 {\n\n let mut lim = lim.clone();\n\n children.push(thread::spawn(move || {\n\n lim.check_at(\"foo\", now).unwrap();\n\n }));\n\n }\n\n for child in children {\n\n child.join().unwrap();\n\n }\n\n assert!(!lim.check_at(\"foo\", now + ms * 2).is_ok());\n\n assert_eq!(Ok(()), lim.check_at(\"foo\", now + ms * 1000));\n\n}\n", "file_path": "tests/keyed.rs", "rank": 26, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn memleak_gcra() {\n\n let mut bucket = DirectRateLimiter::<GCRA>::build_with_capacity(nonzero!(1_000_000u32))\n\n .build()\n\n .unwrap();\n\n let leak_check = LeakCheck::new(500_000);\n\n\n\n for _i in 0..leak_check.n_iter {\n\n drop(bucket.check());\n\n }\n\n}\n\n\n", "file_path": "tests/memory.rs", "rank": 27, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn rejects_too_many() {\n\n let gcra = GCRA::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();;\n\n let now = current_moment();\n\n gcra.test_and_update(&state, now).unwrap();\n\n gcra.test_and_update(&state, now).unwrap();\n\n assert_ne!(\n\n Ok(()),\n\n gcra.test_and_update(&state, now),\n\n \"{:?} {:?}\",\n\n &state,\n\n &gcra\n\n 
);\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 28, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn allows_n_after_interval() {\n\n let gcra = GCRA::construct(nonzero!(2u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();;\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n assert_eq!(Ok(()), gcra.test_n_and_update(&state, 2, now));\n\n assert!(!gcra.test_n_and_update(&state, 2, now + ms).is_ok());\n\n // should be ok again in 1.5s:\n\n let next = now + Duration::from_secs(1);\n\n assert_eq!(\n\n Ok(()),\n\n gcra.test_n_and_update(&state, 2, next),\n\n \"now: {:?}\",\n\n next\n\n );\n\n\n\n // should always accommodate 0 cells:\n\n assert_eq!(Ok(()), gcra.test_n_and_update(&state, 0, next));\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 29, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn memleak_leakybucket() {\n\n let mut bucket = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(1_000_000u32));\n\n let leak_check = LeakCheck::new(500_000);\n\n\n\n for _i in 0..leak_check.n_iter {\n\n drop(bucket.check());\n\n }\n\n}\n\n\n", "file_path": "tests/memory.rs", "rank": 30, "score": 42025.238489357966 }, { "content": "#[test]\n\nfn actual_threadsafety() {\n\n let mut lim = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(20u32));\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n let mut children = vec![];\n\n\n\n lim.check_at(now).unwrap();\n\n for _i in 0..20 {\n\n let mut lim = lim.clone();\n\n children.push(thread::spawn(move || lim.check_at(now).is_ok()));\n\n }\n\n for child in children {\n\n child.join().unwrap();\n\n }\n\n assert!(!lim.check_at(now + ms * 2).is_ok());\n\n assert_eq!(Ok(()), lim.check_at(now + ms * 1000));\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 31, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn accepts_first_cell() {\n\n let gcra = GCRA::construct(nonzero!(5u32), 
nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();;\n\n let now = current_moment();\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now));\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 32, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn memleak_gcra_threaded() {\n\n let bucket = DirectRateLimiter::<GCRA>::build_with_capacity(nonzero!(1_000_000u32))\n\n .build()\n\n .unwrap();\n\n let leak_check = LeakCheck::new(5_000);\n\n\n\n for _i in 0..leak_check.n_iter {\n\n let mut bucket = bucket.clone();\n\n thread::spawn(move || drop(bucket.check())).join().unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/memory.rs", "rank": 33, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn correct_wait_time() {\n\n // Bucket adding a new element per 200ms:\n\n let gcra = GCRA::construct(nonzero!(5u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();\n\n let mut now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n let mut conforming = 0;\n\n for _i in 0..20 {\n\n now += ms;\n\n let res = gcra.test_and_update(&state, now);\n\n match res {\n\n Ok(()) => {\n\n conforming += 1;\n\n }\n\n Err(wait) => {\n\n now += wait.wait_time_from(now);\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now));\n\n conforming += 1;\n\n }\n\n }\n\n }\n\n assert_eq!(20, conforming);\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 34, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn memleak_gcra_multi() {\n\n let mut bucket = DirectRateLimiter::<GCRA>::build_with_capacity(nonzero!(1_000_000u32))\n\n .build()\n\n .unwrap();\n\n let leak_check = LeakCheck::new(500_000);\n\n\n\n for _i in 0..leak_check.n_iter {\n\n drop(bucket.check_n(2));\n\n }\n\n}\n\n\n", "file_path": "tests/memory.rs", "rank": 35, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn memleak_leakybucket_threaded() {\n\n let bucket = 
DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(1_000_000u32));\n\n let leak_check = LeakCheck::new(5_000);\n\n\n\n for _i in 0..leak_check.n_iter {\n\n let mut bucket = bucket.clone();\n\n thread::spawn(move || drop(bucket.check())).join().unwrap();\n\n }\n\n}\n", "file_path": "tests/memory.rs", "rank": 36, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn rejects_too_many() {\n\n let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(2u32));\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n assert_eq!(Ok(()), lb.check_at(now));\n\n assert_eq!(Ok(()), lb.check_at(now));\n\n\n\n assert_ne!(Ok(()), lb.check_at(now + ms * 2));\n\n\n\n // should be ok again in 1s:\n\n let next = now + Duration::from_millis(1002);\n\n assert_eq!(Ok(()), lb.check_at(next));\n\n assert_eq!(Ok(()), lb.check_at(next + ms));\n\n\n\n assert_ne!(Ok(()), lb.check_at(next + ms * 2), \"{:?}\", lb);\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 37, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn correctly_handles_per() {\n\n let ms = Duration::from_millis(1);\n\n let gcra = GCRA::construct(nonzero!(1u32), nonzero!(1u32), ms * 20).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();;\n\n let now = current_moment();\n\n\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now));\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now + ms));\n\n assert!(!gcra.test_and_update(&state, now + ms * 10).is_ok());\n\n assert_eq!(Ok(()), gcra.test_and_update(&state, now + ms * 20));\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 38, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn never_allows_more_than_capacity() {\n\n let ms = Duration::from_millis(1);\n\n let gcra = GCRA::construct(nonzero!(5u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <GCRA as Algorithm>::BucketState::default();\n\n let now = current_moment();\n\n\n\n // Should not allow the first 15 cells on a capacity 5 
bucket:\n\n assert!(gcra.test_n_and_update(&state, 15, now).is_err());\n\n\n\n // After 3 and 20 seconds, it should not allow 15 on that bucket either:\n\n assert!(gcra\n\n .test_n_and_update(&state, 15, now + (ms * 3 * 1000))\n\n .is_err());\n\n\n\n let result = gcra.test_n_and_update(&state, 15, now + (ms * 20 * 1000));\n\n match result {\n\n Err(NegativeMultiDecision::InsufficientCapacity(n)) => assert_eq!(n, 15),\n\n _ => panic!(\"Did not expect {:?}\", result),\n\n }\n\n}\n\n\n", "file_path": "tests/gcra.rs", "rank": 39, "score": 40794.88990300173 }, { "content": "#[test]\n\nfn tooearly_wait_time_from() {\n\n let lim =\n\n LeakyBucket::construct(nonzero!(1u32), nonzero!(1u32), Duration::from_secs(1)).unwrap();\n\n let state = <LeakyBucket as Algorithm>::BucketState::default();\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n lim.test_and_update(&state, now).unwrap();\n\n if let Err(failure) = lim.test_and_update(&state, now) {\n\n assert_eq!(ms * 1000, failure.wait_time_from(now));\n\n assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 1000));\n\n assert_eq!(Duration::new(0, 0), failure.wait_time_from(now + ms * 2001));\n\n } else {\n\n assert!(false, \"Second attempt should fail\");\n\n }\n\n}\n", "file_path": "tests/leaky_bucket.rs", "rank": 40, "score": 39674.88204212858 }, { "content": "#[test]\n\nfn correct_wait_time() {\n\n // Bucket adding a new element per 200ms:\n\n let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));\n\n let mut now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n let mut conforming = 0;\n\n for _i in 0..20 {\n\n now += ms;\n\n let res = lb.check_at(now);\n\n match res {\n\n Ok(()) => {\n\n conforming += 1;\n\n }\n\n Err(wait) => {\n\n now += wait.wait_time_from(now);\n\n assert_eq!(Ok(()), lb.check_at(now));\n\n conforming += 1;\n\n }\n\n }\n\n }\n\n assert_eq!(20, conforming);\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 41, "score": 
39674.88204212858 }, { "content": "#[test]\n\nfn never_allows_more_than_capacity() {\n\n let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n\n\n // Should not allow the first 15 cells on a capacity 5 bucket:\n\n assert_ne!(Ok(()), lb.check_n_at(15, now));\n\n\n\n // After 3 and 20 seconds, it should not allow 15 on that bucket either:\n\n assert_ne!(Ok(()), lb.check_n_at(15, now + (ms * 3 * 1000)));\n\n let result = lb.check_n_at(15, now + (ms * 20 * 1000));\n\n match result {\n\n Err(NegativeMultiDecision::InsufficientCapacity(n)) => assert_eq!(n, 15),\n\n _ => panic!(\"Did not expect {:?}\", result),\n\n }\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 42, "score": 39674.88204212858 }, { "content": "#[test]\n\nfn accepts_first_cell() {\n\n let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));\n\n assert_eq!(Ok(()), lb.check_at(current_moment()));\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 43, "score": 39674.88204212858 }, { "content": "#[test]\n\nfn prevents_time_travel() {\n\n let mut lb = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(5u32));\n\n let now = current_moment();\n\n let ms = Duration::from_millis(1);\n\n\n\n assert!(lb.check_at(now).is_ok());\n\n assert!(lb.check_at(now - ms).is_ok());\n\n assert!(lb.check_at(now - ms * 500).is_ok());\n\n}\n\n\n", "file_path": "tests/leaky_bucket.rs", "rank": 44, "score": 39674.88204212858 }, { "content": "#[test]\n\nfn different_states_per_key() {\n\n let mut lim = KeyedRateLimiter::<&str>::new(nonzero!(1u32), Duration::from_secs(1));\n\n let ms = Duration::from_millis(1);\n\n let now = Instant::now();\n\n assert_eq!(Ok(()), lim.check_at(\"foo\", now + ms));\n\n assert_eq!(Ok(()), lim.check_at(\"bar\", now + ms));\n\n assert_eq!(Ok(()), lim.check_at(\"baz\", now + ms));\n\n\n\n assert_ne!(Ok(()), lim.check_at(\"foo\", now + ms * 3), \"{:?}\", lim);\n\n 
assert_ne!(Ok(()), lim.check_at(\"bar\", now + ms * 3), \"{:?}\", lim);\n\n assert_ne!(Ok(()), lim.check_at(\"baz\", now + ms * 3), \"{:?}\", lim);\n\n}\n\n\n", "file_path": "tests/keyed.rs", "rank": 45, "score": 39674.88204212858 }, { "content": "fn resident_memsize() -> i64 {\n\n let mut out: libc::rusage = unsafe { std::mem::zeroed() };\n\n assert!(unsafe { libc::getrusage(libc::RUSAGE_SELF, &mut out) } == 0);\n\n out.ru_maxrss\n\n}\n\n\n\nconst LEAK_TOLERANCE: i64 = 1024 * 1024 * 10;\n\n\n", "file_path": "tests/memory.rs", "rank": 46, "score": 39220.26010447035 }, { "content": "type MapWriteHandle<K, P, A, H> =\n\n Arc<Mutex<WriteHandle<K, <A as Algorithm<P>>::BucketState, (), H>>>;\n\n\n\n/// An in-memory rate limiter that regulates a single rate limit for\n\n/// multiple keys.\n\n///\n\n/// Keyed rate limiters can be used to e.g. enforce a per-IP address\n\n/// or a per-customer request limit on the server side.\n\n///\n\n/// This implementation of the keyed rate limiter uses\n\n/// [`evmap`](../../../evmap/index.html), a read lock-free, concurrent\n\n/// hash map. Addition of new keys (e.g. a new customer making their\n\n/// first request) is synchronized and happens one at a time (it\n\n/// synchronizes writes to minimize the effects from `evmap`'s\n\n/// eventually consistent behavior on key addition), while reads of\n\n/// existing keys all happen simultaneously, then get synchronized by\n\n/// the rate limiting algorithm itself.\n\n///\n\n/// ```\n\n/// # use std::num::NonZeroU32;\n", "file_path": "src/state/keyed.rs", "rank": 47, "score": 37306.38464417921 }, { "content": "use crate::lib::*;\n\n\n\n/// The default time representation in use by rate limiters. 
To\n\n/// override it, pass a different `P` type argument to the algorithm\n\n/// and rate limiter bucket.\n\n///\n\n/// ## When using `std`\n\n/// The default time source is `Instant` when using std.\n\n///\n\n/// ## When using `no_std`\n\n/// In situations where `std` is not available, the fallback default\n\n/// time source is Duration. It only allows comparisons to a relative,\n\n/// fixed, point in time. Users are expected to determine that point\n\n/// in time and stick to it.\n\n#[cfg(feature = \"std\")]\n\npub type TimeSource = Instant;\n\n#[cfg(not(feature = \"std\"))]\n\npub type TimeSource = Duration;\n\n\n\n/// A point in time that is used as a reference for measuring a rate\n\n/// limit. On the clock, it has meaning only relative to some other point in time.\n\n///\n\n/// When using `no_std`, users of this crate are expected to provide\n\n/// an impl of `Relative` that corresponds to their system's time source.\n", "file_path": "src/instant.rs", "rank": 48, "score": 30060.91376934474 }, { "content": " }\n\n }\n\n\n\n use std::time::SystemTime;\n\n\n\n impl super::Relative for SystemTime {\n\n /// Returns the difference in times between the two\n\n /// SystemTimes. Due to the fallible nature of SystemTimes,\n\n /// returns the zero duration if a negative duration would\n\n /// result (e.g. 
due to system clock adjustments).\n\n fn duration_since(&self, earlier: Self) -> Duration {\n\n self.duration_since(earlier)\n\n .unwrap_or_else(|_| Duration::new(0, 0))\n\n }\n\n }\n\n\n\n impl super::Absolute for SystemTime {\n\n #[inline]\n\n fn now() -> Self {\n\n SystemTime::now()\n", "file_path": "src/instant.rs", "rank": 49, "score": 30054.390020360617 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Relative for Duration {\n\n fn duration_since(&self, earlier: Self) -> Duration {\n\n *self - earlier\n\n }\n\n}\n", "file_path": "src/instant.rs", "rank": 50, "score": 30046.95033589908 }, { "content": "[![Build Status](https://travis-ci.org/antifuchs/ratelimit_meter.svg?branch=master)](https://travis-ci.org/antifuchs/ratelimit_meter) [![Docs](https://docs.rs/ratelimit_meter/badge.svg)](https://docs.rs/ratelimit_meter/) [![crates.io](https://img.shields.io/crates/v/ratelimit_meter.svg)](https://crates.io/crates/ratelimit_meter)\n\n\n\n# Rate-Limiting with leaky buckets in Rust\n\n\n\nThis crate implements two rate-limiting algorithms in Rust:\n\n* a [leaky bucket](https://en.wikipedia.org/wiki/Leaky_bucket#As_a_meter) and\n\n* a variation on the leaky bucket, the\n\n [generic cell rate algorithm](https://en.wikipedia.org/wiki/Generic_cell_rate_algorithm) (GCRA)\n\n for rate-limiting and scheduling.\n\n\n\n`ratelimit_meter` is usable in `no_std` mode, with a few trade-offs on\n\nfeatures.\n\n\n\n## Installation\n\n\n\nAdd the crate `ratelimit_meter` to your `Cargo.toml`\n\nfile; [the crates.io page](https://crates.io/crates/ratelimit_meter)\n\ncan give you the exact thing to paste.\n\n\n\n## API Docs\n\n\n\nFind them [on docs.rs](https://docs.rs/ratelimit_meter/) for the latest version!\n\n\n", "file_path": "README.md", "rank": 51, "score": 18969.353158102935 }, { "content": "## Design and implementation\n\n\n\nUnlike some other token bucket algorithms, the GCRA one assumes that\n\nall units of work are of the same \"weight\", and so allows some\n\noptimizations which 
result in much more concise and fast code (it does\n\nnot even use multiplication or division in the \"hot\" path for a\n\nsingle-cell decision).\n\n\n\nAll rate-limiting algorithm implementations in this crate are\n\nthread-safe. Here are some benchmarks for repeated decisions (run on\n\nmy macbook pro, this will differ on your hardware, etc etc):\n\n\n\n```\n\n$ cargo bench\n\n Finished release [optimized] target(s) in 0.16s\n\n Running target/release/deps/ratelimit_meter-9874176533f7e1a0\n\n\n\nrunning 1 test\n\ntest test_wait_time_from ... ignored\n\n\n\ntest result: ok. 0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out\n\n\n\n Running target/release/deps/criterion-67011381a5f6ed00\n\nmulti_threaded/20_threads/GCRA\n\n time: [1.9664 us 2.0747 us 2.1503 us]\n\n thrpt: [465.04 Kelem/s 482.00 Kelem/s 508.55 Kelem/s]\n\nFound 10 outliers among 100 measurements (10.00%)\n\n 4 (4.00%) low severe\n\n 4 (4.00%) low mild\n\n 2 (2.00%) high mild\n\nmulti_threaded/20_threads/LeakyBucket\n\n time: [2.4536 us 2.4878 us 2.5189 us]\n\n thrpt: [396.99 Kelem/s 401.96 Kelem/s 407.56 Kelem/s]\n\nFound 8 outliers among 100 measurements (8.00%)\n\n 5 (5.00%) low severe\n\n 3 (3.00%) low mild\n\n\n\nsingle_threaded/1_element/GCRA\n\n time: [68.613 ns 68.779 ns 68.959 ns]\n\n thrpt: [14.501 Melem/s 14.539 Melem/s 14.575 Melem/s]\n\nFound 13 outliers among 100 measurements (13.00%)\n\n 9 (9.00%) high mild\n\n 4 (4.00%) high severe\n\nsingle_threaded/1_element/LeakyBucket\n\n time: [64.513 ns 64.855 ns 65.272 ns]\n\n thrpt: [15.321 Melem/s 15.419 Melem/s 15.501 Melem/s]\n\nFound 16 outliers among 100 measurements (16.00%)\n\n 4 (4.00%) high mild\n", "file_path": "README.md", "rank": 52, "score": 18965.72636748784 }, { "content": "# Thanks for contributing to this project!\n\n\n\nI'm completely thrilled that you find this project useful enough to\n\nspend your time on!\n\n\n\n## Code of Conduct\n\n\n\nContributors are expected to adhere to the\n\n[Contributor Covenant Code of 
Conduct](http://contributor-covenant.org/version/1/4/),\n\nversion 1.4. See [CoC.md](CoC.md) for the full text.\n\n\n\n## Things you might do\n\n\n\nFeel free to:\n\n\n\n* [Report issues](../../issues)\n\n* [Send me a pull request](../../pulls) or\n\n* Just get in touch with me: asf@boinkor.net!\n", "file_path": "CONTRIBUTING.md", "rank": 53, "score": 18965.015095831455 }, { "content": " 12 (12.00%) high severe\n\n\n\nsingle_threaded/multi_element/GCRA\n\n time: [96.461 ns 96.976 ns 97.578 ns]\n\n thrpt: [102.48 Melem/s 103.12 Melem/s 103.67 Melem/s]\n\nFound 11 outliers among 100 measurements (11.00%)\n\n 4 (4.00%) high mild\n\n 7 (7.00%) high severe\n\nsingle_threaded/multi_element/LeakyBucket\n\n time: [69.500 ns 70.359 ns 71.349 ns]\n\n thrpt: [140.16 Melem/s 142.13 Melem/s 143.88 Melem/s]\n\nFound 9 outliers among 100 measurements (9.00%)\n\n 6 (6.00%) high mild\n\n 3 (3.00%) high severe\n\n\n\nno-op single-element decision\n\n time: [23.755 ns 23.817 ns 23.883 ns]\n\nFound 11 outliers among 100 measurements (11.00%)\n\n 5 (5.00%) high mild\n\n 6 (6.00%) high severe\n\n\n\nno-op multi-element decision\n\n time: [22.772 ns 22.940 ns 23.125 ns]\n\nFound 5 outliers among 100 measurements (5.00%)\n\n 5 (5.00%) high mild\n\n```\n\n\n\n## Contributions welcome!\n\n\n\nI am actively hoping that this project gives people joy in using\n\nrate-limiting techniques. You can use these techniques for so many\n\nthings (from throttling API requests to ensuring you don't spam people\n\nwith emails about the same thing)!\n\n\n\nSo if you have any thoughts about the API design, the internals, or\n\nyou want to implement other rate-limiting algotrithms, I would be\n\nthrilled to have your input. 
See [CONTRIBUTING.md](CONTRIBUTING.md)\n\nfor details!\n", "file_path": "README.md", "rank": 54, "score": 18962.89914375821 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, gender identity and expression, level of experience,\n\nnationality, personal appearance, race, religion, or sexual identity and\n\norientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\nadvances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any 
contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CoC.md", "rank": 55, "score": 18338.047629384757 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at asf@boinkor.net. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at [http://contributor-covenant.org/version/1/4][version]\n\n\n\n[homepage]: http://contributor-covenant.org\n\n[version]: http://contributor-covenant.org/version/1/4/\n", "file_path": "CoC.md", "rank": 56, "score": 18337.627411178226 }, { "content": "/// ```\n\n/// # use std::num::NonZeroU32;\n\n/// # use std::time::Duration;\n\n/// use ratelimit_meter::{KeyedRateLimiter};\n\n/// # #[macro_use] extern crate nonzero_ext;\n\n/// # extern crate ratelimit_meter;\n\n/// # fn main () {\n\n/// let mut 
limiter = KeyedRateLimiter::<&str>::new(nonzero!(100u32), Duration::from_secs(5));\n\n/// limiter.check(\"hi there\");\n\n/// // time passes...\n\n///\n\n/// // remove all keys that have been expireable for 10 minutes:\n\n/// limiter.cleanup(Duration::from_secs(600));\n\n/// # }\n\n/// ```\n\n#[derive(Clone)]\n\npub struct KeyedRateLimiter<\n\n K: Eq + Hash + Clone,\n\n A: Algorithm<P> = DefaultAlgorithm,\n\n P: instant::Absolute = Instant,\n", "file_path": "src/state/keyed.rs", "rank": 58, "score": 28.710847934759506 }, { "content": "//! The Generic Cell Rate Algorithm\n\n\n\nuse crate::lib::*;\n\n\n\nuse crate::{\n\n algorithms::{Algorithm, NonConformance, RateLimitState, RateLimitStateWithClock},\n\n instant,\n\n thread_safety::ThreadsafeWrapper,\n\n InconsistentCapacity, NegativeMultiDecision,\n\n};\n\n\n\n#[cfg(feature = \"std\")]\n\nmod std {\n\n use crate::instant::Relative;\n\n use evmap::ShallowCopy;\n\n\n\n impl<P: Relative> ShallowCopy for super::State<P> {\n\n unsafe fn shallow_copy(&mut self) -> Self {\n\n super::State(self.0.shallow_copy())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n", "file_path": "src/algorithms/gcra.rs", "rank": 59, "score": 25.9584064230702 }, { "content": "impl<T> Eq for ThreadsafeWrapper<T> where T: fmt::Debug + Default + Clone + PartialEq + Eq {}\n\n\n\nimpl<T> fmt::Debug for ThreadsafeWrapper<T>\n\nwhere\n\n T: fmt::Debug + Default + Clone + PartialEq + Eq,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let data = self.data.lock();\n\n data.fmt(f)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nmod std {\n\n use super::*;\n\n use evmap::ShallowCopy;\n\n\n\n impl<T> ShallowCopy for ThreadsafeWrapper<T>\n\n where\n\n T: fmt::Debug + Default + Clone + PartialEq + Eq,\n", "file_path": "src/thread_safety.rs", "rank": 61, "score": 23.908005863300627 }, { "content": "\n\n#[cfg(feature = \"std\")]\n\npub use self::state::KeyedRateLimiter;\n\n\n\npub use self::errors::*;\n\n\n\n/// A 
facade around all the types we need from std/core crates, to\n\n/// avoid unnecessary cfg-conditionalization everywhere.\n\nmod lib {\n\n mod core {\n\n #[cfg(not(feature = \"std\"))]\n\n pub use core::*;\n\n\n\n #[cfg(feature = \"std\")]\n\n pub use std::*;\n\n }\n\n\n\n pub use self::core::clone::Clone;\n\n pub use self::core::cmp::{Eq, Ord, PartialEq};\n\n pub use self::core::default::Default;\n", "file_path": "src/lib.rs", "rank": 62, "score": 23.615734455797163 }, { "content": " /// # #[macro_use] extern crate nonzero_ext;\n\n /// # extern crate ratelimit_meter;\n\n /// # fn main () {\n\n /// let _gcra = DirectRateLimiter::<LeakyBucket>::per_second(nonzero!(100u32));\n\n /// # }\n\n /// ```\n\n pub fn per_second(capacity: NonZeroU32) -> Self {\n\n Self::new(capacity, Duration::from_secs(1))\n\n }\n\n\n\n /// Return a builder that can be used to construct a rate limiter using\n\n /// the parameters passed to the Builder.\n\n pub fn build_with_capacity(capacity: NonZeroU32) -> Builder<P, A> {\n\n Builder {\n\n capacity,\n\n cell_weight: nonzero!(1u32),\n\n time_unit: Duration::from_secs(1),\n\n end_result: PhantomData,\n\n point_result: PhantomData,\n\n }\n", "file_path": "src/state/direct.rs", "rank": 63, "score": 23.460696391772323 }, { "content": "use crate::lib::*;\n\n\n\n#[cfg(feature = \"std\")]\n\nuse parking_lot::Mutex;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nuse spin::Mutex;\n\n\n\n#[derive(Clone)]\n\n/// Wraps the atomic operations on a Decider's state in a threadsafe\n\n/// fashion.\n\npub(crate) struct ThreadsafeWrapper<T>\n\nwhere\n\n T: fmt::Debug + Default + Clone + PartialEq + Eq,\n\n{\n\n data: Arc<Mutex<T>>,\n\n}\n\n\n\nimpl<T> Default for ThreadsafeWrapper<T>\n\nwhere\n", "file_path": "src/thread_safety.rs", "rank": 64, "score": 22.0587262527091 }, { "content": "impl<P, A, K> KeyedRateLimiter<K, A, P>\n\nwhere\n\n P: instant::Absolute,\n\n A: Algorithm<P>,\n\n A::BucketState: KeyableRateLimitState<A, P>,\n\n K: Eq + Hash + Clone,\n\n{\n\n /// 
Construct a new rate limiter that allows `capacity` cells per\n\n /// time unit through.\n\n /// # Examples\n\n /// ```\n\n /// # use std::num::NonZeroU32;\n\n /// # use std::time::Duration;\n\n /// use ratelimit_meter::{KeyedRateLimiter};\n\n /// # #[macro_use] extern crate nonzero_ext;\n\n /// # extern crate ratelimit_meter;\n\n /// # fn main () {\n\n /// let _limiter = KeyedRateLimiter::<&str>::new(nonzero!(100u32), Duration::from_secs(5));\n\n /// # }\n\n /// ```\n", "file_path": "src/state/keyed.rs", "rank": 65, "score": 21.889820355539253 }, { "content": "/// // After a sufficient time period, cells are allowed again:\n\n/// assert_eq!(Ok(()), limiter.check_at(now + ms*50));\n\n/// # }\n\n/// # #[cfg(not(feature = \"std\"))] fn main() {}\n\n/// ```\n\n#[derive(Debug, Clone)]\n\npub struct GCRA<P: instant::Relative = instant::TimeSource> {\n\n // The \"weight\" of a single packet in units of time.\n\n t: Duration,\n\n\n\n // The \"capacity\" of the bucket.\n\n tau: Duration,\n\n\n\n point: PhantomData<P>,\n\n}\n\n\n\nimpl<P: instant::Relative> Algorithm<P> for GCRA<P> {\n\n type BucketState = State<P>;\n\n\n\n type NegativeDecision = NotUntil<P>;\n", "file_path": "src/algorithms/gcra.rs", "rank": 66, "score": 21.740449115466646 }, { "content": " ///\n\n /// # Panics\n\n /// Panics if an error occurs in acquiring any locks.\n\n pub(crate) fn measure_and_replace<F, E>(&self, f: F) -> Result<(), E>\n\n where\n\n F: Fn(&T) -> (Result<(), E>, Option<T>),\n\n {\n\n let mut data = self.data.lock();\n\n let (decision, new_data) = f(&*data);\n\n if let Some(new_data) = new_data {\n\n *data = new_data;\n\n }\n\n decision\n\n }\n\n\n\n /// Retrieves and returns a snapshot of the bucket state. This\n\n /// isn't thread safe, but can be used to restore an old copy of\n\n /// the bucket if necessary.\n\n ///\n\n /// # Thread safety\n\n /// This function operates threadsafely, but you're literally\n\n /// taking a copy of data that will change. 
Relying on the data\n\n /// that is returned *will* race.\n\n pub(crate) fn snapshot(&self) -> T {\n\n let data = self.data.lock();\n\n data.clone()\n\n }\n\n}\n", "file_path": "src/thread_safety.rs", "rank": 67, "score": 21.351725284997993 }, { "content": "//! use std::time::Duration;\n\n//! use ratelimit_meter::{DirectRateLimiter, GCRA};\n\n//!\n\n//! # #[macro_use] extern crate nonzero_ext;\n\n//! # extern crate ratelimit_meter;\n\n//! # #[cfg(feature = \"std\")]\n\n//! # fn main () {\n\n//! // Allow 50 units/second across all threads:\n\n//! let mut lim = DirectRateLimiter::<GCRA>::per_second(nonzero!(50u32));\n\n//! let mut thread_lim = lim.clone();\n\n//! thread::spawn(move || { assert_eq!(Ok(()), thread_lim.check());});\n\n//! assert_eq!(Ok(()), lim.check());\n\n//! # }\n\n//! # #[cfg(not(feature = \"std\"))]\n\n//! # fn main() {}\n\n//! ```\n\n//!\n\n//! ## Usage with `no_std`\n\n//!\n\n//! `ratelimit_meter` can be used in `no_std` crates, with a reduced\n", "file_path": "src/lib.rs", "rank": 68, "score": 21.17109467435181 }, { "content": " ///\n\n /// ```\n\n /// # use std::time::Duration;\n\n /// use ratelimit_meter::{KeyedRateLimiter, GCRA};\n\n /// # #[macro_use] extern crate nonzero_ext;\n\n /// # extern crate ratelimit_meter;\n\n /// # fn main () {\n\n /// let _limiter = KeyedRateLimiter::<&str, GCRA>::per_second(nonzero!(100u32));\n\n /// # }\n\n /// ```\n\n pub fn per_second(capacity: NonZeroU32) -> Self {\n\n Self::new(capacity, Duration::from_secs(1))\n\n }\n\n\n\n /// Return a constructor that can be used to construct a keyed\n\n /// rate limiter with the builder pattern.\n\n pub fn build_with_capacity(capacity: NonZeroU32) -> Builder<K, P, A, RandomState> {\n\n Builder {\n\n capacity,\n\n ..Default::default()\n", "file_path": "src/state/keyed.rs", "rank": 70, "score": 21.08975637377748 }, { "content": "use crate::lib::*;\n\nuse crate::{algorithms::Algorithm, instant, NegativeMultiDecision};\n\n\n\n/// A representation of a bare in-memory 
algorithm, without any bucket\n\n/// attached.\n\n#[derive(Debug)]\n\npub struct AlgorithmForTest<A: Algorithm<P>, P: instant::Relative>(A, PhantomData<P>);\n\n\n\nimpl<'a, A, P> AlgorithmForTest<A, P>\n\nwhere\n\n A: Algorithm<P>,\n\n P: instant::Relative,\n\n{\n\n pub fn new<U: Into<Option<NonZeroU32>>, D: Into<Option<Duration>>>(\n\n cap: NonZeroU32,\n\n weight: U,\n\n duration: D,\n\n ) -> Self {\n\n AlgorithmForTest(\n\n A::construct(\n", "file_path": "src/test_utilities/algorithms.rs", "rank": 71, "score": 21.035475491519005 }, { "content": " }\n\n}\n\n\n\n/// A constructor for keyed rate limiters.\n\npub struct Builder<K: Eq + Hash + Clone, P: instant::Absolute, A: Algorithm<P>, H: BuildHasher> {\n\n end_result: PhantomData<(K, P, A)>,\n\n capacity: NonZeroU32,\n\n cell_weight: NonZeroU32,\n\n per_time_unit: Duration,\n\n hasher: H,\n\n map_capacity: Option<usize>,\n\n}\n\n\n\nimpl<K, A, P> Default for Builder<K, P, A, RandomState>\n\nwhere\n\n K: Eq + Hash + Clone,\n\n P: instant::Absolute,\n\n A: Algorithm<P>,\n\n A::BucketState: KeyableRateLimitState<A, P>,\n\n{\n", "file_path": "src/state/keyed.rs", "rank": 72, "score": 20.834384360374457 }, { "content": " fn default() -> Self {\n\n DirectBucket(DirectRateLimiter::per_second(nonzero!(50u32)))\n\n }\n\n}\n\nimpl<A, P> DirectBucket<A, P>\n\nwhere\n\n P: instant::Relative,\n\n A: Algorithm<P>,\n\n{\n\n pub fn limiter(self) -> DirectRateLimiter<A, P> {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nmod std {\n\n use super::*;\n\n use crate::{algorithms::KeyableRateLimitState, instant::Absolute, KeyedRateLimiter};\n\n\n\n pub struct KeyedBucket<A: Algorithm<P>, P: Absolute>(KeyedRateLimiter<u32, A, P>)\n", "file_path": "src/test_utilities/variants.rs", "rank": 73, "score": 20.491963677853345 }, { "content": " /// Allows all cells through unconditionally.\n\n fn test_n_and_update(\n\n &self,\n\n _state: &Self::BucketState,\n\n _n: u32,\n\n _t0: Always,\n\n ) -> Result<(), 
NegativeMultiDecision<Impossible>> {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A pseudo-instant that never changes.\n\n///\n\n/// It is used to implement the `Allower` rate-limiter type, which\n\n/// never denies any requests.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Always();\n\nimpl instant::Relative for Always {\n\n fn duration_since(&self, _other: Self) -> Duration {\n\n Duration::new(0, 0)\n", "file_path": "src/example_algorithms.rs", "rank": 74, "score": 19.808726105280748 }, { "content": "///\n\n/// ```\n\n/// # use ratelimit_meter::{DirectRateLimiter, GCRA};\n\n/// # use std::num::NonZeroU32;\n\n/// # use std::time::{Instant, Duration};\n\n/// # #[macro_use] extern crate nonzero_ext;\n\n/// # extern crate ratelimit_meter;\n\n/// # #[cfg(feature = \"std\")]\n\n/// # fn main () {\n\n/// let mut limiter = DirectRateLimiter::<GCRA>::per_second(nonzero!(20u32));\n\n/// let now = Instant::now();\n\n/// let ms = Duration::from_millis(1);\n\n/// assert_eq!(Ok(()), limiter.check_at(now)); // the first cell is free\n\n/// for i in 0..20 {\n\n/// // Spam a lot:\n\n/// assert!(limiter.check_at(now).is_ok(), \"at {}\", i);\n\n/// }\n\n/// // We have exceeded the bucket capacity:\n\n/// assert!(limiter.check_at(now).is_err());\n\n///\n", "file_path": "src/algorithms/gcra.rs", "rank": 75, "score": 19.62834894516806 }, { "content": " {\n\n unsafe fn shallow_copy(&mut self) -> Self {\n\n ThreadsafeWrapper {\n\n data: self.data.shallow_copy(),\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> ThreadsafeWrapper<T>\n\nwhere\n\n T: fmt::Debug + Default + Clone + PartialEq + Eq,\n\n{\n\n #[inline]\n\n /// Wraps retrieving a bucket's data, calls a function to make a\n\n /// decision and return a new state, and then tries to set the\n\n /// state on the bucket.\n\n ///\n\n /// This function can loop and call the decision closure again if\n\n /// the bucket state couldn't be set.\n", "file_path": "src/thread_safety.rs", "rank": 76, "score": 19.44059592608305 }, { 
"content": "use crate::lib::*;\n\nuse crate::{\n\n algorithms::{Algorithm, RateLimitState, RateLimitStateWithClock},\n\n instant,\n\n instant::Absolute,\n\n DirectRateLimiter, InconsistentCapacity, NegativeMultiDecision,\n\n};\n\n\n\n/// The most naive implementation of a rate-limiter ever: Always\n\n/// allows every cell through.\n\n/// # Example\n\n/// ```\n\n/// use ratelimit_meter::DirectRateLimiter;\n\n/// use ratelimit_meter::example_algorithms::Allower;\n\n/// let mut allower = Allower::ratelimiter();\n\n/// assert!(allower.check().is_ok());\n\n/// ```\n\n#[derive(Default, Copy, Clone, Debug)]\n\npub struct Allower {}\n\n\n", "file_path": "src/example_algorithms.rs", "rank": 77, "score": 19.204893846907993 }, { "content": " pub use self::core::fmt::Debug;\n\n pub use self::core::marker::{Copy, PhantomData, Send, Sized, Sync};\n\n pub use self::core::num::NonZeroU32;\n\n pub use self::core::ops::{Add, Sub};\n\n pub use self::core::time::Duration;\n\n\n\n pub use self::core::cmp;\n\n pub use self::core::fmt;\n\n\n\n /// Imports that are only available on std.\n\n #[cfg(feature = \"std\")]\n\n mod std {\n\n pub use std::collections::hash_map::RandomState;\n\n pub use std::hash::{BuildHasher, Hash};\n\n pub use std::sync::Arc;\n\n pub use std::time::Instant;\n\n }\n\n\n\n #[cfg(feature = \"no_std\")]\n\n mod no_std {\n", "file_path": "src/lib.rs", "rank": 78, "score": 19.143042328014413 }, { "content": "/// An object that allows incrementally constructing rate Limiter\n\n/// objects.\n\npub struct Builder<P, A>\n\nwhere\n\n P: instant::Relative,\n\n A: Algorithm<P> + Sized,\n\n{\n\n capacity: NonZeroU32,\n\n cell_weight: NonZeroU32,\n\n time_unit: Duration,\n\n end_result: PhantomData<A>,\n\n point_result: PhantomData<P>,\n\n}\n\n\n\nimpl<P, A> Builder<P, A>\n\nwhere\n\n P: instant::Relative,\n\n A: Algorithm<P> + Sized,\n\n{\n\n /// Sets the \"weight\" of each cell being checked against the\n", "file_path": "src/state/direct.rs", "rank": 79, "score": 
19.101538227945493 }, { "content": "#![cfg(feature = \"std\")]\n\n\n\nextern crate ratelimit_meter;\n\n#[macro_use]\n\nextern crate nonzero_ext;\n\n\n\nuse ratelimit_meter::{KeyedRateLimiter, GCRA};\n\nuse std::thread;\n\nuse std::time::{Duration, Instant};\n\n\n\n#[test]\n", "file_path": "tests/keyed.rs", "rank": 80, "score": 18.74248605809714 }, { "content": "use crate::algorithms::Algorithm;\n\nuse crate::instant;\n\nuse crate::state::DirectRateLimiter;\n\n\n\n#[derive(Debug)]\n\npub enum Variant {\n\n GCRA,\n\n LeakyBucket,\n\n}\n\n\n\nimpl Variant {\n\n pub const ALL: &'static [Variant; 2] = &[Variant::GCRA, Variant::LeakyBucket];\n\n}\n\n\n\npub struct DirectBucket<A: Algorithm<P>, P: instant::Relative>(DirectRateLimiter<A, P>);\n\nimpl<A, P> Default for DirectBucket<A, P>\n\nwhere\n\n P: instant::Relative,\n\n A: Algorithm<P>,\n\n{\n", "file_path": "src/test_utilities/variants.rs", "rank": 81, "score": 18.619089299560475 }, { "content": " state: A::BucketState,\n\n algorithm: A,\n\n}\n\n\n\nimpl<A, P> DirectRateLimiter<A, P>\n\nwhere\n\n P: instant::Relative,\n\n A: Algorithm<P>,\n\n{\n\n /// Construct a new rate limiter that allows `capacity` cells per\n\n /// time unit through.\n\n /// # Examples\n\n /// You can construct a GCRA rate limiter like so:\n\n /// ```\n\n /// # use std::num::NonZeroU32;\n\n /// # use std::time::Duration;\n\n /// use ratelimit_meter::{DirectRateLimiter, GCRA};\n\n /// # #[macro_use] extern crate nonzero_ext;\n\n /// # extern crate ratelimit_meter;\n\n /// # fn main () {\n", "file_path": "src/state/direct.rs", "rank": 82, "score": 18.442602500029345 }, { "content": "\n\npub mod algorithms;\n\nmod errors;\n\npub mod example_algorithms;\n\npub mod instant;\n\npub mod state;\n\npub mod test_utilities;\n\nmod thread_safety;\n\n\n\n#[macro_use]\n\nextern crate nonzero_ext;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nextern crate alloc;\n\n\n\npub use self::algorithms::LeakyBucket;\n\npub use self::algorithms::NonConformance;\n\npub use 
self::algorithms::GCRA;\n\n\n\npub use self::state::DirectRateLimiter;\n", "file_path": "src/lib.rs", "rank": 83, "score": 18.305046802853873 }, { "content": " /// let _gcra = DirectRateLimiter::<GCRA>::new(nonzero!(100u32), Duration::from_secs(5));\n\n /// # }\n\n /// ```\n\n ///\n\n /// and similarly, for a leaky bucket:\n\n /// ```\n\n /// # use std::time::Duration;\n\n /// use ratelimit_meter::{DirectRateLimiter, LeakyBucket};\n\n /// # #[macro_use] extern crate nonzero_ext;\n\n /// # extern crate ratelimit_meter;\n\n /// # fn main () {\n\n /// let _lb = DirectRateLimiter::<LeakyBucket>::new(nonzero!(100u32), Duration::from_secs(5));\n\n /// # }\n\n /// ```\n\n pub fn new(capacity: NonZeroU32, per_time_unit: Duration) -> Self {\n\n DirectRateLimiter {\n\n state: <A as Algorithm<P>>::BucketState::default(),\n\n algorithm: <A as Algorithm<P>>::construct(capacity, nonzero!(1u32), per_time_unit)\n\n .unwrap(),\n\n }\n", "file_path": "src/state/direct.rs", "rank": 84, "score": 18.244131637078375 }, { "content": "//! An in-memory rate limiter that can make decisions for a single\n\n//! situation.\n\n\n\nuse crate::lib::*;\n\n\n\nuse crate::{\n\n algorithms::{Algorithm, DefaultAlgorithm},\n\n instant, InconsistentCapacity, NegativeMultiDecision,\n\n};\n\n\n\n/// An in-memory rate limiter that makes direct (un-keyed)\n\n/// rate-limiting decisions. Direct rate limiters can be used to\n\n/// e.g. 
regulate the transmission of packets on a single connection,\n\n/// or to ensure that an API client stays within a server's rate\n\n/// limit.\n\n#[derive(Debug, Clone)]\n\npub struct DirectRateLimiter<\n\n A: Algorithm<P> = DefaultAlgorithm,\n\n P: instant::Relative = instant::TimeSource,\n\n> {\n", "file_path": "src/state/direct.rs", "rank": 85, "score": 17.422378766741474 }, { "content": " T: fmt::Debug + Default + Clone + PartialEq + Eq,\n\n{\n\n fn default() -> Self {\n\n ThreadsafeWrapper {\n\n data: Arc::new(Mutex::new(T::default())),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> PartialEq<Self> for ThreadsafeWrapper<T>\n\nwhere\n\n T: fmt::Debug + Default + Clone + PartialEq + Eq,\n\n{\n\n fn eq(&self, other: &Self) -> bool {\n\n let mine = self.data.lock();\n\n let other = other.data.lock();\n\n *other == *mine\n\n }\n\n}\n\n\n", "file_path": "src/thread_safety.rs", "rank": 86, "score": 17.13876654596169 }, { "content": "//! fn sub(self, rhs: Duration) -> Always {\n\n//! self.0 - rhs\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! Then, using that type to create a rate limiter with that time\n\n//! source is a little more verbose. It looks like this:\n\n//!\n\n//! ```rust,ignore\n\n//! let mut lim = DirectRateLimiter::<GCRA<MyInstant>,MyInstant>::per_second(nonzero!(50u32));\n\n//! lim.check().ok();\n\n//! ```\n\n\n\n// Allow using the alloc crate\n\n#![cfg_attr(not(feature = \"std\"), feature(alloc))]\n\n// Allow using ratelimit_meter without std\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n\n// Deny warnings\n\n#![cfg_attr(feature = \"cargo-clippy\", deny(warnings))]\n", "file_path": "src/lib.rs", "rank": 87, "score": 16.400751236142703 }, { "content": "#![doc(hidden)]\n\n//! 
A module for code shared between integration tests & benchmarks in this crate.\n\n\n\npub mod algorithms;\n\npub mod variants;\n\n\n\nuse crate::lib::*;\n\n\n\nuse crate::instant;\n\n\n\n/// Returns a \"current\" moment that's suitable for tests.\n", "file_path": "src/test_utilities.rs", "rank": 88, "score": 16.361968665074052 }, { "content": " cap,\n\n weight.into().unwrap_or(nonzero!(1u32)),\n\n duration\n\n .into()\n\n .unwrap_or(crate::lib::Duration::from_secs(1)),\n\n )\n\n .unwrap(),\n\n PhantomData,\n\n )\n\n }\n\n\n\n pub fn algorithm(&'a self) -> &'a A {\n\n &self.0\n\n }\n\n\n\n pub fn state(&self) -> A::BucketState {\n\n A::BucketState::default()\n\n }\n\n\n\n pub fn check(&self, state: &A::BucketState, t0: P) -> Result<(), A::NegativeDecision> {\n", "file_path": "src/test_utilities/algorithms.rs", "rank": 89, "score": 16.328230948686638 }, { "content": " self.0\n\n }\n\n }\n\n}\n\n#[cfg(feature = \"std\")]\n\npub use self::std::*;\n\n\n\n// I really wish I could just have a function that returns an impl\n\n// Trait that was usable in all the benchmarks, but alas it should not\n\n// be so.\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! bench_with_variants {\n\n ($variant:expr, $var:ident : $bucket:tt, $code:block) => {\n\n match $variant {\n\n $crate::test_utilities::variants::Variant::GCRA => {\n\n let mut $var =\n\n $bucket::<::ratelimit_meter::GCRA<Instant>, Instant>::default().limiter();\n\n $code\n\n }\n", "file_path": "src/test_utilities/variants.rs", "rank": 90, "score": 16.186615322549 }, { "content": "impl<P: instant::Absolute> RateLimitStateWithClock<GCRA<P>, P> for State<P> {\n\n fn last_touched(&self, params: &GCRA<P>) -> P {\n\n let data = self.0.snapshot();\n\n data.0.unwrap_or_else(P::now) + params.tau\n\n }\n\n}\n\n\n\n/// Returned in case of a negative rate-limiting decision. 
Indicates\n\n/// the earliest instant that a cell might get accepted again.\n\n///\n\n/// To avoid thundering herd effects, client code should always add a\n\n/// random amount of jitter to wait time estimates.\n\n#[derive(Debug, PartialEq)]\n\npub struct NotUntil<P: instant::Relative>(P);\n\n\n\nimpl<P: instant::Relative> fmt::Display for NotUntil<P> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"rate-limited until {:?}\", self.0)\n\n }\n\n}\n", "file_path": "src/algorithms/gcra.rs", "rank": 91, "score": 16.13076406813945 }, { "content": " self.0.test_and_update(state, t0)\n\n }\n\n\n\n pub fn check_n(\n\n &self,\n\n state: &A::BucketState,\n\n n: u32,\n\n t0: P,\n\n ) -> Result<(), NegativeMultiDecision<A::NegativeDecision>> {\n\n self.0.test_n_and_update(state, n, t0)\n\n }\n\n}\n\n\n\nimpl<A, P> Default for AlgorithmForTest<A, P>\n\nwhere\n\n A: Algorithm<P>,\n\n P: instant::Relative,\n\n{\n\n fn default() -> Self {\n\n Self::new(nonzero!(1u32), None, None)\n", "file_path": "src/test_utilities/algorithms.rs", "rank": 92, "score": 15.839489085771632 }, { "content": " fn default() -> Builder<K, P, A, RandomState> {\n\n Builder {\n\n end_result: PhantomData,\n\n map_capacity: None,\n\n capacity: nonzero!(1u32),\n\n cell_weight: nonzero!(1u32),\n\n per_time_unit: Duration::from_secs(1),\n\n hasher: RandomState::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<K, P, A, H> Builder<K, P, A, H>\n\nwhere\n\n K: Eq + Hash + Clone,\n\n P: instant::Absolute,\n\n A: Algorithm<P>,\n\n A::BucketState: KeyableRateLimitState<A, P>,\n\n H: BuildHasher,\n\n{\n", "file_path": "src/state/keyed.rs", "rank": 93, "score": 15.638373761470937 }, { "content": "extern crate ratelimit_meter;\n\n#[macro_use]\n\nextern crate nonzero_ext;\n\n\n\nuse ratelimit_meter::{\n\n algorithms::Algorithm, test_utilities::current_moment, NegativeMultiDecision, NonConformance,\n\n GCRA,\n\n};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\n#[test]\n", "file_path": 
"tests/gcra.rs", "rank": 94, "score": 15.488252300536889 }, { "content": " }\n\n\n\n /// Tests whether a single cell can be accommodated at the given\n\n /// time stamp. See [`check`](#method.check).\n\n pub fn check_at(&mut self, at: P) -> Result<(), <A as Algorithm<P>>::NegativeDecision> {\n\n self.algorithm.test_and_update(&self.state, at)\n\n }\n\n\n\n /// Tests if `n` cells can be accommodated at the given time\n\n /// (`Instant::now()`), using [`check_n`](#method.check_n)\n\n pub fn check_n_at(\n\n &mut self,\n\n n: u32,\n\n at: P,\n\n ) -> Result<(), NegativeMultiDecision<<A as Algorithm<P>>::NegativeDecision>> {\n\n self.algorithm.test_n_and_update(&self.state, n, at)\n\n }\n\n}\n\n\n\nimpl<A, P> DirectRateLimiter<A, P>\n", "file_path": "src/state/direct.rs", "rank": 95, "score": 15.36191100649631 }, { "content": "/// # use std::time::Duration;\n\n/// use ratelimit_meter::{KeyedRateLimiter};\n\n/// # #[macro_use] extern crate nonzero_ext;\n\n/// # extern crate ratelimit_meter;\n\n/// # fn main () {\n\n/// let mut limiter = KeyedRateLimiter::<&str>::new(nonzero!(1u32), Duration::from_secs(5));\n\n/// assert_eq!(Ok(()), limiter.check(\"customer1\")); // allowed!\n\n/// assert_ne!(Ok(()), limiter.check(\"customer1\")); // ...but now customer1 must wait 5 seconds.\n\n///\n\n/// assert_eq!(Ok(()), limiter.check(\"customer2\")); // it's customer2's first request!\n\n/// # }\n\n/// ```\n\n///\n\n/// # Expiring old keys\n\n/// If a key has not been checked in a long time, that key can be\n\n/// expired safely (the next rate limit check for that key would\n\n/// behave as if the key was not present in the map, after all). 
To\n\n/// remove the unused keys and free up space, use the\n\n/// [`cleanup`](method.cleanup) method:\n\n///\n", "file_path": "src/state/keyed.rs", "rank": 96, "score": 15.322748381475622 }, { "content": "extern crate ratelimit_meter;\n\n#[macro_use]\n\nextern crate nonzero_ext;\n\n\n\nuse ratelimit_meter::{\n\n algorithms::Algorithm, test_utilities::current_moment, DirectRateLimiter, LeakyBucket,\n\n NegativeMultiDecision, NonConformance,\n\n};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\n#[test]\n", "file_path": "tests/leaky_bucket.rs", "rank": 97, "score": 15.094237116391549 }, { "content": "pub mod gcra;\n\npub mod leaky_bucket;\n\n\n\npub use self::gcra::*;\n\npub use self::leaky_bucket::*;\n\n\n\nuse crate::{instant, InconsistentCapacity, NegativeMultiDecision};\n\n\n\nuse crate::lib::*;\n\n\n\n/// The default rate limiting algorithm in this crate: The [\"leaky\n\n/// bucket\"](leaky_bucket/struct.LeakyBucket.html).\n\n///\n\n/// The leaky bucket algorithm is fairly easy to understand and has\n\n/// decent performance in most cases. 
If better threaded performance\n\n/// is needed, this crate also offers the\n\n/// [`GCRA`](gcra/struct.GCRA.html) algorithm.\n\npub type DefaultAlgorithm = LeakyBucket;\n\n\n\n/// Provides additional information about non-conforming cells, most\n\n/// importantly the earliest time until the next cell could be\n\n/// considered conforming.\n\n///\n\n/// Since this does not account for effects like thundering herds,\n\n/// users should always add random jitter to the times given.\n", "file_path": "src/algorithms.rs", "rank": 98, "score": 14.763519196033764 }, { "content": "use std::time::Duration;\n\n\n\nuse ratelimit_meter::example_algorithms::{Allower, Always};\n\nuse ratelimit_meter::instant::Absolute;\n\nuse ratelimit_meter::test_utilities::algorithms::AlgorithmForTest;\n\n\n\nuse criterion::{black_box, Benchmark, Criterion, Throughput};\n\n\n", "file_path": "benches/no_op.rs", "rank": 99, "score": 14.701144375495856 } ]
Rust
languages/idl_gen/src/rust/con_idl.rs
adrianos42/native_idl
688de924e1e2244719a33aba40aae8b9dd10ede9
use idl::idl_nodes::*; use proc_macro2::{self, TokenStream}; use quote::format_ident; pub(crate) fn get_rust_ty_ref(ty: &TypeName, references: bool) -> TokenStream { match ty { TypeName::Types(types) => match types { Types::NatInt => quote! { i64 }, Types::NatFloat => quote! { f64 }, Types::NatString => quote! { String }, Types::NatBytes => quote! { Vec<u8> }, Types::NatBool => quote! { bool }, Types::NatUUID => quote! { Uuid }, Types::NatNone => quote! { () }, }, TypeName::TypeFunction(value) => { let args = get_rust_ty_ref(&value.args, references); let ret = get_rust_ty_ref(&value.return_ty, references); quote! { #args -> #ret } } TypeName::TypeTuple(value) => { let mut fields_t = vec![]; for ty in &value.fields { let ident = format_ident!("{}", &ty.ident); let ty_ident = get_rust_ty_ref(&ty.ty, references); fields_t.push(quote! { #ident: #ty_ident }) } let fields = fields_t.into_iter(); quote! { ( #( #fields ),* ) } } TypeName::TypeArray(value) => { let ty = get_rust_ty_ref(&value.ty, references); quote! { Vec<#ty> } } TypeName::TypeMap(value) => { let ty = get_rust_ty_ref(&value.map_ty, references); let index_ty = get_rust_ty_ref(&value.index_ty, references); quote! { ::std::collections::HashMap<#index_ty, #ty> } } TypeName::TypeResult(value) => { let ok_ty = get_rust_ty_ref(&value.ok_ty, references); let err_ty = get_rust_ty_ref(&value.err_ty, references); quote! { Result<#ok_ty, #err_ty> } } TypeName::TypePair(value) => { let first_ty = get_rust_ty_ref(&value.first_ty, references); let second_ty = get_rust_ty_ref(&value.second_ty, references); quote! { (#first_ty, #second_ty) } } TypeName::TypeOption(value) => { let some_ty = get_rust_ty_ref(&value.some_ty, references); quote! { Option<#some_ty> } } TypeName::ListTypeName(value) | TypeName::EnumTypeName(value) | TypeName::StructTypeName(value) | TypeName::ConstTypeName(value) => { let ident = format_ident!("{}", &value); if references { quote! { super::#ident } } else { quote! 
{ #ident } } } TypeName::TypeStream(_) => { quote! { Box<dyn StreamInstance + Send + Sync> } } TypeName::InterfaceTypeName(value) => { let ident = format_ident!("{}Instance", value); quote! { Box<dyn super::idl_impl::#ident> } } } } pub(crate) fn get_rust_ty_name(ty: &TypeName) -> String { match ty { TypeName::Types(types) => match types { Types::NatInt => "i64".to_owned(), Types::NatFloat => "f64".to_owned(), Types::NatString => "String".to_owned(), Types::NatBytes => "Vecu8".to_owned(), Types::NatBool => "bool".to_owned(), Types::NatUUID => "Uuid".to_owned(), Types::NatNone => "none".to_owned(), }, TypeName::TypeFunction(value) => { let args = get_rust_ty_name(&value.args); let ret = get_rust_ty_name(&value.return_ty); format!("Func{}Ret{}_", args, ret) } TypeName::TypeTuple(value) => { let mut fields_t = String::new(); for ty in &value.fields { let ty_ident = get_rust_ty_name(&ty.ty); fields_t.push_str(&ty_ident); } format!("Args{}_", fields_t) } TypeName::TypeArray(value) => { let ty = get_rust_ty_name(&value.ty); format!("Vec{}_", ty) } TypeName::TypeMap(value) => { let ty = get_rust_ty_name(&value.map_ty); let index_ty = get_rust_ty_name(&value.index_ty); format!("Map{}{}_", ty, index_ty) } TypeName::TypeResult(value) => { let ok_ty = get_rust_ty_name(&value.ok_ty); let err_ty = get_rust_ty_name(&value.err_ty); format!("Result{}{}_", ok_ty, err_ty) } TypeName::TypePair(value) => { let first_ty = get_rust_ty_name(&value.first_ty); let second_ty = get_rust_ty_name(&value.second_ty); format!("Pair{}{}_", first_ty, second_ty) } TypeName::TypeOption(value) => { let some_ty = get_rust_ty_name(&value.some_ty); format!("Option{}_", some_ty) } TypeName::ListTypeName(value) | TypeName::EnumTypeName(value) | TypeName::StructTypeName(value) | TypeName::ConstTypeName(value) => value.to_owned(), TypeName::TypeStream(value) => { let stream_ty = get_rust_ty_name(&value.s_ty); format!("Stream{}_", stream_ty) } TypeName::InterfaceTypeName(value) => value.to_owned(), } }
use idl::idl_nodes::*; use proc_macro2::{self, TokenStream}; use quote::format_ident; pub(crate) fn get_rust_ty_ref(ty: &TypeName, references: bool) -> TokenStream { match ty { TypeName::Types(types) => match types { Types::NatInt => quote! { i64 }, Types::NatFloat => quote! { f64 }, Types::NatString => quote! { String }, Types::NatBytes => quote! { Vec<u8> }, Types::NatBool => quote! { bool }, Types::NatUUID => quote! { Uuid }, Types::NatNone => quote! { () }, }, TypeName::TypeFunction(value) => { let args = get_rust_ty_ref(&value.args, references); let ret = get_rust_ty_ref(&value.return_ty, references); quote! { #args -> #ret } } TypeName::TypeTuple(value) => { let mut fields_t = vec![]; for ty in &value.fields { let ident = format_ident!("{}", &ty.ident); let ty_ident = get_rust_ty_ref(&ty.ty, references); fields_t.push(quote! { #ident: #ty_ident }) } let fields = fields_t.into_iter(); quote! { ( #( #fields ),* ) } } TypeName::TypeArray(value) => { let ty = get_rust_ty_ref(&value.ty, references); quote! { Vec<#ty> } } TypeName::TypeMap(value) => { let ty = get_rust_ty_ref(&value.map_ty, references); let index_ty = get_rust_ty_ref(&value.index_ty, references); quote! { ::std::collections::HashMap<#index_ty, #ty> } } TypeName::TypeResult(value) => { let ok_ty = get_rust_ty_ref(&value.ok_ty, references); let err_ty = get_rust_ty_ref(&value.err_ty, references); quote! { Result<#ok_ty, #err_ty> } } TypeName::TypePair(value) => { let first_ty = get_rust_ty_ref(&value.first_ty, references); let second_ty = get_rust_ty_ref(&value.second_ty, references); quote! { (#first_ty, #second_ty) } } TypeName::TypeOption(value) => { let some_ty = get_rust_ty_ref(&value.some_ty, references); quote! { Option<#some_ty> } } TypeName::ListTypeName(value) | TypeName::EnumTypeName(value) | TypeName::StructTypeName(value) | TypeName::ConstTypeName(
TypeName::InterfaceTypeName(value) => { let ident = format_ident!("{}Instance", value); quote! { Box<dyn super::idl_impl::#ident> } } } } pub(crate) fn get_rust_ty_name(ty: &TypeName) -> String { match ty { TypeName::Types(types) => match types { Types::NatInt => "i64".to_owned(), Types::NatFloat => "f64".to_owned(), Types::NatString => "String".to_owned(), Types::NatBytes => "Vecu8".to_owned(), Types::NatBool => "bool".to_owned(), Types::NatUUID => "Uuid".to_owned(), Types::NatNone => "none".to_owned(), }, TypeName::TypeFunction(value) => { let args = get_rust_ty_name(&value.args); let ret = get_rust_ty_name(&value.return_ty); format!("Func{}Ret{}_", args, ret) } TypeName::TypeTuple(value) => { let mut fields_t = String::new(); for ty in &value.fields { let ty_ident = get_rust_ty_name(&ty.ty); fields_t.push_str(&ty_ident); } format!("Args{}_", fields_t) } TypeName::TypeArray(value) => { let ty = get_rust_ty_name(&value.ty); format!("Vec{}_", ty) } TypeName::TypeMap(value) => { let ty = get_rust_ty_name(&value.map_ty); let index_ty = get_rust_ty_name(&value.index_ty); format!("Map{}{}_", ty, index_ty) } TypeName::TypeResult(value) => { let ok_ty = get_rust_ty_name(&value.ok_ty); let err_ty = get_rust_ty_name(&value.err_ty); format!("Result{}{}_", ok_ty, err_ty) } TypeName::TypePair(value) => { let first_ty = get_rust_ty_name(&value.first_ty); let second_ty = get_rust_ty_name(&value.second_ty); format!("Pair{}{}_", first_ty, second_ty) } TypeName::TypeOption(value) => { let some_ty = get_rust_ty_name(&value.some_ty); format!("Option{}_", some_ty) } TypeName::ListTypeName(value) | TypeName::EnumTypeName(value) | TypeName::StructTypeName(value) | TypeName::ConstTypeName(value) => value.to_owned(), TypeName::TypeStream(value) => { let stream_ty = get_rust_ty_name(&value.s_ty); format!("Stream{}_", stream_ty) } TypeName::InterfaceTypeName(value) => value.to_owned(), } }
value) => { let ident = format_ident!("{}", &value); if references { quote! { super::#ident } } else { quote! { #ident } } } TypeName::TypeStream(_) => { quote! { Box<dyn StreamInstance + Send + Sync> } }
function_block-random_span
[]
Rust
src/mqtt/listener.rs
Galhad/ratelmq
15395108681aa97b861d0364263ac7cd123d2b11
use crate::mqtt::events::{ClientEvent, ServerEvent}; use crate::mqtt::packets::ControlPacket; use crate::mqtt::transport::mqtt_bytes_stream::{MqttBytesReadStream, MqttBytesWriteStream}; use crate::mqtt::transport::packet_decoder::read_packet; use crate::mqtt::transport::packet_encoder::write_packet; use log::{debug, error, info, trace, warn}; use std::net::SocketAddr; use tokio::io::Error; use tokio::net::{TcpListener, TcpStream}; use tokio::select; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::{broadcast, mpsc}; pub struct MqttListener { listener: TcpListener, client_event_tx: mpsc::Sender<ClientEvent>, ctrl_c_rx: broadcast::Receiver<()>, } impl MqttListener { pub async fn bind( address: &str, client_event_tx: mpsc::Sender<ClientEvent>, ctrl_c_rx: broadcast::Receiver<()>, ) -> Result<MqttListener, Error> { debug!("Binding MQTT TCP to {}", &address); let listener = TcpListener::bind(address).await.unwrap(); info!("Listening for MQTT TCP connections on {}", &address); let mqtt_listener = MqttListener { listener, client_event_tx, ctrl_c_rx, }; Ok(mqtt_listener) } pub async fn start_accepting(mut self) { loop { select! 
{ _ = self.ctrl_c_rx.recv() => { trace!("Stopping listener"); break; } _ = Self::accept(&self.listener, &self.client_event_tx) => {} } } } async fn accept(listener: &TcpListener, client_event_tx: &mpsc::Sender<ClientEvent>) { match listener.accept().await { Ok((socket, address)) => { let client_event_tx = client_event_tx.clone(); tokio::spawn(async move { Self::handle_connection(socket, client_event_tx, address).await; }); } Err(e) => { error!("Error while accepting connection: {:?}", e); } } } async fn handle_connection( socket: TcpStream, client_event_tx: Sender<ClientEvent>, address: SocketAddr, ) { let (tcp_read, tcp_write) = socket.into_split(); let (server_event_tx, server_event_rx) = mpsc::channel(32); let mut write_stream = MqttBytesWriteStream::new(4096, tcp_write); tokio::spawn(async move { Self::connection_write_loop(server_event_rx, &mut write_stream).await; }); let mut read_stream = MqttBytesReadStream::new(4096, tcp_read); tokio::spawn(async move { Self::connection_read_loop(client_event_tx, server_event_tx, &mut read_stream, address) .await; }); } async fn connection_read_loop( client_event_tx: Sender<ClientEvent>, server_event_tx: Sender<ServerEvent>, mut read_stream: &mut MqttBytesReadStream, address: SocketAddr, ) { let client_id; if let Ok(packet) = read_packet(&mut read_stream).await { trace!("Read the first packet: {:?}", &packet); if let ControlPacket::Connect(c) = packet { client_id = c.client_id.clone(); let event = ClientEvent::Connected(c, address, server_event_tx.clone()); if let Err(e) = client_event_tx.send(event).await { error!("Error while sending client event to be processed: {}", &e); } } else { warn!("The first received packet is not CONNECT"); return; } } else { return; } while let Ok(packet) = read_packet(&mut read_stream).await { trace!("Read packet: {:?}", &packet); let event = ClientEvent::ControlPacket(client_id.clone(), packet, server_event_tx.clone()); if let Err(e) = client_event_tx.send(event).await { error!("Error while 
sending client event to be processed: {}", &e); } } trace!("Client read task ended"); } async fn connection_write_loop( mut server_event_rx: Receiver<ServerEvent>, mut write_stream: &mut MqttBytesWriteStream, ) { while let Some(event) = server_event_rx.recv().await { trace!("Received server event: {:?}", &event); match event { ServerEvent::ControlPacket(packet) => { trace!("Writing packet: {:?}", &packet); if let Err(e) = write_packet(&mut write_stream, packet).await { error!("Error while writing packet: {:?}", &e); } } ServerEvent::Disconnect => { break; } } } trace!("Client write task ended"); } }
use crate::mqtt::events::{ClientEvent, ServerEvent}; use crate::mqtt::packets::ControlPacket; use crate::mqtt::transport::mqtt_bytes_stream::{MqttBytesReadStream, MqttBytesWriteStream}; use crate::mqtt::transport::packet_decoder::read_packet; use crate::mqtt::transport::packet_encoder::write_packet; use log::{debug, error, info, trace, warn}; use std::net::SocketAddr; use tokio::io::Error; use tokio::net::{TcpListener, TcpStream}; use tokio::select; use toki
_tx = client_event_tx.clone(); tokio::spawn(async move { Self::handle_connection(socket, client_event_tx, address).await; }); } Err(e) => { error!("Error while accepting connection: {:?}", e); } } } async fn handle_connection( socket: TcpStream, client_event_tx: Sender<ClientEvent>, address: SocketAddr, ) { let (tcp_read, tcp_write) = socket.into_split(); let (server_event_tx, server_event_rx) = mpsc::channel(32); let mut write_stream = MqttBytesWriteStream::new(4096, tcp_write); tokio::spawn(async move { Self::connection_write_loop(server_event_rx, &mut write_stream).await; }); let mut read_stream = MqttBytesReadStream::new(4096, tcp_read); tokio::spawn(async move { Self::connection_read_loop(client_event_tx, server_event_tx, &mut read_stream, address) .await; }); } async fn connection_read_loop( client_event_tx: Sender<ClientEvent>, server_event_tx: Sender<ServerEvent>, mut read_stream: &mut MqttBytesReadStream, address: SocketAddr, ) { let client_id; if let Ok(packet) = read_packet(&mut read_stream).await { trace!("Read the first packet: {:?}", &packet); if let ControlPacket::Connect(c) = packet { client_id = c.client_id.clone(); let event = ClientEvent::Connected(c, address, server_event_tx.clone()); if let Err(e) = client_event_tx.send(event).await { error!("Error while sending client event to be processed: {}", &e); } } else { warn!("The first received packet is not CONNECT"); return; } } else { return; } while let Ok(packet) = read_packet(&mut read_stream).await { trace!("Read packet: {:?}", &packet); let event = ClientEvent::ControlPacket(client_id.clone(), packet, server_event_tx.clone()); if let Err(e) = client_event_tx.send(event).await { error!("Error while sending client event to be processed: {}", &e); } } trace!("Client read task ended"); } async fn connection_write_loop( mut server_event_rx: Receiver<ServerEvent>, mut write_stream: &mut MqttBytesWriteStream, ) { while let Some(event) = server_event_rx.recv().await { trace!("Received server event: 
{:?}", &event); match event { ServerEvent::ControlPacket(packet) => { trace!("Writing packet: {:?}", &packet); if let Err(e) = write_packet(&mut write_stream, packet).await { error!("Error while writing packet: {:?}", &e); } } ServerEvent::Disconnect => { break; } } } trace!("Client write task ended"); } }
o::sync::mpsc::{Receiver, Sender}; use tokio::sync::{broadcast, mpsc}; pub struct MqttListener { listener: TcpListener, client_event_tx: mpsc::Sender<ClientEvent>, ctrl_c_rx: broadcast::Receiver<()>, } impl MqttListener { pub async fn bind( address: &str, client_event_tx: mpsc::Sender<ClientEvent>, ctrl_c_rx: broadcast::Receiver<()>, ) -> Result<MqttListener, Error> { debug!("Binding MQTT TCP to {}", &address); let listener = TcpListener::bind(address).await.unwrap(); info!("Listening for MQTT TCP connections on {}", &address); let mqtt_listener = MqttListener { listener, client_event_tx, ctrl_c_rx, }; Ok(mqtt_listener) } pub async fn start_accepting(mut self) { loop { select! { _ = self.ctrl_c_rx.recv() => { trace!("Stopping listener"); break; } _ = Self::accept(&self.listener, &self.client_event_tx) => {} } } } async fn accept(listener: &TcpListener, client_event_tx: &mpsc::Sender<ClientEvent>) { match listener.accept().await { Ok((socket, address)) => { let client_event
random
[ { "content": "#[derive(Debug)]\n\npub struct BuildInfo {\n\n pub version: &'static str,\n\n pub commit_hash: &'static str,\n\n}\n\n\n\npub const BUILD_INFO: BuildInfo = BuildInfo {\n\n version: env!(\"CARGO_PKG_VERSION\"),\n\n commit_hash: env!(\"BUILD_GIT_HASH\"),\n\n};\n", "file_path": "src/config/build_info.rs", "rank": 0, "score": 29942.31485936127 }, { "content": "fn validate_first_byte(actual: u8, expected: u8) -> Result<(), Error> {\n\n if actual == expected {\n\n Ok(())\n\n } else {\n\n Err(tokio::io::Error::new(\n\n ErrorKind::InvalidData,\n\n \"Malformed fixed header\",\n\n ))\n\n }\n\n}\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 1, "score": 28100.719076282097 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse log::{debug, error, info, trace};\n\nuse tokio::select;\n\nuse tokio::sync::mpsc::{Receiver, Sender};\n\nuse tokio::sync::{broadcast, mpsc};\n\n\n\nuse crate::broker::authentication::{FileIdentityManager, IdentityProvider};\n\nuse crate::broker::messaging::MessagingService;\n\nuse crate::broker::session::Session;\n\nuse crate::mqtt::events::{ClientEvent, ServerEvent};\n\nuse crate::mqtt::packets::connack::ConnAckReturnCode;\n\nuse crate::mqtt::packets::suback::SubAckPacket;\n\nuse crate::mqtt::packets::subscribe::SubscribePacket;\n\nuse crate::mqtt::packets::unsuback::UnSubAckPacket;\n\nuse crate::mqtt::packets::unsubscribe::UnsubscribePacket;\n\nuse crate::mqtt::packets::ControlPacket::{ConnAck, PingResp, SubAck, UnsubAck};\n\nuse crate::mqtt::packets::*;\n\nuse crate::settings::Settings;\n\n\n", "file_path": "src/broker/manager.rs", "rank": 3, "score": 8.31002326166239 }, { "content": "use crate::broker::authentication::FileIdentityManager;\n\nuse crate::broker::manager::Manager;\n\nuse crate::config::build_info::BUILD_INFO;\n\nuse crate::mqtt::listener::MqttListener;\n\nuse crate::settings::Settings;\n\nuse futures::future::join_all;\n\nuse log::{debug, info};\n\nuse tokio::signal;\n\nuse tokio::sync::broadcast;\n\nuse 
tokio::sync::mpsc;\n\n\n\npub async fn run(config_filename: &str) {\n\n info!(\n\n \"Initializing RatelMQ v{} ({})...\",\n\n BUILD_INFO.version,\n\n &BUILD_INFO.commit_hash[..10]\n\n );\n\n\n\n debug!(\"Using configuration file {}\", config_filename);\n\n let settings = Settings::new(config_filename).unwrap();\n", "file_path": "src/application.rs", "rank": 4, "score": 8.05011268131039 }, { "content": "use bytes::{Buf, BufMut, BytesMut};\n\nuse log::trace;\n\nuse tokio::io::{AsyncReadExt, AsyncWriteExt, Error, ErrorKind};\n\nuse tokio::net::tcp::{OwnedReadHalf, OwnedWriteHalf};\n\nuse tokio::net::TcpStream;\n\n\n\npub struct MqttBytesStream {\n\n read_buffer: BytesMut,\n\n write_buffer: BytesMut,\n\n tcp_stream: TcpStream,\n\n}\n\n\n\npub struct MqttBytesWriteStream {\n\n write_buffer: BytesMut,\n\n write_stream: OwnedWriteHalf,\n\n}\n\n\n\npub struct MqttBytesReadStream {\n\n read_buffer: BytesMut,\n\n read_stream: OwnedReadHalf,\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 5, "score": 7.640292745878708 }, { "content": "// pub use self::build_info::*;\n\npub mod build_info;\n", "file_path": "src/config/mod.rs", "rank": 6, "score": 7.466998155383734 }, { "content": "use crate::broker::authentication::AuthenticationError::InvalidPassword;\n\nuse crate::broker::authentication::FileIdentityManagerError::InvalidEntry;\n\nuse std::collections::HashMap;\n\nuse std::io::Error;\n\nuse AuthenticationError::UserNotFound;\n\n\n\n#[derive(Debug)]\n\npub enum AuthenticationError {\n\n // UserNotFound(UserNotFound),\n\n // InvalidPassword(InvalidPassword),\n\n UserNotFound,\n\n InvalidPassword,\n\n}\n\n\n\n// #[derive(Debug, Clone)]\n\n// pub struct UserNotFound {\n\n// // pub username: &'static str,\n\n// }\n\n//\n\n// #[derive(Debug, Clone)]\n\n// pub struct InvalidPassword;\n\n\n", "file_path": "src/broker/authentication.rs", "rank": 7, "score": 7.305210950702282 }, { "content": "use clap::{App, Arg};\n\nuse dotenv::dotenv;\n\n\n\nuse 
ratelmq::config::build_info::BUILD_INFO;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n dotenv().ok();\n\n env_logger::init();\n\n\n\n let version = format!(\"v{}({})\", BUILD_INFO.version, &BUILD_INFO.commit_hash[..10]);\n\n\n\n let argument_name_config = \"config\";\n\n\n\n let arguments = App::new(\"RatelMQ\")\n\n .version(version.as_str())\n\n .about(\"Efficient, reliable & scalable MQTT broker.\")\n\n .arg(\n\n Arg::new(argument_name_config)\n\n .short('c')\n", "file_path": "src/main.rs", "rank": 9, "score": 7.026954965850307 }, { "content": "use async_trait::async_trait;\n\nuse tokio::io::Error;\n\n\n\nuse crate::mqtt::transport::packet_decoder::PacketDecoder;\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct DisconnectPacket {}\n\n\n\n#[async_trait]\n\nimpl PacketDecoder for DisconnectPacket {\n\n fn parse_fixed_header_flags(&mut self, _: u8) -> Result<(), Error> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/mqtt/packets/disconnect.rs", "rank": 10, "score": 6.563184976559407 }, { "content": " Ok(self.read_buffer.get_u8())\n\n }\n\n\n\n pub async fn get_u16(&mut self) -> Result<u16, Error> {\n\n self.wait_for_data(2).await?;\n\n\n\n Ok(self.read_buffer.get_u16())\n\n }\n\n\n\n pub async fn get_string(&mut self) -> Result<String, Error> {\n\n trace!(\"Parsing string size\");\n\n let string_size = self.get_u16().await? 
as usize;\n\n trace!(\"String size: {} ({:#04x})\", string_size, string_size);\n\n trace!(\"Parsing string buf\");\n\n let str_buf = self.get_bytes(string_size).await?;\n\n\n\n let str = std::str::from_utf8(str_buf.as_ref()).map_err(|e| {\n\n Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 11, "score": 6.456973949542382 }, { "content": " trace!(\"String size: {} ({:#04x})\", string_size, string_size);\n\n trace!(\"Parsing string buf\");\n\n let str_buf = self.get_bytes(string_size).await?;\n\n\n\n let str = std::str::from_utf8(str_buf.as_ref()).map_err(|e| {\n\n Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\n\n \"Invalid UTF8 String: error at position {}\",\n\n e.valid_up_to() + 1\n\n ),\n\n )\n\n })?;\n\n\n\n Ok(String::from(str))\n\n }\n\n\n\n pub async fn get_bytes(&mut self, size: usize) -> Result<BytesMut, Error> {\n\n let mut bytes = BytesMut::with_capacity(size);\n\n\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 12, "score": 6.318806721891911 }, { "content": "use crate::mqtt::packets::puback::PubAckPacket;\n\nuse crate::mqtt::packets::pubcomp::PubCompPacket;\n\nuse crate::mqtt::packets::pubrec::PubRecPacket;\n\nuse crate::mqtt::packets::pubrel::PubRelPacket;\n\nuse crate::mqtt::packets::subscribe::SubscribePacket;\n\nuse crate::mqtt::packets::unsubscribe::UnsubscribePacket;\n\nuse crate::mqtt::packets::{\n\n ConnectPacket, ControlPacket, DisconnectPacket, ProtocolVersion, PublishPacket, QoS,\n\n};\n\nuse crate::mqtt::packets::{\n\n PACKET_TYPE_CONNECT, PACKET_TYPE_DISCONNECT, PACKET_TYPE_PING_REQ, PACKET_TYPE_PUBLISH,\n\n PACKET_TYPE_PUB_ACK, PACKET_TYPE_PUB_COMP, PACKET_TYPE_PUB_REC, PACKET_TYPE_PUB_REL,\n\n PACKET_TYPE_SUBSCRIBE, PACKET_TYPE_UNSUBSCRIBE,\n\n};\n\nuse crate::mqtt::subscription::Subscription;\n\nuse crate::mqtt::transport::mqtt_bytes_stream::MqttBytesReadStream;\n\nuse async_trait::async_trait;\n\nuse bitflags::bitflags;\n\nuse 
tokio::io::{Error, ErrorKind};\n\n\n\n#[async_trait]\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 14, "score": 5.881202332791689 }, { "content": "use log::warn;\n\n\n\nuse crate::broker::messaging::subscriptions_repository::SubscriptionsRepository;\n\nuse crate::broker::session::session_repository::SessionRepository;\n\nuse crate::broker::session::{InMemorySessionRepository, Session};\n\nuse crate::mqtt::events::ServerEvent;\n\nuse crate::mqtt::message::Message;\n\nuse crate::mqtt::packets::suback::SubAckReturnCode;\n\nuse crate::mqtt::packets::ControlPacket::Publish;\n\nuse crate::mqtt::packets::{ClientId, PublishPacket};\n\nuse crate::mqtt::subscription::Subscription;\n\n\n\npub struct MessagingService {\n\n sessions: InMemorySessionRepository,\n\n subscriptions: SubscriptionsRepository,\n\n}\n\n\n\nimpl MessagingService {\n\n pub fn new() -> Self {\n\n MessagingService {\n", "file_path": "src/broker/messaging/messaging_service.rs", "rank": 15, "score": 5.750015083155831 }, { "content": " read_buffer: BytesMut::with_capacity(buffer_size),\n\n read_stream,\n\n }\n\n }\n\n\n\n pub async fn get_u8(&mut self) -> Result<u8, Error> {\n\n self.wait_for_data(1).await?;\n\n\n\n Ok(self.read_buffer.get_u8())\n\n }\n\n\n\n pub async fn get_u16(&mut self) -> Result<u16, Error> {\n\n self.wait_for_data(2).await?;\n\n\n\n Ok(self.read_buffer.get_u16())\n\n }\n\n\n\n pub async fn get_string(&mut self) -> Result<String, Error> {\n\n trace!(\"Parsing string size\");\n\n let string_size = self.get_u16().await? 
as usize;\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 16, "score": 5.190599357919163 }, { "content": " \"Invalid UTF8 String: error at position {}\",\n\n e.valid_up_to() + 1\n\n ),\n\n )\n\n })?;\n\n\n\n Ok(String::from(str))\n\n }\n\n\n\n pub async fn get_bytes(&mut self, size: usize) -> Result<BytesMut, Error> {\n\n let mut bytes = BytesMut::with_capacity(size);\n\n\n\n let mut remaining_length = size;\n\n\n\n while remaining_length > 0 {\n\n // todo: use clamp when stabilized\n\n let wait_for_bytes = {\n\n if remaining_length > self.read_buffer.capacity() {\n\n self.read_buffer.capacity()\n\n } else {\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 17, "score": 5.133721816412278 }, { "content": "use async_trait::async_trait;\n\nuse bitflags::bitflags;\n\nuse tokio::io::Error;\n\n\n\nuse crate::mqtt::packets::puback::PubAckPacket;\n\nuse crate::mqtt::packets::pubcomp::PubCompPacket;\n\nuse crate::mqtt::packets::pubrec::PubRecPacket;\n\nuse crate::mqtt::packets::pubrel::PubRelPacket;\n\nuse crate::mqtt::packets::suback::SubAckPacket;\n\nuse crate::mqtt::packets::unsuback::UnSubAckPacket;\n\nuse crate::mqtt::packets::{\n\n ConnAckPacket, ControlPacket, PublishPacket, QoS, PACKET_TYPE_CONN_ACK, PACKET_TYPE_PING_RESP,\n\n PACKET_TYPE_PUBLISH, PACKET_TYPE_PUB_ACK, PACKET_TYPE_PUB_COMP, PACKET_TYPE_PUB_REC,\n\n PACKET_TYPE_PUB_REL, PACKET_TYPE_SUB_ACK, PACKET_TYPE_UNSUB_ACK,\n\n};\n\nuse crate::mqtt::transport::mqtt_bytes_stream::{MqttBytesStream, MqttBytesWriteStream};\n\n\n\n#[async_trait]\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 18, "score": 5.129927965350056 }, { "content": "use config::{Config, ConfigError, Environment, File, FileFormat};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct MqttSettings {\n\n pub listeners_tcp: Vec<String>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct AuthenticationSettings {\n\n pub password_file: 
String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Settings {\n\n pub mqtt: MqttSettings,\n\n pub authentication: AuthenticationSettings,\n\n}\n\n\n\nimpl Settings {\n", "file_path": "src/settings/mod.rs", "rank": 19, "score": 5.078869965692634 }, { "content": "pub use crate::mqtt::packets::connack::ConnAckPacket;\n\npub use crate::mqtt::packets::connect::ConnectPacket;\n\npub use crate::mqtt::packets::disconnect::DisconnectPacket;\n\nuse crate::mqtt::packets::puback::PubAckPacket;\n\nuse crate::mqtt::packets::pubcomp::PubCompPacket;\n\npub use crate::mqtt::packets::publish::PublishPacket;\n\nuse crate::mqtt::packets::pubrec::PubRecPacket;\n\nuse crate::mqtt::packets::pubrel::PubRelPacket;\n\nuse crate::mqtt::packets::suback::SubAckPacket;\n\nuse crate::mqtt::packets::subscribe::SubscribePacket;\n\nuse crate::mqtt::packets::unsuback::UnSubAckPacket;\n\nuse crate::mqtt::packets::unsubscribe::UnsubscribePacket;\n\nuse crate::mqtt::packets::ControlPacket::{\n\n ConnAck, Connect, Disconnect, PingReq, PingResp, PubAck, PubComp, PubRec, PubRel, Publish,\n\n SubAck, Subscribe, UnsubAck, Unsubscribe,\n\n};\n\nuse crate::mqtt::packets::ProtocolVersion::Mqtt3;\n\nuse crate::mqtt::packets::QoS::{AtLeastOnce, AtMostOnce, ExactlyOnce};\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n", "file_path": "src/mqtt/packets/mod.rs", "rank": 21, "score": 4.104120113332452 }, { "content": "use crate::broker::session::Session;\n\nuse crate::mqtt::packets::ClientId;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "src/broker/session/session_repository.rs", "rank": 22, "score": 4.0604927596834575 }, { "content": "use bytes::BytesMut;\n\nuse tokio::io::AsyncReadExt;\n\nuse tokio::net::{TcpListener, TcpStream};\n\n\n\nuse ratelmq::mqtt::packets::puback::PubAckPacket;\n\nuse ratelmq::mqtt::packets::pubcomp::PubCompPacket;\n\nuse ratelmq::mqtt::packets::pubrec::PubRecPacket;\n\nuse ratelmq::mqtt::packets::pubrel::PubRelPacket;\n\nuse 
ratelmq::mqtt::packets::suback::{SubAckPacket, SubAckReturnCode};\n\nuse ratelmq::mqtt::packets::unsuback::UnSubAckPacket;\n\nuse ratelmq::mqtt::packets::{ConnAckPacket, ControlPacket, PublishPacket, QoS};\n\nuse ratelmq::mqtt::transport::mqtt_bytes_stream::MqttBytesWriteStream;\n\nuse ratelmq::mqtt::transport::packet_encoder;\n\n\n\n#[tokio::test]\n\nasync fn it_write_conn_ack() {\n\n let conn_ack = ConnAckPacket::default();\n\n\n\n let data = write_packet(ControlPacket::ConnAck(conn_ack)).await;\n\n assert_bytes(data, vec![0x20, 0x02, 0x00, 0x00])\n", "file_path": "tests/write_packets.rs", "rank": 24, "score": 3.9358113007900766 }, { "content": "mod session_entity;\n\npub(crate) mod session_repository;\n\nmod session_service;\n\n\n\npub use self::session_entity::Session;\n\npub use self::session_repository::InMemorySessionRepository;\n\npub use self::session_repository::SessionRepository;\n\npub use self::session_service::SessionService;\n", "file_path": "src/broker/session/mod.rs", "rank": 25, "score": 3.913635549514639 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::remove_dir;\n\n\n\nuse crate::mqtt::packets::suback::SubAckReturnCode;\n\nuse crate::mqtt::packets::ClientId;\n\nuse crate::mqtt::subscription::Subscription;\n\n\n\npub struct SubscriptionsRepository {\n\n root: SubscriptionNode,\n\n}\n\n\n\nimpl SubscriptionsRepository {\n\n pub fn new() -> SubscriptionsRepository {\n\n SubscriptionsRepository {\n\n root: SubscriptionNode::new(),\n\n }\n\n }\n\n\n\n pub fn subscribe(\n\n &mut self,\n", "file_path": "src/broker/messaging/subscriptions_repository.rs", "rank": 26, "score": 3.855287358356077 }, { "content": " return if clean_shutdown {\n\n Err(tokio::io::Error::new(\n\n ErrorKind::ConnectionReset,\n\n \"connection reset by peer\",\n\n ))\n\n } else {\n\n // closed while sending\n\n Err(tokio::io::Error::new(\n\n ErrorKind::ConnectionReset,\n\n \"connection interrupted, reset by peer\",\n\n ))\n\n };\n\n }\n\n }\n\n\n\n Ok(())\n\n 
}\n\n}\n\n\n\nimpl MqttBytesStream {\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 27, "score": 3.829622344600214 }, { "content": " listeners.push(tokio::spawn(listener.start_accepting()));\n\n }\n\n\n\n info!(\"Initialized RatelMQ\");\n\n\n\n signal::ctrl_c().await.unwrap();\n\n\n\n info!(\"Stopping RatelMQ\");\n\n ctrl_c_tx.send(()).unwrap();\n\n\n\n join_all(listeners).await;\n\n\n\n manager_future.await.unwrap();\n\n\n\n info!(\"RatelMQ stopped\");\n\n}\n", "file_path": "src/application.rs", "rank": 28, "score": 3.8013587968646 }, { "content": " };\n\n }\n\n\n\n async fn on_connect(\n\n &mut self,\n\n sender: Sender<ServerEvent>,\n\n packet: ConnectPacket,\n\n address: SocketAddr,\n\n ) {\n\n debug!(\"New client {:?} connected\", &packet.client_id);\n\n\n\n if let Some(user_name) = packet.user_name {\n\n let password = packet.password.unwrap();\n\n if let Err(e) = self.identity_provider.authenticate(&user_name, &password) {\n\n info!(\"Client {} authentication error: {:?}\", &user_name, &e);\n\n\n\n let conn_ack = ConnAckPacket::new(false, ConnAckReturnCode::NotAuthorized);\n\n sender\n\n .send(ServerEvent::ControlPacket(ControlPacket::ConnAck(conn_ack)))\n\n .await\n", "file_path": "src/broker/manager.rs", "rank": 29, "score": 3.6771623722735436 }, { "content": "use crate::mqtt::events::ServerEvent;\n\nuse crate::mqtt::packets::ClientId;\n\nuse std::net::IpAddr;\n\nuse tokio::sync::mpsc::Sender;\n\n\n\npub struct Session {\n\n client_id: ClientId,\n\n ip: IpAddr,\n\n persistent: bool,\n\n sender: Sender<ServerEvent>,\n\n}\n\n\n\nimpl Session {\n\n pub fn new(\n\n client_id: ClientId,\n\n ip: IpAddr,\n\n persistent: bool,\n\n sender: Sender<ServerEvent>,\n\n ) -> Self {\n\n Session {\n", "file_path": "src/broker/session/session_entity.rs", "rank": 30, "score": 3.6031310694014254 }, { "content": " async fn wait_for_data(&mut self, bytes: usize) -> Result<(), Error> {\n\n while self.read_buffer.len() < bytes {\n\n let read_bytes_num = 
self.read_stream.read_buf(&mut self.read_buffer).await?;\n\n\n\n // todo: Handle better?\n\n let connection_closed = read_bytes_num == 0; // end of file\n\n if connection_closed {\n\n let clean_shutdown = self.read_buffer.is_empty();\n\n return if clean_shutdown {\n\n Err(tokio::io::Error::new(\n\n ErrorKind::ConnectionReset,\n\n \"connection reset by peer\",\n\n ))\n\n } else {\n\n // closed while sending\n\n Err(tokio::io::Error::new(\n\n ErrorKind::ConnectionReset,\n\n \"connection interrupted, reset by peer\",\n\n ))\n\n };\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 31, "score": 3.572207518283132 }, { "content": "use std::process::Command;\n\n\n", "file_path": "build.rs", "rank": 32, "score": 3.5537417262102156 }, { "content": "use tokio::io::AsyncWriteExt;\n\nuse tokio::net::{TcpListener, TcpStream};\n\n\n\nuse ratelmq::mqtt::packets::{ControlPacket, ProtocolVersion, QoS};\n\nuse ratelmq::mqtt::subscription::Subscription;\n\nuse ratelmq::mqtt::transport::mqtt_bytes_stream::MqttBytesReadStream;\n\nuse ratelmq::mqtt::transport::packet_decoder;\n\n\n\n#[tokio::test]\n\nasync fn it_read_connect_min() {\n\n const DATA: &[u8] = &[\n\n 0x10, 0x23, 0x00, 0x04, 0x4d, 0x51, 0x54, 0x54, 0x04, 0x02, 0x00, 0x3c, 0x00, 0x17, 0x6d,\n\n 0x6f, 0x73, 0x71, 0x2d, 0x6e, 0x73, 0x36, 0x73, 0x7a, 0x33, 0x6b, 0x33, 0x6c, 0x62, 0x66,\n\n 0x4d, 0x31, 0x49, 0x66, 0x62, 0x63, 0x52,\n\n ];\n\n\n\n let packet = read_packet(DATA).await;\n\n\n\n match packet {\n\n ControlPacket::Connect(connect) => {\n", "file_path": "tests/read_packets.rs", "rank": 33, "score": 3.5170258650180655 }, { "content": " return Err(tokio::io::Error::new(\n\n ErrorKind::InvalidData,\n\n \"Malformed payload\",\n\n ));\n\n }\n\n\n\n let mut subscriptions = Vec::new();\n\n while remaining_length > 0 {\n\n let topic = buffer.get_string().await?;\n\n let qos = buffer.get_u8().await?;\n\n if qos > 2 {\n\n return Err(tokio::io::Error::new(\n\n ErrorKind::InvalidData,\n\n \"Malformed QoS\",\n\n 
));\n\n }\n\n\n\n remaining_length -= topic.len() as u64 + 3u64 /* 2 topic length + QoS*/;\n\n\n\n let subscription = Subscription::new(topic, QoS::from_bits(qos));\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 34, "score": 3.4849819764707277 }, { "content": "use crate::broker::session::session_repository::SessionRepository;\n\nuse crate::broker::session::Session;\n\nuse crate::mqtt::packets::ClientId;\n\n\n\n#[derive(Default)]\n\npub struct SessionService<T>\n\nwhere\n\n T: SessionRepository,\n\n{\n\n repository: T,\n\n}\n\n\n\nimpl<T> SessionService<T>\n\nwhere\n\n T: SessionRepository,\n\n{\n\n pub fn new(repository: T) -> Self {\n\n SessionService { repository }\n\n }\n\n\n", "file_path": "src/broker/session/session_service.rs", "rank": 35, "score": 3.469602781372993 }, { "content": "\n\nimpl Default for InMemorySessionRepository {\n\n fn default() -> Self {\n\n InMemorySessionRepository {\n\n sessions: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::net::{IpAddr, Ipv4Addr};\n\n use tokio::sync::mpsc;\n\n\n\n fn create_session() -> Session {\n\n let (tx, _rx) = mpsc::channel(32);\n\n Session::new(\n\n \"client-1\".to_string(),\n\n IpAddr::V4(Ipv4Addr::LOCALHOST),\n", "file_path": "src/broker/session/session_repository.rs", "rank": 36, "score": 3.4362660203051827 }, { "content": "use crate::mqtt::packets::QoS;\n\nuse bytes::BytesMut;\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct Message {\n\n pub topic: String,\n\n pub payload: BytesMut,\n\n pub qos: QoS,\n\n pub retain: bool,\n\n}\n", "file_path": "src/mqtt/message.rs", "rank": 37, "score": 3.320150933331645 }, { "content": " }\n\n}\n\n\n\npub async fn decode_remaining_length(buffer: &mut MqttBytesReadStream) -> Result<u64, Error> {\n\n let mut remaining_length = 0u64;\n\n let mut multiplier = 1u64;\n\n\n\n loop {\n\n let byte = buffer.get_u8().await?;\n\n remaining_length += (byte & 127) as u64 * multiplier;\n\n multiplier 
*= 128;\n\n\n\n if multiplier > 128 * 128 * 128 {\n\n return Err(tokio::io::Error::new(\n\n ErrorKind::InvalidData,\n\n \"Malformed remaining length multiplier\",\n\n ));\n\n }\n\n\n\n let continuation_bit = byte & 128;\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 38, "score": 3.3168330273709183 }, { "content": "use crate::mqtt::message::Message;\n\nuse crate::mqtt::packets::QoS;\n\nuse bytes::BytesMut;\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct PublishPacket {\n\n pub packet_id: Option<u16>,\n\n pub dup: bool,\n\n\n\n pub message: Message,\n\n}\n\n\n\nimpl PublishPacket {\n\n pub fn new(\n\n topic: String,\n\n body: BytesMut,\n\n qos: QoS,\n\n retain: bool,\n\n packet_id: Option<u16>,\n\n dup: bool,\n", "file_path": "src/mqtt/packets/publish.rs", "rank": 39, "score": 3.3090880302382617 }, { "content": "use crate::mqtt::packets::{ClientId, ConnectPacket, ControlPacket};\n\nuse std::net::SocketAddr;\n\nuse tokio::sync::mpsc::Sender;\n\n\n\n#[derive(Debug)]\n\npub enum ClientEvent {\n\n Connected(ConnectPacket, SocketAddr, Sender<ServerEvent>),\n\n ControlPacket(ClientId, ControlPacket, Sender<ServerEvent>),\n\n Disconnected(ClientId),\n\n ConnectionLost(ClientId),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ServerEvent {\n\n ControlPacket(ControlPacket),\n\n Disconnect,\n\n}\n", "file_path": "src/mqtt/events.rs", "rank": 40, "score": 3.2637900681751377 }, { "content": "use crate::mqtt::packets::QoS;\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct Subscription {\n\n topic: String,\n\n qos: QoS,\n\n}\n\n\n\nimpl Display for Subscription {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"topic: '{}' QoS: {}\", self.topic, self.qos)\n\n }\n\n}\n\n\n\nimpl Subscription {\n\n pub fn new(topic: String, qos: QoS) -> Subscription {\n\n Subscription { topic, qos }\n\n }\n\n\n\n pub fn topic(&self) -> &str {\n\n self.topic.as_str()\n\n 
}\n\n}\n", "file_path": "src/mqtt/subscription.rs", "rank": 41, "score": 3.2637900681751377 }, { "content": " pub async fn put_u8(&mut self, n: u8) -> Result<(), Error> {\n\n self.write_buffer_if_too_small(1).await?;\n\n\n\n self.write_buffer.put_u8(n);\n\n Ok(())\n\n }\n\n\n\n pub async fn put_u16(&mut self, n: u16) -> Result<(), Error> {\n\n self.write_buffer_if_too_small(2).await?;\n\n\n\n self.write_buffer.put_u16(n);\n\n Ok(())\n\n }\n\n\n\n pub async fn put_string(&mut self, string: &str) -> Result<(), Error> {\n\n self.put_u16(string.len() as u16).await?;\n\n self.put_bytes(BytesMut::from(string.as_bytes())).await?;\n\n\n\n Ok(())\n\n }\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 42, "score": 3.209914666530279 }, { "content": " }\n\n\n\n pub async fn put_u16(&mut self, n: u16) -> Result<(), Error> {\n\n self.write_buffer_if_too_small(2).await?;\n\n\n\n self.write_buffer.put_u16(n);\n\n Ok(())\n\n }\n\n\n\n pub async fn put_string(&mut self, string: &str) -> Result<(), Error> {\n\n self.put_u16(string.len() as u16).await?;\n\n self.put_bytes(BytesMut::from(string.as_bytes())).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn put_bytes(&mut self, mut bytes: BytesMut) -> Result<(), Error> {\n\n let mut remaining_size = bytes.len();\n\n\n\n while remaining_size > 0 {\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 43, "score": 3.19520073299748 }, { "content": "mod messaging_service;\n\n\n\nmod subscriptions_repository;\n\n\n\npub use self::messaging_service::MessagingService;\n", "file_path": "src/broker/messaging/mod.rs", "rank": 44, "score": 3.1751366783827146 }, { "content": " subscriptions.push(subscription);\n\n }\n\n\n\n let packet = SubscribePacket::new(packet_id, subscriptions);\n\n Ok(ControlPacket::Subscribe(packet))\n\n}\n\n\n\nasync fn decode_unsubscribe(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n mut remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = 
decode_packet_with_packet_id(buffer, first_byte, 0b10100010).await?;\n\n remaining_length -= 2;\n\n\n\n if remaining_length == 0 {\n\n return Err(tokio::io::Error::new(\n\n ErrorKind::InvalidData,\n\n \"Malformed payload\",\n\n ));\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 45, "score": 3.1518573199213447 }, { "content": " trace!(\"Got packet {:?}\", packet);\n\n\n\n match packet {\n\n // ControlPacket::Connect(c) => {\n\n // self.on_connect(action.response, c, &mut sessions).await\n\n // }\n\n // ControlPacket::ConnAck(_) => {}\n\n ControlPacket::Publish(p) => self.on_publish(tx, p, client_id).await,\n\n // ControlPacket::PubAck(_) => {}\n\n // ControlPacket::PubRec(_) => {}\n\n // ControlPacket::PubRel(_) => {}\n\n // ControlPacket::PubComp(_) => {}\n\n ControlPacket::Subscribe(p) => self.on_subscribe(tx, p, &client_id).await,\n\n // ControlPacket::SubAck(_) => {}\n\n ControlPacket::Unsubscribe(p) => self.on_unsubscribe(tx, p, &client_id).await,\n\n // ControlPacket::UnsubAck(_) => {}\n\n ControlPacket::PingReq => self.on_ping_req(tx).await,\n\n // ControlPacket::PingResp() => {}\n\n ControlPacket::Disconnect(_) => self.on_disconnect(client_id).await,\n\n _ => error!(\"Packet {} not supported\", &packet),\n", "file_path": "src/broker/manager.rs", "rank": 46, "score": 3.12068885036652 }, { "content": " ctrl_c_rx,\n\n // sessions: SessionService::default(),\n\n messaging: MessagingService::new(),\n\n identity_provider,\n\n }\n\n }\n\n\n\n pub async fn run(mut self) {\n\n loop {\n\n select! 
{\n\n _ = self.ctrl_c_rx.recv() => {\n\n trace!(\"Stopping manager\");\n\n break;\n\n }\n\n maybe_event = self.rx.recv() => {\n\n if let Some(event) = maybe_event {\n\n trace!(\"Got event {:?}\", &event);\n\n\n\n match event {\n\n ClientEvent::Connected(c, address, tx) => self.on_connect(tx, c, address).await,\n", "file_path": "src/broker/manager.rs", "rank": 47, "score": 3.094138629880727 }, { "content": "async fn write_pub_ack(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: PubAckPacket,\n\n) -> Result<(), Error> {\n\n write_packet_with_packet_id(buffer, PACKET_TYPE_PUB_ACK << 4, packet.packet_id).await?;\n\n Ok(())\n\n}\n\n\n\nasync fn write_pub_rec(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: PubRecPacket,\n\n) -> Result<(), Error> {\n\n write_packet_with_packet_id(buffer, PACKET_TYPE_PUB_REC << 4, packet.packet_id).await?;\n\n Ok(())\n\n}\n\n\n\nasync fn write_pub_rel(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: PubRelPacket,\n\n) -> Result<(), Error> {\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 48, "score": 2.951658689931886 }, { "content": "use crate::mqtt::packets::{ClientId, ProtocolVersion};\n\n\n\nuse crate::mqtt::message::Message;\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct ConnectPacket {\n\n pub version: ProtocolVersion,\n\n pub client_id: ClientId,\n\n pub keep_alive_seconds: u16,\n\n pub clean_session: bool,\n\n\n\n pub will_message: Option<Message>,\n\n\n\n pub user_name: Option<String>,\n\n pub password: Option<String>,\n\n}\n\n\n\nimpl ConnectPacket {\n\n pub fn new(\n\n version: ProtocolVersion,\n", "file_path": "src/mqtt/packets/connect.rs", "rank": 49, "score": 2.7464681902971733 }, { "content": " }\n\n\n\n pub async fn finish_packet(&mut self) -> Result<(), Error> {\n\n self.tcp_stream.write_buf(&mut self.write_buffer).await?;\n\n // self.tcp_stream.flush().await?;\n\n Ok(())\n\n }\n\n\n\n async fn write_buffer_if_too_small(&mut self, size: usize) -> Result<(), Error> {\n\n if 
self.write_buffer.len() + size >= self.write_buffer.capacity() {\n\n self.tcp_stream.write_buf(&mut self.write_buffer).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl MqttBytesReadStream {\n\n pub fn new(buffer_size: usize, read_stream: OwnedReadHalf) -> Self {\n\n MqttBytesReadStream {\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 50, "score": 2.631020846668261 }, { "content": " Ok(())\n\n }\n\n\n\n async fn write_buffer_if_too_small(&mut self, size: usize) -> Result<(), Error> {\n\n if self.write_buffer.len() + size >= self.write_buffer.capacity() {\n\n self.write_stream.write_buf(&mut self.write_buffer).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 51, "score": 2.607993115325768 }, { "content": " packet: UnSubAckPacket,\n\n) -> Result<(), Error> {\n\n // fixed header\n\n write_packet_with_packet_id(buffer, PACKET_TYPE_UNSUB_ACK << 4, packet.packet_id).await?;\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn write_ping_resp(buffer: &mut MqttBytesWriteStream) -> Result<(), Error> {\n\n // fixed header\n\n buffer.put_u8(PACKET_TYPE_PING_RESP << 4).await?;\n\n\n\n const REMAINING_LENGTH: u64 = 0;\n\n encode_remaining_length(REMAINING_LENGTH, buffer).await?;\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn write_packet_with_packet_id(\n\n buffer: &mut MqttBytesWriteStream,\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 52, "score": 2.6015619235411465 }, { "content": " }\n\n}\n\n\n\nimpl IdentityProvider for FileIdentityManager {\n\n fn authenticate(&self, username: &str, password: &str) -> Result<(), AuthenticationError> {\n\n // todo: encryption\n\n\n\n self.passwords_by_username\n\n .get(username)\n\n .ok_or(UserNotFound)?\n\n .eq(password)\n\n .then(|| ())\n\n .ok_or(InvalidPassword)\n\n }\n\n}\n", "file_path": "src/broker/authentication.rs", "rank": 53, "score": 2.5507320162134097 }, { "content": "async fn decode_pub_comp(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n 
_remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = decode_packet_with_packet_id(buffer, first_byte, 0b01110000).await?;\n\n Ok(ControlPacket::PubComp(PubCompPacket::new(packet_id)))\n\n}\n\n\n\nasync fn decode_subscribe(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n mut remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = decode_packet_with_packet_id(buffer, first_byte, 0b10000010).await?;\n\n remaining_length -= 2;\n\n\n\n // payload\n\n\n\n if remaining_length == 0 {\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 54, "score": 2.4506457437345954 }, { "content": " let mut first_byte = PACKET_TYPE_PUB_REL << 4;\n\n first_byte |= 0b10;\n\n write_packet_with_packet_id(buffer, first_byte, packet.packet_id).await?;\n\n Ok(())\n\n}\n\n\n\nasync fn write_pub_comp(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: PubCompPacket,\n\n) -> Result<(), Error> {\n\n write_packet_with_packet_id(buffer, PACKET_TYPE_PUB_COMP << 4, packet.packet_id).await?;\n\n Ok(())\n\n}\n\n\n\nasync fn write_sub_ack(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: SubAckPacket,\n\n) -> Result<(), Error> {\n\n // fixed header\n\n buffer.put_u8(PACKET_TYPE_SUB_ACK << 4).await?;\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 55, "score": 2.4506457437345954 }, { "content": " pub fn new(config_filename: &str) -> Result<Self, ConfigError> {\n\n let mut config = Config::new();\n\n\n\n config.merge(File::with_name(config_filename).format(FileFormat::Toml))?;\n\n config.merge(Environment::with_prefix(\"ratelmq\").separator(\"__\"))?;\n\n\n\n config.try_into()\n\n }\n\n}\n\n\n\n// todo: tests for envs precedence\n", "file_path": "src/settings/mod.rs", "rank": 56, "score": 2.39310309527849 }, { "content": "}\n\n\n\nasync fn decode_pub_rec(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n _remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = 
decode_packet_with_packet_id(buffer, first_byte, 0b01010000).await?;\n\n Ok(ControlPacket::PubRec(PubRecPacket::new(packet_id)))\n\n}\n\n\n\nasync fn decode_pub_rel(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n _remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = decode_packet_with_packet_id(buffer, first_byte, 0b01100010).await?;\n\n Ok(ControlPacket::PubRel(PubRelPacket::new(packet_id)))\n\n}\n\n\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 57, "score": 2.375481405853452 }, { "content": " first_byte: u8,\n\n packet_id: u16,\n\n) -> Result<(), Error> {\n\n // fixed header\n\n buffer.put_u8(first_byte).await?;\n\n\n\n const REMAINING_LENGTH: u64 = 2;\n\n encode_remaining_length(REMAINING_LENGTH, buffer).await?;\n\n\n\n // variable header\n\n buffer.put_u16(packet_id).await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 58, "score": 2.3687063971915863 }, { "content": "use crate::mqtt::subscription::Subscription;\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct SubscribePacket {\n\n pub packet_id: u16,\n\n\n\n pub subscriptions: Vec<Subscription>,\n\n}\n\n\n\nimpl SubscribePacket {\n\n pub fn new(packet_id: u16, subscriptions: Vec<Subscription>) -> Self {\n\n SubscribePacket {\n\n packet_id,\n\n subscriptions,\n\n }\n\n }\n\n}\n", "file_path": "src/mqtt/packets/subscribe.rs", "rank": 59, "score": 2.3354107111220164 }, { "content": " let mut remaining_length = size;\n\n\n\n while remaining_length > 0 {\n\n // todo: use clamp when stabilized\n\n let wait_for_bytes = {\n\n if remaining_length > self.read_buffer.capacity() {\n\n self.read_buffer.capacity()\n\n } else {\n\n remaining_length\n\n }\n\n };\n\n\n\n self.wait_for_data(wait_for_bytes).await?;\n\n bytes.put(self.read_buffer.split_to(wait_for_bytes));\n\n remaining_length -= wait_for_bytes;\n\n }\n\n\n\n Ok(bytes)\n\n }\n\n\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 
60, "score": 2.312756894608573 }, { "content": " async fn on_connection_lost(&mut self, client_id: ClientId) {\n\n info!(\"Client {:?} disconnected unexpectedly\", &client_id);\n\n self.messaging.connection_lost(&client_id);\n\n\n\n debug!(\n\n \"Active sessions count: {:?}\",\n\n self.messaging.session_count()\n\n );\n\n }\n\n\n\n async fn on_publish(\n\n &self,\n\n _sender: Sender<ServerEvent>,\n\n publish: PublishPacket,\n\n client_id: ClientId,\n\n ) {\n\n debug!(\n\n \"Client {:?} published message on topic {:?}\",\n\n &client_id, &publish.message.topic\n\n );\n", "file_path": "src/broker/manager.rs", "rank": 61, "score": 2.194794513331142 }, { "content": " ProtocolVersion::Mqtt3,\n\n client_id,\n\n keep_alive_seconds,\n\n clean_session,\n\n None,\n\n user_name,\n\n password,\n\n );\n\n\n\n Ok(ControlPacket::Connect(connect_packet))\n\n}\n\n\n\nasync fn decode_publish(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n mut remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n bitflags! 
{\n\n struct FixedHeaderFlags: u8 {\n\n const RETAIN = 0b00000001;\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 62, "score": 2.110516289953459 }, { "content": " }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl MqttBytesWriteStream {\n\n pub fn new(buffer_size: usize, write_stream: OwnedWriteHalf) -> Self {\n\n MqttBytesWriteStream {\n\n write_buffer: BytesMut::with_capacity(buffer_size),\n\n write_stream,\n\n }\n\n }\n\n\n\n pub async fn put_u8(&mut self, n: u8) -> Result<(), Error> {\n\n self.write_buffer_if_too_small(1).await?;\n\n\n\n self.write_buffer.put_u8(n);\n\n Ok(())\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 63, "score": 2.110516289953459 }, { "content": "\n\n pub async fn put_bytes(&mut self, mut bytes: BytesMut) -> Result<(), Error> {\n\n let mut remaining_size = bytes.len();\n\n\n\n while remaining_size > 0 {\n\n let bytes_to_write = {\n\n if remaining_size > self.write_buffer.capacity() {\n\n self.write_buffer.capacity()\n\n } else {\n\n remaining_size\n\n }\n\n };\n\n self.write_buffer_if_too_small(bytes_to_write).await?;\n\n\n\n self.write_buffer.put(bytes.split_to(bytes_to_write));\n\n\n\n remaining_size -= bytes_to_write;\n\n }\n\n\n\n Ok(())\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 64, "score": 2.091518250836893 }, { "content": " let bytes_to_write = {\n\n if remaining_size > self.write_buffer.capacity() {\n\n self.write_buffer.capacity()\n\n } else {\n\n remaining_size\n\n }\n\n };\n\n self.write_buffer_if_too_small(bytes_to_write).await?;\n\n\n\n self.write_buffer.put(bytes.split_to(bytes_to_write));\n\n\n\n remaining_size -= bytes_to_write;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn finish_packet(&mut self) -> Result<(), Error> {\n\n self.write_stream.write_buf(&mut self.write_buffer).await?;\n\n // self.tcp_stream.flush().await?;\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 65, "score": 2.0728591861853936 }, { "content": "\n\n pub fn 
unsubscribe(&mut self, client_id: &ClientId, topics: &Vec<String>) {\n\n self.subscriptions.unsubscribe(client_id, topics);\n\n }\n\n\n\n pub async fn publish(&self, message: &Message, publish: &PublishPacket) {\n\n if let Some(client_ids) = self.subscriptions.subscribed_clients(&message.topic) {\n\n for c in &client_ids {\n\n match self.sessions.get(c) {\n\n Some(session) => {\n\n let event = ServerEvent::ControlPacket(Publish(publish.clone()));\n\n session.sender().send(event).await.unwrap();\n\n }\n\n None => {\n\n warn!(\n\n \"Tried to send message, but session for client {:?} not found\",\n\n c\n\n );\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/broker/messaging/messaging_service.rs", "rank": 66, "score": 2.0466638067941787 }, { "content": "use crate::mqtt::packets::suback::SubAckReturnCode::Failure;\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum SubAckReturnCode {\n\n SuccessQoS0 = 0x00,\n\n SuccessQoS1 = 0x01,\n\n SuccessQoS2 = 0x02,\n\n Failure = 0x80,\n\n}\n\n\n\nimpl Default for SubAckReturnCode {\n\n fn default() -> Self {\n\n Failure\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct SubAckPacket {\n\n pub packet_id: u16,\n\n\n", "file_path": "src/mqtt/packets/suback.rs", "rank": 67, "score": 1.985365342663851 }, { "content": " const REMAINING_LENGTH: u64 = 2;\n\n encode_remaining_length(REMAINING_LENGTH, buffer).await?;\n\n\n\n // variable header\n\n buffer.put_u8(packet.session_present as u8).await?;\n\n buffer.put_u8(packet.return_code.clone() as u8).await?;\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn write_publish(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: PublishPacket,\n\n) -> Result<(), Error> {\n\n bitflags! 
{\n\n struct FixedHeaderFlags: u8 {\n\n const RETAIN = 0b00000001;\n\n const QOS_1 = 0b00000010;\n\n const QOS_2 = 0b00000100;\n\n const DUP = 0b00001000;\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 68, "score": 1.9510197405011103 }, { "content": "}\n\n\n\nimpl MqttBytesStream {\n\n pub fn new(\n\n read_buffer_size: usize,\n\n write_buffer_size: usize,\n\n tcp_stream: TcpStream,\n\n ) -> MqttBytesStream {\n\n MqttBytesStream {\n\n read_buffer: BytesMut::with_capacity(read_buffer_size),\n\n write_buffer: BytesMut::with_capacity(write_buffer_size),\n\n tcp_stream,\n\n }\n\n }\n\n}\n\n\n\nimpl MqttBytesStream {\n\n pub async fn get_u8(&mut self) -> Result<u8, Error> {\n\n self.wait_for_data(1).await?;\n\n\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 69, "score": 1.934773599186426 }, { "content": "\n\nimpl FileIdentityManager {\n\n pub fn new(filename: &str) -> Result<FileIdentityManager, FileIdentityManagerError> {\n\n let credentials = std::fs::read_to_string(filename)?;\n\n\n\n let mut passwords_by_username = HashMap::with_capacity(credentials.lines().count());\n\n\n\n for line in credentials.lines() {\n\n let credential = line.split_once(':').ok_or(InvalidEntry)?;\n\n let username = credential.0;\n\n let password = credential.1;\n\n\n\n passwords_by_username.insert(username.to_owned(), password.to_owned());\n\n }\n\n\n\n let manager = FileIdentityManager {\n\n passwords_by_username,\n\n };\n\n\n\n Ok(manager)\n", "file_path": "src/broker/authentication.rs", "rank": 70, "score": 1.903079709824774 }, { "content": " remaining_length\n\n }\n\n };\n\n\n\n self.wait_for_data(wait_for_bytes).await?;\n\n bytes.put(self.read_buffer.split_to(wait_for_bytes));\n\n remaining_length -= wait_for_bytes;\n\n }\n\n\n\n Ok(bytes)\n\n }\n\n\n\n async fn wait_for_data(&mut self, bytes: usize) -> Result<(), Error> {\n\n while self.read_buffer.len() < bytes {\n\n let read_bytes_num = self.tcp_stream.read_buf(&mut 
self.read_buffer).await?;\n\n\n\n // todo: Handle better?\n\n let connection_closed = read_bytes_num == 0; // end of file\n\n if connection_closed {\n\n let clean_shutdown = self.read_buffer.is_empty();\n", "file_path": "src/mqtt/transport/mqtt_bytes_stream.rs", "rank": 71, "score": 1.903079709824774 }, { "content": " PACKET_TYPE_SUBSCRIBE => {\n\n decode_subscribe(mqtt_stream, first_byte, remaining_length).await?\n\n }\n\n PACKET_TYPE_UNSUBSCRIBE => {\n\n decode_unsubscribe(mqtt_stream, first_byte, remaining_length).await?\n\n }\n\n PACKET_TYPE_PING_REQ => ControlPacket::PingReq,\n\n PACKET_TYPE_DISCONNECT => ControlPacket::Disconnect(DisconnectPacket::default()),\n\n _ => unimplemented!(),\n\n };\n\n\n\n Ok(packet)\n\n}\n\n\n\nasync fn decode_connect(\n\n buffer: &mut MqttBytesReadStream,\n\n _first_byte: u8,\n\n _remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n bitflags! {\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 72, "score": 1.828209118312012 }, { "content": " }\n\n\n\n let mut topics = Vec::new();\n\n while remaining_length > 0 {\n\n let topic = buffer.get_string().await?;\n\n\n\n remaining_length -= topic.len() as u64 + 2u64/* 2 topic length */;\n\n\n\n topics.push(topic);\n\n }\n\n\n\n let packet = UnsubscribePacket::new(packet_id, topics);\n\n Ok(ControlPacket::Unsubscribe(packet))\n\n}\n\n\n\nasync fn decode_packet_with_packet_id(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n expected_first_byte: u8,\n\n) -> Result<u16, Error> {\n\n validate_first_byte(first_byte, expected_first_byte)?;\n\n\n\n let packet_id = buffer.get_u16().await?;\n\n Ok(packet_id)\n\n}\n\n\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 73, "score": 1.719943995305314 }, { "content": " remaining_length -= 2;\n\n Some(buffer.get_u16().await?)\n\n } else {\n\n None\n\n };\n\n\n\n // payload\n\n let body = buffer.get_bytes(remaining_length as usize).await?;\n\n\n\n let packet = PublishPacket::new(topic, body, qos, 
retain, packet_id, dup);\n\n Ok(ControlPacket::Publish(packet))\n\n}\n\n\n\nasync fn decode_pub_ack(\n\n buffer: &mut MqttBytesReadStream,\n\n first_byte: u8,\n\n _remaining_length: u64,\n\n) -> Result<ControlPacket, Error> {\n\n let packet_id = decode_packet_with_packet_id(buffer, first_byte, 0b01000000).await?;\n\n Ok(ControlPacket::PubAck(PubAckPacket::new(packet_id)))\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 75, "score": 1.694852040182326 }, { "content": " }\n\n\n\n buffer.put_u8(encoded_byte).await?;\n\n\n\n if remaining_length == 0 {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\npub async fn write_packet(\n\n mqtt_stream: &mut MqttBytesWriteStream,\n\n packet: ControlPacket,\n\n) -> Result<(), Error> {\n\n match packet {\n\n ControlPacket::ConnAck(conn_ack) => write_conn_ack(mqtt_stream, conn_ack).await?,\n\n ControlPacket::Publish(publish) => write_publish(mqtt_stream, publish).await?,\n\n ControlPacket::PubAck(pub_ack) => write_pub_ack(mqtt_stream, pub_ack).await?,\n\n ControlPacket::PubRec(pub_rec) => write_pub_rec(mqtt_stream, pub_rec).await?,\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 76, "score": 1.6237847259263658 }, { "content": " ControlPacket::PubRel(pub_rel) => write_pub_rel(mqtt_stream, pub_rel).await?,\n\n ControlPacket::PubComp(pub_comp) => write_pub_comp(mqtt_stream, pub_comp).await?,\n\n ControlPacket::SubAck(sub_ack) => write_sub_ack(mqtt_stream, sub_ack).await?,\n\n ControlPacket::UnsubAck(unsub_ack) => write_unsub_ack(mqtt_stream, unsub_ack).await?,\n\n ControlPacket::PingResp => write_ping_resp(mqtt_stream).await?,\n\n _ => unimplemented!(),\n\n };\n\n\n\n mqtt_stream.finish_packet().await?;\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn write_conn_ack(\n\n buffer: &mut MqttBytesWriteStream,\n\n packet: ConnAckPacket,\n\n) -> Result<(), Error> {\n\n // fixed header\n\n buffer.put_u8(PACKET_TYPE_CONN_ACK << 4).await?;\n\n\n", "file_path": "src/mqtt/transport/packet_encoder.rs", "rank": 77, "score": 
1.4334480728616255 }, { "content": " if continuation_bit == 0 {\n\n break;\n\n }\n\n }\n\n\n\n Ok(remaining_length)\n\n}\n\n\n\npub async fn read_packet(mqtt_stream: &mut MqttBytesReadStream) -> Result<ControlPacket, Error> {\n\n let first_byte = mqtt_stream.get_u8().await?;\n\n let packet_type = first_byte >> 4;\n\n let remaining_length = decode_remaining_length(mqtt_stream).await?;\n\n\n\n let packet = match packet_type {\n\n PACKET_TYPE_CONNECT => decode_connect(mqtt_stream, first_byte, remaining_length).await?,\n\n PACKET_TYPE_PUBLISH => decode_publish(mqtt_stream, first_byte, remaining_length).await?,\n\n PACKET_TYPE_PUB_ACK => decode_pub_ack(mqtt_stream, first_byte, remaining_length).await?,\n\n PACKET_TYPE_PUB_REC => decode_pub_rec(mqtt_stream, first_byte, remaining_length).await?,\n\n PACKET_TYPE_PUB_REL => decode_pub_rel(mqtt_stream, first_byte, remaining_length).await?,\n\n PACKET_TYPE_PUB_COMP => decode_pub_comp(mqtt_stream, first_byte, remaining_length).await?,\n", "file_path": "src/mqtt/transport/packet_decoder.rs", "rank": 78, "score": 1.2419041175934593 }, { "content": "# RatelMQ\n\n\n\n![CI](https://github.com/ratelmq/ratelmq/workflows/CI/badge.svg)\n\n![DockerHub](https://img.shields.io/docker/pulls/ratelmq/ratelmq)\n\n![GitHub](https://img.shields.io/github/license/ratelmq/ratelmq)\n\n\n\n**RatelMQ is an efficient, reliable & scalable MQTT broker.**\n\n\n\n---\n\n\n\n## Installation\n\n\n\n### Docker image\n\n\n\nRatelMQ images are available on [Docker Hub](https://hub.docker.com/r/ratelmq/ratelmq).\n\n\n\nStart container: `docker run --name ratelmq -p 1883:1883 ratelmq/ratelmq:main`\n\n\n\n### Precompiled binaries\n\n\n\nTBD\n\n\n\n### Building from sources\n\n\n\nTBD\n\n\n\n## Configuration\n\n\n\nThe main configuration file is located in the `/etc/ratelmq/ratelmq.conf`.\n\nRatelMQ supports configuration via a [TOML](https://github.com/toml-lang/toml) file and environment variables.\n\n\n\nEnvironment variables take precedence and overwrites 
those from the configuration file.\n\nWhen using environment variables:\n\n\n\n* add `RATELMQ_` prefix\n\n* separate nested items with `__`, e.g. `RATELMQ_MQTT__SOMETHING`\n\n\n\nFor the default values with description please see [config/ratelmq.toml](config/ratelmq.toml).\n\n\n\n## Changelog\n\n\n\nSee [CHANGELOG.md](CHANGELOG.md) for details.\n\n\n\n## Roadmap\n\n\n\nSee [ROADMAP.md](ROADMAP.md) for details.\n\n\n\n## Versioning\n\n\n\nTBD\n\n\n\n## Resources\n\n\n\n* MQTT 3.1.1 spec: <https://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html>\n\n\n\n## Contributing\n\n\n\nTBD\n\n\n\n## License\n\n\n\nThe project is licensed under the Apache License 2.0, see [LICENSE](LICENSE).\n", "file_path": "README.md", "rank": 79, "score": 1.0453054396394528 } ]
Rust
src/utils.rs
KaiWitt/kalayo
5498ed501cb7799cacb709403ba8a6141dc3e2e4
use bitcoin::util::address::Address; use bitcoin::{AddressType, Network}; use std::str::FromStr; use warp::hyper::StatusCode; use warp::Rejection; use crate::error::ErrorMsg; pub fn validate_address(address: Address) -> Result<Address, Rejection> { if address.network != Network::Bitcoin { return Err(ErrorMsg::reject_new( StatusCode::BAD_REQUEST, format!("Address [{}] is not a Bitcoin mainnet address.", address), )); } if let Some(addr_type) = address.address_type() { if addr_type != AddressType::P2wpkh { let msg = format!("[{}] is not a P2WPKH Bitcoin mainnet address.", &address); return Err(ErrorMsg::reject_new(StatusCode::BAD_REQUEST, msg)); } return Ok(address); } log::warn!("Adress [{}] has no address type", address); Err(ErrorMsg::reject_new( StatusCode::BAD_REQUEST, format!("Address [{}] has no address type.", address), )) } pub fn validate_address_str(address: &str) -> Result<Address, Rejection> { match Address::from_str(address) { Ok(address) => validate_address(address), Err(_) => { let msg = format!("[{}] is not a valid Bitcoin address.", &address); Err(ErrorMsg::reject_new(StatusCode::BAD_REQUEST, msg)) } } } #[cfg(test)] mod test { use std::str::FromStr; use bitcoin::Address; use crate::utils::{validate_address, validate_address_str}; #[test] fn validate_address_returns_address_when_address_is_mainnet_p2wpkh() { let p2wpkh = Address::from_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); let p2wpkh_validated = validate_address(p2wpkh.clone()).unwrap(); assert_eq!(p2wpkh_validated, p2wpkh); } #[test] fn validate_address_returns_err_when_address_is_not_p2wpkh() { let p2pkh = Address::from_str("1P5ZEDWTKTFGxQjZphgWPQUpe554WKDfHQ").unwrap(); let p2sh = Address::from_str("34xp4vRoCGJym3xR7yCVPFHoCNxv4Twseo").unwrap(); let p2wsh = Address::from_str("bc1q8500kqh90vgkgsdnsw6a84mrzmgwwnhtxuu5wy347wuxk0g08e4smc3tl9") .unwrap(); let p2pkh_validated = validate_address(p2pkh.clone()); let p2sh_validated = validate_address(p2sh.clone()); let p2wsh_validated 
= validate_address(p2wsh.clone()); assert!(p2pkh_validated.is_err()); assert!(p2sh_validated.is_err()); assert!(p2wsh_validated.is_err()); } #[test] fn validate_address_returns_err_when_address_is_not_mainnet() { let testnet_addr = Address::from_str("tb1qt25c7aencjadsva2tkzmkc9juqslnn2e0kug4h").unwrap(); let validated = validate_address(testnet_addr.clone()); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_address_when_address_is_mainnet_p2wpkh() { let p2wpkh = Address::from_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); let p2wpkh_validated = validate_address_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); assert_eq!(p2wpkh_validated, p2wpkh); } #[test] fn validate_address_str_returns_err_when_address_is_not_p2wpkh() { let p2pkh = validate_address_str("1P5ZEDWTKTFGxQjZphgWPQUpe554WKDfHQ"); let p2sh = validate_address_str("34xp4vRoCGJym3xR7yCVPFHoCNxv4Twseo"); assert!(p2pkh.is_err()); assert!(p2sh.is_err()); } #[test] fn validate_address_str_returns_err_when_address_is_not_mainnet() { let validated = validate_address_str("tb1qt25c7aencjadsva2tkzmkc9juqslnn2e0kug4h"); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_err_when_input_is_not_an_address() { let validated = validate_address_str("notanaddress"); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_err_when_input_is_empty() { let validated = validate_address_str(""); assert!(validated.is_err()); } }
use bitcoin::util::address::Address; use bitcoin::{AddressType, Network}; use std::str::FromStr; use warp::hyper::StatusCode; use warp::Rejection; use crate::error::ErrorMsg; pub fn validate_address(address: Address) -> Result<Address, Rejection> { if address.network != Network::Bitcoin { return Err(ErrorMsg::reject_new( StatusCode::BAD_REQUEST, format!("Address [{}] is not a Bitcoin mainnet address.", address), )); }
pub fn validate_address_str(address: &str) -> Result<Address, Rejection> { match Address::from_str(address) { Ok(address) => validate_address(address), Err(_) => { let msg = format!("[{}] is not a valid Bitcoin address.", &address); Err(ErrorMsg::reject_new(StatusCode::BAD_REQUEST, msg)) } } } #[cfg(test)] mod test { use std::str::FromStr; use bitcoin::Address; use crate::utils::{validate_address, validate_address_str}; #[test] fn validate_address_returns_address_when_address_is_mainnet_p2wpkh() { let p2wpkh = Address::from_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); let p2wpkh_validated = validate_address(p2wpkh.clone()).unwrap(); assert_eq!(p2wpkh_validated, p2wpkh); } #[test] fn validate_address_returns_err_when_address_is_not_p2wpkh() { let p2pkh = Address::from_str("1P5ZEDWTKTFGxQjZphgWPQUpe554WKDfHQ").unwrap(); let p2sh = Address::from_str("34xp4vRoCGJym3xR7yCVPFHoCNxv4Twseo").unwrap(); let p2wsh = Address::from_str("bc1q8500kqh90vgkgsdnsw6a84mrzmgwwnhtxuu5wy347wuxk0g08e4smc3tl9") .unwrap(); let p2pkh_validated = validate_address(p2pkh.clone()); let p2sh_validated = validate_address(p2sh.clone()); let p2wsh_validated = validate_address(p2wsh.clone()); assert!(p2pkh_validated.is_err()); assert!(p2sh_validated.is_err()); assert!(p2wsh_validated.is_err()); } #[test] fn validate_address_returns_err_when_address_is_not_mainnet() { let testnet_addr = Address::from_str("tb1qt25c7aencjadsva2tkzmkc9juqslnn2e0kug4h").unwrap(); let validated = validate_address(testnet_addr.clone()); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_address_when_address_is_mainnet_p2wpkh() { let p2wpkh = Address::from_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); let p2wpkh_validated = validate_address_str("bc1qx46k3hyr7pu83vj3srapz0hdht3gzy8j3yplmp").unwrap(); assert_eq!(p2wpkh_validated, p2wpkh); } #[test] fn validate_address_str_returns_err_when_address_is_not_p2wpkh() { let p2pkh = 
validate_address_str("1P5ZEDWTKTFGxQjZphgWPQUpe554WKDfHQ"); let p2sh = validate_address_str("34xp4vRoCGJym3xR7yCVPFHoCNxv4Twseo"); assert!(p2pkh.is_err()); assert!(p2sh.is_err()); } #[test] fn validate_address_str_returns_err_when_address_is_not_mainnet() { let validated = validate_address_str("tb1qt25c7aencjadsva2tkzmkc9juqslnn2e0kug4h"); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_err_when_input_is_not_an_address() { let validated = validate_address_str("notanaddress"); assert!(validated.is_err()); } #[test] fn validate_address_str_returns_err_when_input_is_empty() { let validated = validate_address_str(""); assert!(validated.is_err()); } }
if let Some(addr_type) = address.address_type() { if addr_type != AddressType::P2wpkh { let msg = format!("[{}] is not a P2WPKH Bitcoin mainnet address.", &address); return Err(ErrorMsg::reject_new(StatusCode::BAD_REQUEST, msg)); } return Ok(address); } log::warn!("Adress [{}] has no address type", address); Err(ErrorMsg::reject_new( StatusCode::BAD_REQUEST, format!("Address [{}] has no address type.", address), )) }
function_block-function_prefix_line
[]
Rust
vendor/rust-cdk/src/ic_cdk_macros/src/import.rs
dfinity/bigmap-poc
177e3be92316064d62e6e31538f01dded65b9ccb
use crate::error::Errors; use quote::quote; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use std::path::PathBuf; use std::str::FromStr; #[derive(Default, Deserialize)] struct ImportAttributes { pub canister: Option<String>, pub canister_id: Option<String>, pub candid_path: Option<PathBuf>, } fn get_env_id_and_candid(canister_name: &str) -> Result<(String, PathBuf), Errors> { let canister_id_var_name = format!("CANISTER_ID_{}", canister_name); let candid_path_var_name = format!("CANISTER_CANDID_{}", canister_name); Ok(( std::env::var(canister_id_var_name).map_err(|_| { Errors::message(&format!( "Could not find DFX bindings for canister named '{}'. Did you build using DFX?", canister_name )) })?, std::env::var_os(candid_path_var_name) .ok_or_else(|| Errors::message("Could not find DFX bindings.")) .map(|p| PathBuf::from(p))?, )) } struct RustLanguageBinding { visibility: String, canister_id: String, } impl candid::codegen::rust::RustBindings for RustLanguageBinding { fn record( &self, id: &str, fields: &[(String, String)], ) -> Result<String, candid::error::Error> { let all_fields = fields .iter() .map(|(name, ty)| format!("pub {} : {}", name, ty)) .collect::<Vec<String>>() .join(" , "); Ok(format!( r#" #[derive(Clone, Debug, Default, CandidType, serde::Deserialize)] pub struct {} {{ {} }} "#, id, all_fields )) } fn actor(&self, name: &str, all_functions: &[String]) -> Result<String, candid::error::Error> { let mut all_functions_str = String::new(); for f in all_functions { all_functions_str += f; } Ok(format!( r#"{vis} struct {name} {{ }} impl {name} {{ {functions} }}"#, vis = self.visibility, name = name, functions = all_functions_str )) } fn actor_function_body( &self, name: &str, arguments: &[(String, String)], return_type: &str, _is_query: bool, ) -> Result<String, candid::error::Error> { let canister_id = &self.canister_id; let arguments = if arguments.is_empty() { "Option::<()>::None".to_string() } else if arguments.len() == 1 { 
format!("Some({})", arguments[0].0) } else { format!( "Some(({}))", arguments .iter() .map(|(name, _)| name.clone()) .collect::<Vec<String>>() .join(",") ) }; let call = if return_type.is_empty() { "ic_cdk::call_no_return" } else { "ic_cdk::call" }; Ok(format!( r#" {{ {call}( ic_cdk::CanisterId::from_str("{canister_id}").unwrap(), "{name}", {arguments} ) .await .unwrap() }} "#, call = call, canister_id = canister_id, name = name.escape_debug(), arguments = arguments, )) } fn actor_function( &self, name: &str, arguments: &[(String, String)], returns: &[String], is_query: bool, ) -> Result<String, candid::error::Error> { let id = candid::codegen::rust::candid_id_to_rust(name); let return_type = if returns.is_empty() { "".to_string() } else if returns.len() == 1 { returns[0].clone() } else { format!("( {} )", returns.join(" , ")) }; let arguments_list = arguments .iter() .map(|(name, ty)| format!("{} : {}", name, ty)) .collect::<Vec<String>>() .join(" , "); let body = self.actor_function_body(name, arguments, &return_type, is_query)?; Ok(format!( "async fn {id}( {arguments} ) {return_type} {body}", id = id, arguments = arguments_list, body = body, return_type = if return_type == "" { format!("") } else { format!(" -> {}", return_type) } )) } } pub(crate) fn ic_import( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> Result<proc_macro::TokenStream, Errors> { let config = match from_tokenstream::<ImportAttributes>(&proc_macro2::TokenStream::from(attr)) { Ok(c) => c, Err(err) => return Err(Errors::message(format!("{}", err.to_compile_error()))), }; let (canister_id, candid_path) = { if let Some(canister_name) = config.canister { get_env_id_and_candid(&canister_name)? 
} else if let Some(canister_id) = config.canister_id { if let Some(candid_path) = config.candid_path { (canister_id, candid_path) } else { return Err(Errors::message("Must specify both candid and canister_id.")); } } else { return Err(Errors::message("Must specify both candid and canister_id.")); } }; let item = syn::parse2::<syn::Item>(proc_macro2::TokenStream::from(item)).map_err(Errors::from)?; let item = match item { syn::Item::Struct(item) => item, _ => return Err(Errors::message("import must be used on a struct.")), }; let visibility = { let vis = item.vis; format!("{}", quote! { #vis }) }; let struct_name = item.ident.to_string(); let candid_str = std::fs::read_to_string(&candid_path).unwrap(); let prog = candid::IDLProg::from_str(&candid_str) .map_err(|e| Errors::message(format!("Could not parse the candid file: {}", e)))?; let bindings = Box::new(RustLanguageBinding { visibility, canister_id: canister_id.clone(), }); let config = candid::codegen::rust::Config::default() .with_actor_name(struct_name) .with_biguint_type("candid::Nat".to_string()) .with_bigint_type("candid::Int".to_string()) .with_bindings(bindings); let rust_str = candid::codegen::idl_to_rust(&prog, &config).map_err(|e| Errors::message(e.to_string()))?; let rust_str = format!("{} {}", "type principal = Vec<u8>;", rust_str); Ok(proc_macro::TokenStream::from_str(&rust_str).unwrap()) }
use crate::error::Errors; use quote::quote; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use std::path::PathBuf; use std::str::FromStr; #[derive(Default, Deserialize)] struct ImportAttributes { pub canister: Option<String>, pub canister_id: Option<String>, pub candid_path: Option<PathBuf>, }
struct RustLanguageBinding { visibility: String, canister_id: String, } impl candid::codegen::rust::RustBindings for RustLanguageBinding { fn record( &self, id: &str, fields: &[(String, String)], ) -> Result<String, candid::error::Error> { let all_fields = fields .iter() .map(|(name, ty)| format!("pub {} : {}", name, ty)) .collect::<Vec<String>>() .join(" , "); Ok(format!( r#" #[derive(Clone, Debug, Default, CandidType, serde::Deserialize)] pub struct {} {{ {} }} "#, id, all_fields )) } fn actor(&self, name: &str, all_functions: &[String]) -> Result<String, candid::error::Error> { let mut all_functions_str = String::new(); for f in all_functions { all_functions_str += f; } Ok(format!( r#"{vis} struct {name} {{ }} impl {name} {{ {functions} }}"#, vis = self.visibility, name = name, functions = all_functions_str )) } fn actor_function_body( &self, name: &str, arguments: &[(String, String)], return_type: &str, _is_query: bool, ) -> Result<String, candid::error::Error> { let canister_id = &self.canister_id; let arguments = if arguments.is_empty() { "Option::<()>::None".to_string() } else if arguments.len() == 1 { format!("Some({})", arguments[0].0) } else { format!( "Some(({}))", arguments .iter() .map(|(name, _)| name.clone()) .collect::<Vec<String>>() .join(",") ) }; let call = if return_type.is_empty() { "ic_cdk::call_no_return" } else { "ic_cdk::call" }; Ok(format!( r#" {{ {call}( ic_cdk::CanisterId::from_str("{canister_id}").unwrap(), "{name}", {arguments} ) .await .unwrap() }} "#, call = call, canister_id = canister_id, name = name.escape_debug(), arguments = arguments, )) } fn actor_function( &self, name: &str, arguments: &[(String, String)], returns: &[String], is_query: bool, ) -> Result<String, candid::error::Error> { let id = candid::codegen::rust::candid_id_to_rust(name); let return_type = if returns.is_empty() { "".to_string() } else if returns.len() == 1 { returns[0].clone() } else { format!("( {} )", returns.join(" , ")) }; let arguments_list = arguments 
.iter() .map(|(name, ty)| format!("{} : {}", name, ty)) .collect::<Vec<String>>() .join(" , "); let body = self.actor_function_body(name, arguments, &return_type, is_query)?; Ok(format!( "async fn {id}( {arguments} ) {return_type} {body}", id = id, arguments = arguments_list, body = body, return_type = if return_type == "" { format!("") } else { format!(" -> {}", return_type) } )) } } pub(crate) fn ic_import( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> Result<proc_macro::TokenStream, Errors> { let config = match from_tokenstream::<ImportAttributes>(&proc_macro2::TokenStream::from(attr)) { Ok(c) => c, Err(err) => return Err(Errors::message(format!("{}", err.to_compile_error()))), }; let (canister_id, candid_path) = { if let Some(canister_name) = config.canister { get_env_id_and_candid(&canister_name)? } else if let Some(canister_id) = config.canister_id { if let Some(candid_path) = config.candid_path { (canister_id, candid_path) } else { return Err(Errors::message("Must specify both candid and canister_id.")); } } else { return Err(Errors::message("Must specify both candid and canister_id.")); } }; let item = syn::parse2::<syn::Item>(proc_macro2::TokenStream::from(item)).map_err(Errors::from)?; let item = match item { syn::Item::Struct(item) => item, _ => return Err(Errors::message("import must be used on a struct.")), }; let visibility = { let vis = item.vis; format!("{}", quote! 
{ #vis }) }; let struct_name = item.ident.to_string(); let candid_str = std::fs::read_to_string(&candid_path).unwrap(); let prog = candid::IDLProg::from_str(&candid_str) .map_err(|e| Errors::message(format!("Could not parse the candid file: {}", e)))?; let bindings = Box::new(RustLanguageBinding { visibility, canister_id: canister_id.clone(), }); let config = candid::codegen::rust::Config::default() .with_actor_name(struct_name) .with_biguint_type("candid::Nat".to_string()) .with_bigint_type("candid::Int".to_string()) .with_bindings(bindings); let rust_str = candid::codegen::idl_to_rust(&prog, &config).map_err(|e| Errors::message(e.to_string()))?; let rust_str = format!("{} {}", "type principal = Vec<u8>;", rust_str); Ok(proc_macro::TokenStream::from_str(&rust_str).unwrap()) }
fn get_env_id_and_candid(canister_name: &str) -> Result<(String, PathBuf), Errors> { let canister_id_var_name = format!("CANISTER_ID_{}", canister_name); let candid_path_var_name = format!("CANISTER_CANDID_{}", canister_name); Ok(( std::env::var(canister_id_var_name).map_err(|_| { Errors::message(&format!( "Could not find DFX bindings for canister named '{}'. Did you build using DFX?", canister_name )) })?, std::env::var_os(candid_path_var_name) .ok_or_else(|| Errors::message("Could not find DFX bindings.")) .map(|p| PathBuf::from(p))?, )) }
function_block-full_function
[ { "content": "#[derive(CandidType, serde::Deserialize, Debug)]\n\nstruct CanisterIdRecord {\n\n canister_id: candid::Principal,\n\n}\n\n\n\npub async fn subnet_create_new_canister() -> Result<CanisterId, String> {\n\n let management_canister = ic_cdk::CanisterId::from(Vec::new());\n\n let new_can_id_record: CanisterIdRecord =\n\n match ic_cdk::call(management_canister, \"create_canister\", Some(())).await {\n\n Ok(res) => res,\n\n Err(err) => {\n\n ic_cdk::println!(\"Error invoking create_canister: {:?} {}\", err.0, err.1);\n\n return Err(err.1);\n\n }\n\n };\n\n\n\n let new_can_id = CanisterId::from(new_can_id_record.canister_id.as_slice());\n\n Ok(new_can_id)\n\n}\n\n\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "src/canister_management.rs", "rank": 0, "score": 99287.38771718356 }, { "content": "#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\nstruct CanisterPtr(u32);\n\npub type DetHashSet<K> = HashSet<K, BuildHasherDefault<WyHash>>;\n\npub type DetHashMap<K, V> = HashMap<K, V, BuildHasherDefault<WyHash>>;\n\n\n", "file_path": "src/index.rs", "rank": 1, "score": 88478.53263966288 }, { "content": "/// Returns the canister id as a blob.\n\npub fn id() -> CanisterId {\n\n let len: u32 = unsafe { ic0::canister_self_size() };\n\n let mut bytes = vec![0; len as usize];\n\n unsafe {\n\n ic0::canister_self_copy(bytes.as_mut_ptr() as u32, 0, len);\n\n }\n\n CanisterId(bytes)\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 2, "score": 80186.14815992407 }, { "content": "pub fn to_input<Input: DeserializeOwned>(inp: Vec<u8>) -> Input {\n\n let mut d = IDLDeserialize::new(&inp).unwrap();\n\n let v = d.get_value().unwrap();\n\n d.done().unwrap();\n\n v\n\n}\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 3, "score": 63344.717073733926 }, { "content": "#[derive(Default)]\n\nstruct TermData {\n\n frequency: usize,\n\n inverted_index: RoaringBitmap,\n\n}\n\n\n\npub struct SearchIndexer {\n\n key_to_doc_id: DetHashMap<Key, 
DocumentId>,\n\n doc_id_to_key: DetHashMap<DocumentId, Key>,\n\n terms: DetHashMap<Term, TermData>,\n\n stemmer: Stemmer,\n\n}\n\n\n\nimpl Default for SearchIndexer {\n\n fn default() -> Self {\n\n Self {\n\n key_to_doc_id: DetHashMap::default(),\n\n doc_id_to_key: DetHashMap::default(),\n\n terms: DetHashMap::default(),\n\n stemmer: Stemmer::create(Algorithm::English),\n\n }\n", "file_path": "src/search.rs", "rank": 4, "score": 61560.892788827776 }, { "content": "/// Calls another canister and executes one of the callbacks.\n\npub fn call_with_callbacks(\n\n id: CanisterId,\n\n method: &str,\n\n data: &[u8],\n\n reply: impl FnOnce() + 'static,\n\n reject: impl FnOnce() + 'static,\n\n) -> i32 {\n\n type Closures = (Box<dyn FnOnce() + 'static>, Box<dyn FnOnce() + 'static>);\n\n fn on_reply(env: u32) {\n\n let closure = unsafe { Box::from_raw(env as *mut Closures) }.0;\n\n closure();\n\n }\n\n fn on_reject(env: u32) {\n\n let closure = unsafe { Box::from_raw(env as *mut Closures) }.1;\n\n closure();\n\n }\n\n let callee = id.0;\n\n let boxed_closures: Box<Closures> = Box::new((Box::new(reply), Box::new(reject)));\n\n let env = Box::into_raw(boxed_closures);\n\n\n", "file_path": "src/dfn_core.rs", "rank": 5, "score": 54442.48586923211 }, { "content": "/// Calls another canister and returns a future.\n\npub fn call(id: CanisterId, method: &str, data: &[u8]) -> impl Future<Output = FutureResult> {\n\n // the callback from IC dereferences the future from a raw pointer, assigns the\n\n // result and calls the waker\n\n fn callback(future_ptr: u32) {\n\n let waker = {\n\n let ref_counted =\n\n unsafe { RefCounted::from_raw(future_ptr as *const RefCell<CallFuture>) };\n\n let mut future = ref_counted.borrow_mut();\n\n future.result = Some(match reject_code() {\n\n 0 => Ok(arg_data()),\n\n n => Err((n, reject_message())),\n\n });\n\n future.waker.clone()\n\n };\n\n waker.expect(\"there is a waker\").wake();\n\n };\n\n let callee = id.0;\n\n let future_for_closure = 
RefCounted::new(CallFuture::new());\n\n let future = future_for_closure.clone();\n\n let future_ptr = future_for_closure.into_raw();\n", "file_path": "src/dfn_core.rs", "rank": 6, "score": 53303.998411702974 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrUsedBytes = Box<dyn Fn(CanisterId) -> usize>;\n", "file_path": "src/index.rs", "rank": 7, "score": 50520.909770471524 }, { "content": "/// Returns the rejection code.\n\npub fn reject_code() -> i32 {\n\n unsafe { ic0::msg_reject_code() }\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 8, "score": 50418.38028017041 }, { "content": "/// Returns the rejection message.\n\npub fn reject_message() -> String {\n\n let len: u32 = unsafe { ic0::msg_reject_msg_size() };\n\n let mut bytes = vec![0; len as usize];\n\n unsafe {\n\n ic0::msg_reject_msg_copy(bytes.as_mut_ptr() as u32, 0, len);\n\n }\n\n String::from_utf8_lossy(&bytes).to_string()\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 9, "score": 50418.38028017041 }, { "content": "/// Replies with the given byte array.\n\n/// Note, currently we do not support chunkwise assemlbing of the response.\n\npub fn reply(payload: &[u8]) {\n\n unsafe {\n\n ic0::msg_reply_data_append(payload.as_ptr() as u32, payload.len() as u32);\n\n ic0::msg_reply();\n\n }\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 10, "score": 48172.10060543954 }, { "content": "/// Returns the caller of the current call.\n\npub fn caller() -> Vec<u8> {\n\n let len: u32 = unsafe { ic0::msg_caller_size() };\n\n let mut bytes = vec![0; len as usize];\n\n unsafe {\n\n ic0::msg_caller_copy(bytes.as_mut_ptr() as u32, 0, len);\n\n }\n\n bytes\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 11, "score": 48172.10060543954 }, { "content": "pub trait OverCandid<Arguments, Output> {\n\n fn over_candid(self, _: IDLDeserialize<'_>) -> Vec<u8>;\n\n}\n\n\n\nimpl<C: CandidType, F: FnOnce() -> C> OverCandid<(), C> for F {\n\n fn over_candid(self, _: IDLDeserialize<'_>) -> Vec<u8> 
{\n\n from_output(self())\n\n }\n\n}\n\n\n\nimpl<A1: DeserializeOwned, C: CandidType, F: FnOnce(A1) -> C> OverCandid<(A1,), C> for F {\n\n fn over_candid(self, d: IDLDeserialize<'_>) -> Vec<u8> {\n\n let ((a1,), _) = extract_arg_1(d);\n\n from_output(self(a1))\n\n }\n\n}\n\n\n\nimpl<A1: DeserializeOwned, A2: DeserializeOwned, C: CandidType, F: FnOnce(A1, A2) -> C>\n\n OverCandid<(A1, A2), C> for F\n\n{\n", "file_path": "src/dfn_candid.rs", "rank": 12, "score": 48172.10060543954 }, { "content": "/// Traps with the given message.\n\npub fn trap_with(message: &str) {\n\n unsafe {\n\n ic0::trap(message.as_ptr() as u32, message.len() as u32);\n\n }\n\n}\n\n\n\n// This module contains all mechanisms required to enable asynchronous\n\n// programming in Rust, based on native async Rust capabilities:\n\n//\n\n// - the future returned by the asynchronous System API call, and\n\n// - the kickstarting/waker implementations to advance top level futures on\n\n// every inter-canister callback call.\n\n\n\nuse std::{\n\n cell::RefMut,\n\n pin::Pin,\n\n rc::Rc,\n\n task::{Context, Poll, Waker},\n\n};\n\n\n", "file_path": "src/dfn_core.rs", "rank": 13, "score": 48172.10060543954 }, { "content": "/// Returns the argument extracted from the message payload.\n\npub fn arg_data() -> Vec<u8> {\n\n let len: u32 = unsafe { ic0::msg_arg_data_size() };\n\n let mut bytes = vec![0; len as usize];\n\n unsafe {\n\n ic0::msg_arg_data_copy(bytes.as_mut_ptr() as u32, 0, len);\n\n }\n\n bytes\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 14, "score": 47107.513946503575 }, { "content": "/// Rejects the current call with the given message.\n\npub fn reject(err_message: &str) {\n\n let err_message = err_message.as_bytes();\n\n unsafe {\n\n ic0::msg_reject(err_message.as_ptr() as u32, err_message.len() as u32);\n\n }\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 15, "score": 47107.513946503575 }, { "content": "pub fn biguint_to_sha256_digest(bigint: &BigUint) -> Sha256Digest {\n\n let mut 
key = bigint.to_radix_be(256);\n\n key.resize(32, 0);\n\n *Sha256Digest::from_slice(&key)\n\n}\n\n\n\nuse lazy_static::lazy_static;\n\n\n\nlazy_static! {\n\n pub static ref SHA256_DIGEST_MIN: Sha256Digest = biguint_to_sha256_digest(&BigUint::from(0u64));\n\n pub static ref SHA256_DIGEST_MAX: Sha256Digest =\n\n biguint_to_sha256_digest(&BigUint::parse_bytes(b\"f\".repeat(64).as_slice(), 16).unwrap());\n\n}\n\n\n\npub(crate) fn sha256_range_half(\n\n sha256_lower: &Sha256Digest,\n\n sha256_upper: &Sha256Digest,\n\n) -> Sha256Digest {\n\n let i_prev = biguint_from_slice256(sha256_lower.as_slice());\n\n let i = biguint_from_slice256(sha256_upper.as_slice());\n", "file_path": "src/hashring_sha256.rs", "rank": 16, "score": 41709.769927113986 }, { "content": "pub fn sha256_digest_to_biguint(val: Sha256Digest) -> BigUint {\n\n biguint_from_slice256(val.as_slice())\n\n}\n\n\n", "file_path": "src/hashring_sha256.rs", "rank": 17, "score": 41709.769927113986 }, { "content": "fn extract_arg_3<A1: DeserializeOwned, A2: DeserializeOwned, A3: DeserializeOwned>(\n\n de: IDLDeserialize<'_>,\n\n) -> ((A1, A2, A3), IDLDeserialize<'_>) {\n\n let ((a1, a2), mut de) = extract_arg_2(de);\n\n let a_new = de.get_value().unwrap();\n\n ((a1, a2, a_new), de)\n\n}\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 18, "score": 39508.703349400836 }, { "content": "fn extract_arg_1<A1: DeserializeOwned>(mut de: IDLDeserialize<'_>) -> ((A1,), IDLDeserialize<'_>) {\n\n let a1 = de.get_value().unwrap();\n\n ((a1,), de)\n\n}\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 19, "score": 38879.65911159082 }, { "content": "/// Must be called on every top-level future corresponding to a method call of a\n\n/// canister by the IC.\n\n///\n\n/// Saves the pointer to the future on the heap and kickstarts the future by\n\n/// polling it once. During the polling we also need to provide the waker\n\n/// callback which is triggered after the future made progress. The waker would\n\n/// then. 
The waker would then poll the future one last time to advance it to\n\n/// the final state. For that, we pass the future pointer to the waker, so that\n\n/// it can be restored into a box from a raw pointer and then dropped if not\n\n/// needed anymore.\n\n///\n\n/// Technically, we store 2 pointers on the heap: the pointer to the future\n\n/// itself, and a pointer to that pointer. The reason for this is that the waker\n\n/// API requires us to pass one thin pointer, while a a pointer to a `dyn Trait`\n\n/// can only be fat. So we create one additional thin pointer, pointing to the\n\n/// fat pointer and pass it instead.\n\npub fn kickstart<F: 'static + Future<Output = ()>>(future: F) {\n\n let future_ptr = Box::into_raw(Box::new(future));\n\n let future_ptr_ptr: *mut *mut dyn Future<Output = ()> = Box::into_raw(Box::new(future_ptr));\n\n let mut pinned_future = unsafe { Pin::new_unchecked(&mut *future_ptr) };\n\n if let Poll::Ready(_) = pinned_future\n\n .as_mut()\n\n .poll(&mut Context::from_waker(&waker::waker(\n\n future_ptr_ptr as *const (),\n\n )))\n\n {\n\n unsafe {\n\n let _ = Box::from_raw(future_ptr);\n\n let _ = Box::from_raw(future_ptr_ptr);\n\n }\n\n }\n\n}\n\n\n\n// This module conatins the implementation of a waker we're using for waking\n\n// top-level futures (the ones returned by canister methods). The waker polls\n\n// the future once and re-pins it on the heap, if it's pending. If the future is\n", "file_path": "src/dfn_core.rs", "rank": 20, "score": 38757.2230849286 }, { "content": "/// Must be called on every top-level future corresponding to a method call of a\n\n/// canister by the IC.\n\n///\n\n/// Saves the pointer to the future on the heap and kickstarts the future by\n\n/// polling it once. During the polling we also need to provide the waker\n\n/// callback which is triggered after the future made progress. The waker would\n\n/// then. The waker would then poll the future one last time to advance it to\n\n/// the final state. 
For that, we pass the future pointer to the waker, so that\n\n/// it can be restored into a box from a raw pointer and then dropped if not\n\n/// needed anymore.\n\n///\n\n/// Technically, we store 2 pointers on the heap: the pointer to the future\n\n/// itself, and a pointer to that pointer. The reason for this is that the waker\n\n/// API requires us to pass one thin pointer, while a a pointer to a `dyn Trait`\n\n/// can only be fat. So we create one additional thin pointer, pointing to the\n\n/// fat pointer and pass it instead.\n\npub fn kickstart<F: 'static + Future<Output = ()>>(future: F) {\n\n let future_ptr = Box::into_raw(Box::new(future));\n\n let future_ptr_ptr: *mut *mut dyn Future<Output = ()> = Box::into_raw(Box::new(future_ptr));\n\n let mut pinned_future = unsafe { Pin::new_unchecked(&mut *future_ptr) };\n\n if let Poll::Ready(_) = pinned_future\n\n .as_mut()\n\n .poll(&mut Context::from_waker(&waker::waker(\n\n future_ptr_ptr as *const (),\n\n )))\n\n {\n\n unsafe {\n\n let _ = Box::from_raw(future_ptr);\n\n let _ = Box::from_raw(future_ptr_ptr);\n\n }\n\n }\n\n}\n\n\n\n// This module conatins the implementation of a waker we're using for waking\n\n// top-level futures (the ones returned by canister methods). The waker polls\n\n// the future once and re-pins it on the heap, if it's pending. 
If the future is\n", "file_path": "src/dfn_futures.rs", "rank": 21, "score": 38757.2230849286 }, { "content": "pub fn from_output<Output: CandidType>(payload: Output) -> Vec<u8> {\n\n candid::ser::IDLBuilder::new()\n\n .arg(&payload)\n\n .unwrap()\n\n .serialize_to_vec()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 22, "score": 37943.922055183255 }, { "content": "/// This allows you to wrap regular rust functions and have them take and return\n\n/// candid\n\n/// Currently it will only return one argument\n\npub fn candid<A, O, F: OverCandid<A, O>>(f: F) {\n\n dfn_core::bytes(|bs| {\n\n let de = candid::de::IDLDeserialize::new(&bs).unwrap();\n\n f.over_candid(de)\n\n })\n\n}\n\n\n\n// TODO turn this into a trait with call_json, unless the type complexity is\n\n// more trouble than it's worth\n\npub async fn call_candid<Input: DeserializeOwned, Output: CandidType>(\n\n canister_id: Vec<u8>,\n\n method_name: &str,\n\n payload: Output,\n\n) -> Result<Input, (i32, String)> {\n\n let result = dfn_core::call(\n\n dfn_core::CanisterId(canister_id),\n\n method_name,\n\n &from_output(payload),\n\n )\n\n .await;\n\n result.map(to_input)\n\n}\n", "file_path": "src/dfn_candid.rs", "rank": 23, "score": 37939.07176137015 }, { "content": "/// Prints the given message.\n\npub fn print<S: std::convert::AsRef<str>>(s: S) {\n\n let s = s.as_ref();\n\n unsafe {\n\n ic0::debug_print(s.as_ptr() as u32, s.len() as u32);\n\n }\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 24, "score": 37855.05628571828 }, { "content": "/// Useful for writing endpoints that take a set of bytes and return a set of\n\n/// bytes Check test/reverse_blob_bin.rs for a usage example\n\npub fn bytes<F: FnOnce(Vec<u8>) -> Vec<u8>>(f: F) {\n\n let bs = arg_data();\n\n let res = f(bs);\n\n reply(&res);\n\n}\n", "file_path": "src/dfn_core.rs", "rank": 25, "score": 36264.916127476456 }, { "content": "fn extract_arg_2<A1: DeserializeOwned, A2: DeserializeOwned>(\n\n de: 
IDLDeserialize<'_>,\n\n) -> ((A1, A2), IDLDeserialize<'_>) {\n\n let ((a1,), mut de) = extract_arg_1(de);\n\n let a_new = de.get_value().unwrap();\n\n ((a1, a_new), de)\n\n}\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 26, "score": 36236.03731328285 }, { "content": "/// The mode with which a canister is installed.\n\n#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq, Hash, CandidType)]\n\npub enum CanisterInstallMode {\n\n /// A fresh install of a new canister.\n\n #[serde(rename = \"install\")]\n\n Install,\n\n /// Reinstalling a canister that was already installed.\n\n #[serde(rename = \"reinstall\")]\n\n Reinstall,\n\n /// Upgrade an existing canister.\n\n #[serde(rename = \"upgrade\")]\n\n Upgrade,\n\n}\n\n\n\n#[derive(CandidType, Deserialize, Debug)]\n\npub struct InstallCodeArgs {\n\n pub mode: CanisterInstallMode,\n\n pub canister_id: candid::Principal,\n\n pub wasm_module: Vec<u8>,\n\n pub arg: Vec<u8>,\n", "file_path": "src/canister_management.rs", "rank": 27, "score": 34941.36363173804 }, { "content": "use crate::CanisterId;\n\nuse candid::CandidType;\n\n\n\n#[derive(CandidType, serde::Deserialize, Debug)]\n", "file_path": "src/canister_management.rs", "rank": 28, "score": 34941.04682362181 }, { "content": " pub compute_allocation: Option<u64>,\n\n pub memory_allocation: Option<u64>,\n\n}\n\nuse std::convert::TryFrom;\n\n\n\npub async fn subnet_install_canister_code(\n\n canister_id: CanisterId,\n\n wasm_module: Vec<u8>,\n\n) -> Result<(), String> {\n\n if wasm_module.is_empty() {\n\n return Err(\"Empty wasm module provided for canister installation\".to_string());\n\n }\n\n\n\n let management_canister = ic_cdk::CanisterId::from(Vec::new());\n\n\n\n let install_code_args = InstallCodeArgs {\n\n mode: CanisterInstallMode::Install,\n\n canister_id: candid::Principal::try_from(canister_id.0)\n\n .expect(\"Failed to make principal from canister_id\"),\n\n wasm_module,\n", "file_path": "src/canister_management.rs", "rank": 29, "score": 
34939.71467090449 }, { "content": " arg: Vec::new(),\n\n compute_allocation: None,\n\n memory_allocation: None,\n\n };\n\n\n\n match ic_cdk::call_no_return(management_canister, \"install_code\", Some(install_code_args)).await\n\n {\n\n Ok(res) => res,\n\n Err(err) => {\n\n ic_cdk::println!(\"Error invoking install_code: {:?} {}\", err.0, err.1);\n\n return Err(err.1);\n\n }\n\n };\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn subnet_raw_rand() -> Result<Vec<u8>, String> {\n\n let management_canister = ic_cdk::CanisterId::from(Vec::new());\n\n let rnd_buffer: Vec<u8> = match ic_cdk::call(management_canister, \"raw_rand\", Some(())).await {\n", "file_path": "src/canister_management.rs", "rank": 30, "score": 34935.72693846325 }, { "content": " Ok(result) => result,\n\n Err(err) => {\n\n ic_cdk::println!(\"Error invoking raw_rand: {:?} {}\", err.0, err.1);\n\n return Err(err.1);\n\n }\n\n };\n\n\n\n Ok(rnd_buffer.to_vec())\n\n}\n", "file_path": "src/canister_management.rs", "rank": 31, "score": 34930.36736215424 }, { "content": "#[query]\n\nfn used_bytes() -> u64 {\n\n let search = storage::get::<SearchIndexer>();\n\n\n\n search.used_bytes() as u64\n\n}\n\n\n", "file_path": "src/bigmap_search.rs", "rank": 32, "score": 31252.303046017067 }, { "content": "#[query]\n\nfn used_bytes(_: ()) -> u64 {\n\n let bm_data = storage::get::<DataBucket>();\n\n\n\n bm_data.used_bytes() as u64\n\n}\n\n\n", "file_path": "src/bigmap_data.rs", "rank": 33, "score": 31252.303046017067 }, { "content": "#[update]\n\nfn set_used_bytes_threshold(threshold: u32) {\n\n let bigmap_idx = storage::get_mut::<BigmapIdx>();\n\n\n\n bigmap_idx.set_used_bytes_threshold(threshold);\n\n}\n\n\n\n#[update]\n\nasync fn maintenance() -> String {\n\n let bigmap_idx = storage::get_mut::<BigmapIdx>();\n\n\n\n bigmap_idx.maintenance().await\n\n}\n\n\n\n#[query]\n\nasync fn status() -> String {\n\n let bigmap_idx = storage::get::<BigmapIdx>();\n\n\n\n bigmap_idx.status().await\n\n}\n\n\n", "file_path": 
"src/bigmap_index.rs", "rank": 34, "score": 28024.35773679214 }, { "content": "const getCanister = (\n\n canisterName,\n\n host = DEFAULT_HOST,\n\n keypair = generateKeyPair()\n\n) => {\n\n const candid = eval(getCandid(canisterName));\n\n const canisterId = getCanisterId(canisterName);\n\n const principal = Principal.selfAuthenticating(keypair.publicKey);\n\n const config = { fetch, host, principal };\n\n if (credentials.name && credentials.password) {\n\n config.credentials = credentials;\n\n }\n\n const agent = new HttpAgent(config);\n\n agent.addTransform(makeNonceTransform());\n\n agent.addTransform(makeExpiryTransform(5 * 60 * 1000));\n\n agent.addTransform(makeAuthTransform(keypair));\n\n\n\n return makeActorFactory(candid)({ canisterId, agent });\n", "file_path": "cli/bigmap_functions.js", "rank": 35, "score": 27714.686949088544 }, { "content": "const DATA_CANISTER_ACTORS = new Map(); // A map of CanisterId => DataCanisterActor\n", "file_path": "cli/bigmap_functions.js", "rank": 36, "score": 26792.259480966884 }, { "content": "const getCanisterPath = (canisterName) => {\n\n return path.join(\n\n outputRoot,\n\n \"canisters\",\n\n canisterName\n\n );\n", "file_path": "cli/bigmap_functions.js", "rank": 37, "score": 26792.259480966884 }, { "content": "const getCanisterId = (canisterName) => {\n\n const canisterIdsPath = networkName === 'local' ? 
outputRoot : '.';\n\n let manifest = JSON.parse(\n\n fs.readFileSync(path.resolve(canisterIdsPath, 'canister_ids.json'))\n\n );\n\n return manifest[canisterName][networkName];\n", "file_path": "cli/bigmap_functions.js", "rank": 38, "score": 26792.259480966884 }, { "content": "const bigMapDataCanisterIdToActor = async (canisterId) => {\n\n let cacheLookup = DATA_CANISTER_ACTORS.get(canisterId);\n\n if (cacheLookup) {\n\n return cacheLookup;\n\n } else {\n\n let canisterActor = getBigMapDataActor(canisterId);\n\n DATA_CANISTER_ACTORS.set(canisterId, canisterActor);\n\n return canisterActor;\n\n }\n", "file_path": "cli/bigmap_functions.js", "rank": 39, "score": 24364.036174616827 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrHoldsKey = Box<dyn Fn(CanisterId, &Key) -> bool>;\n", "file_path": "src/index.rs", "rank": 40, "score": 24359.944581127264 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrList = Box<dyn Fn(CanisterId, &Key) -> Vec<Key>>;\n", "file_path": "src/index.rs", "rank": 41, "score": 24359.944581127264 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrRemoveFromSearchIndex = Box<dyn Fn(CanisterId, &Key)>;\n", "file_path": "src/index.rs", "rank": 42, "score": 24359.944581127264 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrAddToSearchIndex = Box<dyn Fn(CanisterId, &Key, &String)>;\n", "file_path": "src/index.rs", "rank": 43, "score": 23644.43069524631 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrDeleteEntries = Box<dyn Fn(CanisterId, &Vec<Vec<u8>>)>;\n\n\n\n#[derive(Default)]\n\npub struct BigmapIdx {\n\n idx: Vec<CanisterId>, // indirection for CanisterId, to avoid many copies of CanisterIds\n\n hash_ring: hashring_sha256::HashRing<CanisterPtr>,\n\n now_rebalancing_src_dst: Option<(CanisterPtr, CanisterPtr)>,\n\n is_maintenance_active: bool,\n\n creating_data_canister: bool,\n\n creating_search_canister: bool,\n\n batch_limit_bytes: u64,\n\n 
canister_available_queue: VecDeque<CanisterId>,\n\n used_bytes_threshold: u32,\n\n used_bytes_total: u64,\n\n search_canisters: Vec<CanisterId>,\n\n data_bucket_canister_wasm_binary: Vec<u8>,\n\n search_canister_wasm_binary: Vec<u8>,\n\n id: CanisterId,\n\n // Testing functions\n\n #[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "src/index.rs", "rank": 44, "score": 23644.43069524631 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrSearchKeysByQuery = Box<dyn Fn(CanisterId, &String) -> Vec<Key>>;\n", "file_path": "src/index.rs", "rank": 45, "score": 22969.750369522342 }, { "content": "type DataBucketMap = Arc<RwLock<IndexMap<CanisterId, DataBucket>>>;\n\n\n\n#[actix_rt::test]\n\nasync fn bigmap_put_get() {\n\n // Insert key&value pairs and then get the value, and verify the correctness\n\n\n\n let num_data_canisters_initial = 11;\n\n\n\n let (mut bm_idx, db_map) = alloc_bigmap_index_and_data(num_data_canisters_initial).await;\n\n\n\n bm_idx.maintenance().await;\n\n\n\n for i in 0..1001 {\n\n let key = format!(\"key-{}\", i).into_bytes();\n\n let value = vec![(i % 256) as u8; 200_000];\n\n\n\n let can_data_id = bm_idx.lookup_put(&key).unwrap();\n\n assert_ne!(can_data_id, Default::default());\n\n db_map\n\n .write()\n", "file_path": "src/index/tests.rs", "rank": 46, "score": 22969.750369522342 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrSetRange = Box<dyn Fn(CanisterId, Sha256Digest, Sha256Digest)>;\n", "file_path": "src/index.rs", "rank": 47, "score": 22969.750369522342 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrBatchAddToSearchIndex = Box<dyn Fn(CanisterId, &Vec<(Key, String)>) -> u64>;\n", "file_path": "src/index.rs", "rank": 48, "score": 21729.663305807844 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrPutRelocationBatch = Box<dyn Fn(CanisterId, &Vec<(Sha2Vec, Key, Val)>) -> u64>;\n", "file_path": "src/index.rs", "rank": 49, "score": 21158.512186873566 }, { "content": 
"#[cfg(not(target_arch = \"wasm32\"))]\n\ntype FnPtrGetRelocationBatch = Box<dyn Fn(CanisterId, u64) -> Vec<(Sha2Vec, Key, Val)>>;\n", "file_path": "src/index.rs", "rank": 50, "score": 21158.512186873566 }, { "content": "// pub type CanisterId = u64;\n\nuse crate::data::DataBucket;\n\nuse crate::index::BigmapIdx;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::From;\n\n\n\n#[derive(Clone, Default, Hash, Serialize, Deserialize, candid::CandidType, Eq, PartialEq)]\n\n#[repr(transparent)]\n\npub struct CanisterId(pub Vec<u8>);\n\n\n\nimpl From<DataBucket> for CanisterId {\n\n fn from(item: DataBucket) -> Self {\n\n Self {\n\n 0: item.canister_id().0,\n\n }\n\n }\n\n}\n\n\n\nimpl From<u64> for CanisterId {\n\n fn from(item: u64) -> Self {\n", "file_path": "src/lib_native.rs", "rank": 51, "score": 14.097668457289203 }, { "content": "// pub type CanisterId = Vec<u8>;\n\nuse crate::data::DataBucket;\n\nuse crate::index::BigmapIdx;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::From;\n\n\n\n#[derive(Clone, Default, Hash, Serialize, Deserialize, candid::CandidType, Eq, PartialEq)]\n\n#[repr(transparent)]\n\npub struct CanisterId(pub Vec<u8>);\n\n\n\nimpl From<DataBucket> for CanisterId {\n\n fn from(item: DataBucket) -> Self {\n\n Self {\n\n 0: item.canister_id().0,\n\n }\n\n }\n\n}\n\n\n\nimpl From<BigmapIdx> for CanisterId {\n\n fn from(item: BigmapIdx) -> Self {\n", "file_path": "src/lib_wasm32.rs", "rank": 52, "score": 13.91946524104262 }, { "content": "\n\n let hr_idx = self\n\n .hash_ring\n\n .get_idx_key_node_for_node(&ptr_new)\n\n .unwrap()\n\n .0;\n\n self.hash_ring.get_key_range_for_idx(hr_idx)\n\n }\n\n\n\n pub fn set_used_bytes_threshold(&mut self, used_bytes_threshold: u32) {\n\n self.used_bytes_threshold = used_bytes_threshold;\n\n }\n\n\n\n pub fn set_canister_id(&mut self, can_id: CanisterId) {\n\n self.id = can_id\n\n }\n\n\n\n pub fn canister_id(&self) -> CanisterId {\n\n self.id.clone()\n\n }\n", "file_path": "src/index.rs", 
"rank": 53, "score": 11.041675890628616 }, { "content": "mod canister_management;\n\npub use canister_management::{\n\n subnet_create_new_canister, subnet_install_canister_code, subnet_raw_rand,\n\n};\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nmod lib_native;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use lib_native::*;\n\n\n\npub type Key = Vec<u8>;\n\npub type Val = Vec<u8>;\n\npub type Sha2Vec = Vec<u8>;\n\n\n\npub type Sha256Digest = GenericArray<u8, <Sha256 as Digest>::OutputSize>;\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 10.869855589151026 }, { "content": " pub fn used_bytes(&self) -> usize {\n\n // We use DataBucket with BTreeMap, and the size is precalculated\n\n // For DataBucket configuration with HashMap, usage can be calculated as:\n\n // (key_size + value_size + 8) bytes * 1.1\n\n // https://github.com/servo/servo/issues/6908\n\n self.used_bytes\n\n }\n\n\n\n pub fn canister_id(&self) -> CanisterId {\n\n self.id.clone()\n\n }\n\n\n\n pub fn set_canister_id(&mut self, can_id: CanisterId) {\n\n self.id = can_id\n\n }\n\n\n\n pub fn get_key_hash_range(&self) -> Option<(Sha256Digest, Sha256Digest)> {\n\n match (self.entries.keys().min(), self.entries.keys().max()) {\n\n (Some(min), Some(max)) => Some((*min, *max)),\n\n _ => None,\n", "file_path": "src/data.rs", "rank": 55, "score": 10.23702809853589 }, { "content": "\n\n for can_id in self.search_canisters.iter() {\n\n let used_bytes = self.qcall_canister_used_bytes(can_id).await as u32;\n\n self.used_bytes_total += used_bytes as u64;\n\n }\n\n\n\n println!(\"Total capacity used {}\", ByteSize(self.used_bytes_total));\n\n\n\n self.is_maintenance_active = false;\n\n\n\n serde_json_wasm::to_string(&Status {\n\n status: \"Good\",\n\n message: \"Finished maintenance\",\n\n })\n\n .unwrap()\n\n }\n\n\n\n pub async fn status(&self) -> String {\n\n #[derive(serde::Serialize, Default)]\n\n struct DataBucketStatus {\n", "file_path": "src/index.rs", "rank": 56, "score": 9.260298260637123 }, 
{ "content": "use crate::{calc_sha256, sha256_digest_from_vec, CanisterId, Key, Sha256Digest, Sha2Vec, Val};\n\n#[cfg(target_arch = \"wasm32\")]\n\nuse ic_cdk::println;\n\nuse std::collections::BTreeMap;\n\n// use std::hash::{BuildHasherDefault, Hash, Hasher};\n\n// use wyhash::WyHash;\n\n\n\n// pub type DetHashMap<K, V> = HashMap<K, V, BuildHasherDefault<WyHash>>;\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct DataBucket {\n\n pub entries: BTreeMap<Sha256Digest, (Key, Val)>, // Can be DetHashMap\n\n range_start: Sha256Digest, // This DataBucket holds entries\n\n range_end: Sha256Digest, // in [range_start..range_end]\n\n used_bytes: usize,\n\n bytes_to_send: usize,\n\n id: CanisterId,\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "src/data.rs", "rank": 57, "score": 8.81740889303152 }, { "content": " | Canister | +------->---------------+\n\n +-------+-------+ | +--------------+\n\n ^ | | BigMap Data |\n\n | +------->+ Bucket Can. |\n\n | | +--------------+\n\n +-------+-------+ | +--------------+\n\n | BigMap client +---------+--------+ BigMap Data |\n\n | App Canister | | Bucket Can. 
|\n\n +---------------+ +--------------+\n\n .\n\n .\n\n .\n\n\n\n********************************************************************/\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nmod lib_wasm32;\n\n#[cfg(target_arch = \"wasm32\")]\n\npub use lib_wasm32::*;\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 8.757752914151155 }, { "content": "use crate::dfn_core;\n\nuse candid::de::IDLDeserialize;\n\nuse candid::CandidType;\n\nuse serde::de::DeserializeOwned;\n\n\n", "file_path": "src/dfn_candid.rs", "rank": 59, "score": 8.674267525782042 }, { "content": " println!(\"Error adding canisters: {}\", err);\n\n }\n\n }\n\n\n\n fn print_canister_utilization(&self, can_id: &CanisterId, used_bytes: u64) {\n\n println!(\n\n \"CanisterId {} used {}\",\n\n can_id.clone(),\n\n ByteSize(used_bytes)\n\n );\n\n }\n\n\n\n // Returns a randomly generated and unused key\n\n pub async fn get_random_key(&self) -> String {\n\n let time_bytes = ic_cdk::time().to_be_bytes();\n\n let mut rand_key = calc_sha256(&time_bytes.to_vec());\n\n for i in 0..100u32 {\n\n // Only try this a limited number of times\n\n let rand_key_hash = calc_sha256(&rand_key);\n\n let (_, can_ptr) = match self.hash_ring.get_idx_node_for_key(&rand_key_hash) {\n", "file_path": "src/index.rs", "rank": 60, "score": 8.493807813095419 }, { "content": "// Load the allocator\n\ncfg_if::cfg_if! 
{\n\n // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global\n\n // allocator.\n\n if #[cfg(feature = \"wee_alloc\")] {\n\n use wee_alloc;\n\n #[global_allocator]\n\n static ALLOC: wee_alloc::WeeAlloc<'_> = wee_alloc::WeeAlloc::INIT;\n\n }\n\n}\n\n\n\n// use futures::{CallFuture, RefCounted};\n\nuse std::{cell::RefCell, future::Future};\n\n\n\n#[derive(PartialEq, Clone, Eq)]\n\n#[repr(transparent)]\n\npub struct CanisterId(pub Vec<u8>);\n\n\n\n#[allow(dead_code)]\n\n#[cfg(target_arch = \"wasm32\")]\n", "file_path": "src/dfn_core.rs", "rank": 61, "score": 8.40824401789871 }, { "content": " let used_bytes = self.qcall_canister_used_bytes(can_id).await as u32;\n\n status.data_buckets.push(DataBucketStatus {\n\n canister_id: can_id.to_string(),\n\n used_bytes,\n\n });\n\n status.used_bytes_total += used_bytes as u64;\n\n }\n\n\n\n for can_id in self.search_canisters.iter() {\n\n let used_bytes = self.qcall_canister_used_bytes(can_id).await as u32;\n\n status.search_canisters.push(SearchCanisterStatus {\n\n canister_id: can_id.to_string(),\n\n used_bytes,\n\n });\n\n status.used_bytes_total += used_bytes as u64;\n\n }\n\n\n\n serde_json_wasm::to_string(&status).unwrap()\n\n }\n\n\n", "file_path": "src/index.rs", "rank": 62, "score": 8.398953566083344 }, { "content": "\n\n self.print_canister_utilization(&can_id, used_bytes);\n\n\n\n if used_bytes as u32 > self.used_bytes_threshold {\n\n println!(\n\n \"BigMap Index: CanisterId {} used bytes {} is over threshold {}\",\n\n can_id, used_bytes, self.used_bytes_threshold\n\n );\n\n\n\n // This canister should be rebalanced. 
We'll do these steps:\n\n // - Create destination canister, to which half of the data from the source canister will go\n\n // - Move batches of objects from source canister to the destination canister\n\n // We're just starting to rebalance, create the destination canister\n\n let src_canister_ptr = can_ptr.clone();\n\n let src_canister = self.can_ptr_to_canister_id(&src_canister_ptr);\n\n let dst_canister = self\n\n .create_data_bucket_canister()\n\n .await\n\n .expect(\"create_data_bucket_canister failed\");\n\n\n", "file_path": "src/index.rs", "rank": 63, "score": 8.310365502134749 }, { "content": "/// A reference counter wrapper we use with the CallFuture.\n\n/// This is required, because the future we return from the `call` method can\n\n/// either have two owners (the callback closure and the canister runtime) if\n\n/// the underlying system call succeeded, or just one (the canister runtime) it\n\n/// the system call failed.\n\npub struct RefCounted<T>(Rc<RefCell<T>>);\n\n\n\nimpl<T> RefCounted<T> {\n\n pub fn new(val: T) -> Self {\n\n RefCounted(Rc::new(RefCell::new(val)))\n\n }\n\n pub fn into_raw(self) -> *const RefCell<T> {\n\n Rc::into_raw(self.0)\n\n }\n\n pub unsafe fn from_raw(ptr: *const RefCell<T>) -> Self {\n\n Self(Rc::from_raw(ptr))\n\n }\n\n pub fn borrow_mut(&self) -> RefMut<'_, T> {\n\n self.0.borrow_mut()\n\n }\n", "file_path": "src/dfn_core.rs", "rank": 64, "score": 7.947398159275512 }, { "content": " canister_id: String,\n\n used_bytes: u32,\n\n };\n\n\n\n #[derive(serde::Serialize, Default)]\n\n struct SearchCanisterStatus {\n\n canister_id: String,\n\n used_bytes: u32,\n\n };\n\n\n\n #[derive(serde::Serialize, Default)]\n\n struct Status {\n\n data_buckets: Vec<DataBucketStatus>,\n\n search_canisters: Vec<SearchCanisterStatus>,\n\n used_bytes_total: u64,\n\n };\n\n\n\n let mut status = Status::default();\n\n\n\n for can_id in self.idx.iter() {\n", "file_path": "src/index.rs", "rank": 65, "score": 7.6142607316905915 }, { "content": "use 
digest::generic_array::GenericArray;\n\nuse sha2::{Digest, Sha256};\n\npub mod data;\n\npub(crate) mod hashring;\n\n#[allow(dead_code)]\n\npub(crate) mod hashring_sha256;\n\npub mod index;\n\npub mod search;\n\n\n\n/********************************************************************\n\n ____ _ __ __\n\n + __ )(_) __ _ + \\/ + __ _ _ __\n\n | _ \\+ +/ _` + | +\\/+ |/ _` + '_ \\\n\n | +_) | | (_+ | | | | | (_+ | +_) +\n\n +____/+_+\\__, + +_+ +_+\\__,_| .__/\n\n +___/ +_+\n\n Application architecture\n\n +--------------+\n\n +---------------+ | BigMap Data |\n\n | BigMap Index | | Bucket Can. |\n", "file_path": "src/lib.rs", "rank": 66, "score": 7.593805773222115 }, { "content": " self.is_maintenance_active = true;\n\n\n\n if let Err(_) = self.ensure_at_least_one_data_canister().await {\n\n self.is_maintenance_active = false;\n\n return serde_json_wasm::to_string(&Status {\n\n status: \"Unknown\",\n\n message: \"Error trying to ensure at least one data canister\",\n\n })\n\n .unwrap();\n\n }\n\n\n\n println!(\"BigMap Index: starting maintenance\");\n\n\n\n self.used_bytes_total = 0;\n\n\n\n for i in 0..self.idx.len() {\n\n let can_id = self.idx[i].clone();\n\n let can_ptr = CanisterPtr { 0: i as u32 };\n\n let used_bytes = self.qcall_canister_used_bytes(&can_id).await as u64;\n\n self.used_bytes_total += used_bytes;\n", "file_path": "src/index.rs", "rank": 67, "score": 7.494689608432236 }, { "content": "use crate::{\n\n calc_sha256, hashring_sha256, subnet_create_new_canister, subnet_install_canister_code,\n\n CanisterId, Key, Sha256Digest, Sha2Vec, Val,\n\n};\n\nuse bytesize::ByteSize;\n\n#[cfg(target_arch = \"wasm32\")]\n\nuse ic_cdk::println;\n\nuse std::collections::{BTreeSet, HashMap, HashSet, VecDeque};\n\nuse std::hash::BuildHasherDefault;\n\nuse wyhash::WyHash;\n\n\n\n// CanisterPtr allows us to have u64 instead of a full CanisterId\n\n// in various parts of the BigMap Index\n\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n", 
"file_path": "src/index.rs", "rank": 68, "score": 7.486899396986052 }, { "content": "///////////////////////////////////////////////////////////////\n\n// Consistent Hash implementation, based on the HashRing crate\n\n// https://github.com/jeromefroe/hashring-rs/\n\n// but using SHA-256 for keys\n\n///////////////////////////////////////////////////////////////\n\nuse crate::Sha256Digest;\n\nuse num_bigint::BigUint;\n\nuse std::cmp::Ordering;\n\n\n\n// Node is an internal struct used to encapsulate the nodes that will be added\n\n// and removed from `HashRing`\n\n#[derive(Debug, Clone)]\n\npub(crate) struct Node<T: Clone> {\n\n pub(crate) key: Sha256Digest,\n\n pub(crate) node: T,\n\n}\n\n\n\nimpl<T: Clone> Node<T> {\n\n fn new(key: Sha256Digest, node: T) -> Node<T> {\n\n Node { key, node }\n", "file_path": "src/hashring_sha256.rs", "rank": 69, "score": 7.144214397634398 }, { "content": " }\n\n\n\n pub fn set_fn_ptr_used_bytes(&mut self, fn_ptr: FnPtrUsedBytes) {\n\n self.fn_ptr_used_bytes = Some(fn_ptr);\n\n }\n\n\n\n pub fn set_fn_ptr_set_range(&mut self, fn_ptr: FnPtrSetRange) {\n\n self.fn_ptr_set_range = Some(fn_ptr);\n\n }\n\n\n\n pub fn set_fn_ptr_list(&mut self, fn_ptr: FnPtrList) {\n\n self.fn_ptr_list = Some(fn_ptr);\n\n }\n\n\n\n pub fn set_fn_ptr_holds_key(&mut self, fn_ptr: FnPtrHoldsKey) {\n\n self.fn_ptr_holds_key = Some(fn_ptr);\n\n }\n\n\n\n pub fn set_fn_ptr_get_relocation_batch(&mut self, fn_ptr: FnPtrGetRelocationBatch) {\n\n self.fn_ptr_get_relocation_batch = Some(fn_ptr);\n", "file_path": "src/index.rs", "rank": 70, "score": 6.935499123472125 }, { "content": "use crate::data::DataBucket;\n\nuse crate::index::BigmapIdx;\n\nuse crate::{CanisterId, Key, Sha256Digest, Sha2Vec, Val};\n\nuse indexmap::IndexMap;\n\nuse std::collections::BTreeSet;\n\nuse std::sync::{Arc, RwLock};\n\n// use std::time::Instant;\n\n\n", "file_path": "src/index/tests.rs", "rank": 71, "score": 6.863659161145174 }, { "content": " fn_ptr(can_id.clone(), key)\n\n }\n\n\n\n 
async fn qcall_s_can_search_keys_by_query(\n\n &self,\n\n can_id: &CanisterId,\n\n search_query: &String,\n\n ) -> Vec<Key> {\n\n let fn_ptr = self\n\n .fn_ptr_search_keys_by_query\n\n .as_ref()\n\n .expect(\"fn_ptr_search_keys_by_query is not set\");\n\n fn_ptr(can_id.clone(), search_query)\n\n }\n\n\n\n async fn qcall_canister_used_bytes(&self, can_id: &CanisterId) -> usize {\n\n let fn_ptr = self\n\n .fn_ptr_used_bytes\n\n .as_ref()\n\n .expect(\"fn_ptr_used_bytes is not set\");\n", "file_path": "src/index.rs", "rank": 72, "score": 6.8051680143943205 }, { "content": " .await\n\n .expect(\"list call failed\")\n\n }\n\n\n\n async fn qcall_canister_used_bytes(&self, can_id: &CanisterId) -> usize {\n\n ic_cdk::call(can_id.clone().0.into(), \"used_bytes\", Some(()))\n\n .await\n\n .expect(\"used_bytes call failed\")\n\n }\n\n\n\n async fn qcall_dcan_holds_key(&self, can_id: &CanisterId, key: &Key) -> bool {\n\n ic_cdk::call(can_id.clone().0.into(), \"holds_key\", Some(key))\n\n .await\n\n .expect(\"holds_key call failed\")\n\n }\n\n\n\n async fn ucall_dcan_set_range(\n\n &self,\n\n can_id: &CanisterId,\n\n range_start: Sha256Digest,\n", "file_path": "src/index.rs", "rank": 73, "score": 6.79220862863693 }, { "content": " let can_id = CanisterId::from(i);\n\n db_map\n\n .write()\n\n .unwrap()\n\n .insert(can_id.clone(), DataBucket::new(can_id));\n\n }\n\n\n\n let db_map_ref = db_map.clone();\n\n let fn_ptr_used_bytes = move |can_id: CanisterId| {\n\n db_map_ref\n\n .read()\n\n .unwrap()\n\n .get(&can_id)\n\n .unwrap()\n\n .used_bytes()\n\n };\n\n bm_idx.set_fn_ptr_used_bytes(Box::new(fn_ptr_used_bytes));\n\n\n\n let db_map_ref = db_map.clone();\n\n let fn_ptr_holds_key = move |can_id: CanisterId, key: &Key| {\n", "file_path": "src/index/tests.rs", "rank": 74, "score": 6.785556427147117 }, { "content": " \"CanisterId {}: code install failed with error {}\",\n\n new_can_id, err\n\n ),\n\n };\n\n Ok(new_can_id)\n\n }\n\n Err(err) => Err(err),\n\n }\n\n }\n\n\n\n pub async 
fn set_data_bucket_canister_wasm_binary(&mut self, wasm_binary: Vec<u8>) {\n\n self.data_bucket_canister_wasm_binary = wasm_binary;\n\n if let Err(err) = self.ensure_at_least_one_data_canister().await {\n\n println!(\"Error adding canisters: {}\", err);\n\n }\n\n }\n\n\n\n pub async fn set_search_canister_wasm_binary(&mut self, wasm_binary: Vec<u8>) {\n\n self.search_canister_wasm_binary = wasm_binary;\n\n if let Err(err) = self.ensure_at_least_one_search_canister().await {\n", "file_path": "src/index.rs", "rank": 75, "score": 6.678816079614915 }, { "content": "// Full Text Search\n\n// Input: (doc_id: Vec<u8>, document: String)\n\n//\n\n// Steps:\n\n// - Split string into tokens,\n\n// - Normalize tokens,\n\n// - Get or assign a unique ID for each token,\n\n// -\n\n\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse roaring::RoaringBitmap;\n\nuse rust_stemmers::{Algorithm, Stemmer};\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::hash::BuildHasherDefault;\n\nuse wyhash::WyHash;\n\n\n\npub type DetHashMap<K, V> = HashMap<K, V, BuildHasherDefault<WyHash>>;\n\npub type DetHashSet<K> = HashSet<K, BuildHasherDefault<WyHash>>;\n\n\n\n// #[cfg(target_arch = \"wasm32\")]\n\n// use ic_cdk::println;\n\n\n\nuse crate::Key;\n\n\n\n// Roaring Bitmaps only support 32-bit integers\n", "file_path": "src/search.rs", "rank": 76, "score": 6.591251073216856 }, { "content": "mod ic0 {\n\n #[link(wasm_import_module = \"ic0\")]\n\n extern \"C\" {\n\n pub fn canister_self_copy(dst: u32, offset: u32, size: u32) -> ();\n\n pub fn canister_self_size() -> u32;\n\n pub fn debug_print(offset: u32, size: u32) -> ();\n\n pub fn msg_arg_data_copy(dst: u32, offset: u32, size: u32) -> ();\n\n pub fn msg_arg_data_size() -> u32;\n\n pub fn msg_caller_copy(dst: u32, offset: u32, size: u32) -> ();\n\n pub fn msg_caller_size() -> u32;\n\n pub fn msg_reject(src: u32, size: u32) -> ();\n\n pub fn msg_reject_code() -> i32;\n\n pub fn msg_reject_msg_copy(dst: u32, offset: u32, size: u32) 
-> ();\n\n pub fn msg_reject_msg_size() -> u32;\n\n pub fn msg_reply() -> ();\n\n pub fn msg_reply_data_append(offset: u32, size: u32) -> ();\n\n pub fn trap(offset: u32, size: u32) -> ();\n\n pub fn call_simple(\n\n callee_src: u32,\n\n callee_size: u32,\n", "file_path": "src/dfn_core.rs", "rank": 77, "score": 6.482820274497623 }, { "content": " .expect(&format!(\n\n \"BigMap index: append call to CanisterId {} failed\",\n\n can_id\n\n ))\n\n }\n\n None => {\n\n println!(\n\n \"BigMap Index: no data canister suitable for key {}\",\n\n String::from_utf8_lossy(&key)\n\n );\n\n 0\n\n }\n\n }\n\n }\n\n\n\n fn can_ptr_to_canister_id(&self, can_ptr: &CanisterPtr) -> CanisterId {\n\n self.idx[can_ptr.0 as usize].clone()\n\n }\n\n\n\n pub async fn add_canisters(&mut self, can_ids: Vec<CanisterId>) {\n", "file_path": "src/index.rs", "rank": 78, "score": 6.390138090635031 }, { "content": " unsafe { Pin::new_unchecked(&mut *self.0.borrow_mut()) }.poll(ctx)\n\n }\n\n}\n\n\n\nimpl<T> Clone for RefCounted<T> {\n\n fn clone(&self) -> Self {\n\n RefCounted(Rc::clone(&self.0))\n\n }\n\n}\n\n\n\n/// The result type of the CallFuture.\n\npub(super) type FutureResult = Result<Vec<u8>, (i32, String)>;\n\n\n\n/// The Future trait implemenation, returned by the asynchronous inter-canister\n\n/// call.\n\n#[derive(Default)]\n\npub(super) struct CallFuture {\n\n /// result of the canister call\n\n pub result: Option<FutureResult>,\n\n /// waker (callback)\n", "file_path": "src/dfn_futures.rs", "rank": 79, "score": 6.333373510240887 }, { "content": " println!(\"BigMap Index: Created Data CanisterId {}\", can_id);\n\n\n\n // Add all canisters to the available queue\n\n self.canister_available_queue.push_back(can_id);\n\n }\n\n\n\n if let Err(err) = self.ensure_at_least_one_data_canister().await {\n\n self.is_maintenance_active = false;\n\n println!(\"Error adding canisters: {}\", err);\n\n }\n\n }\n\n\n\n pub async fn ensure_at_least_one_data_canister(&mut self) -> Result<(), String> {\n\n 
if self.hash_ring.is_empty() {\n\n if self.creating_data_canister {\n\n return Err(\n\n \"Already creating data canister, concurrent calls are not allowed\".to_string(),\n\n );\n\n }\n\n self.creating_data_canister = true;\n", "file_path": "src/index.rs", "rank": 80, "score": 6.130776891392326 }, { "content": " fn_ptr_delete_entries: Option<FnPtrDeleteEntries>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BigmapIdx {\n\n pub fn new() -> Self {\n\n let mut result: BigmapIdx = BigmapIdx::default();\n\n result.reset();\n\n result\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n *self = Self {\n\n used_bytes_threshold: 3 * 1024 * 1024 * 1024,\n\n batch_limit_bytes: 1024 * 1024,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub async fn get(&self, key: &Key) -> Option<Val> {\n", "file_path": "src/index.rs", "rank": 81, "score": 6.0156338085687295 }, { "content": " }\n\n\n\n pub fn used_bytes(&self) -> usize {\n\n std::mem::size_of_val(self)\n\n }\n\n\n\n fn normalize_to_string(&self, input: &str) -> String {\n\n String::from(self.stemmer.stem(&input.to_lowercase()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/search.rs", "rank": 82, "score": 5.97172633989585 }, { "content": "\n\n /// Get the Option<node> at position `idx - 1`.\n\n /// Returns `None` if `idx - 1` is out of bounds\n\n pub fn get_prev_key_node_at_idx(&self, idx: usize) -> Option<(Sha256Digest, &T)> {\n\n if idx < 1 {\n\n return None;\n\n }\n\n\n\n match self.get_key_node_at_idx(idx - 1) {\n\n Some(e) => Some(e),\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::biguint_to_sha256_digest;\n\n use super::BigUint;\n\n use super::HashRing;\n", "file_path": "src/hashring_sha256.rs", "rank": 83, "score": 5.9496293991490266 }, { "content": " println!(\"BigMap Index: No Data Canisters, creating one!\");\n\n match self.create_data_bucket_canister().await {\n\n Ok(can_id) => {\n\n println!(\"BigMap Index: Activating Data CanisterId {}\", can_id);\n\n\n\n let range = 
self.hash_ring_add_canister_id(&can_id);\n\n self.ucall_dcan_set_range(&can_id, range.0, range.1).await;\n\n }\n\n Err(err) => {\n\n println!(\"BigMap Index: Error creating a new Data Canister {}\", err);\n\n }\n\n }\n\n self.creating_data_canister = false;\n\n };\n\n Ok(())\n\n }\n\n\n\n pub async fn ensure_at_least_one_search_canister(&mut self) -> Result<(), String> {\n\n if self.search_canisters.is_empty() {\n\n if self.creating_search_canister {\n", "file_path": "src/index.rs", "rank": 84, "score": 5.887993991980675 }, { "content": "///////////////////////////////////////////////////////////////\n\n// Consistent Hash implementation, based on the HashRing crate\n\n// https://github.com/jeromefroe/hashring-rs/\n\n///////////////////////////////////////////////////////////////\n\nuse std::cmp::Ordering;\n\nuse std::hash::{BuildHasher, Hash, Hasher};\n\nuse wyhash::WyHash;\n\n\n\n#[derive(Debug, Default, Clone)]\n\npub struct WyHashBuilder;\n\n\n\nimpl BuildHasher for WyHashBuilder {\n\n type Hasher = WyHash;\n\n\n\n fn build_hasher(&self) -> Self::Hasher {\n\n WyHash::with_seed(3)\n\n }\n\n}\n\n\n\n// Node is an internal struct used to encapsulate the nodes that will be added\n", "file_path": "src/hashring.rs", "rank": 85, "score": 5.879382241114167 }, { "content": "use super::{calc_sha256, CanisterId, DataBucket};\n\nuse crate::hashring_sha256::{SHA256_DIGEST_MAX, SHA256_DIGEST_MIN};\n\n\n\n#[actix_rt::test]\n\nasync fn bm_data_put_get() {\n\n // Insert key&value pairs and then get the value, and verify the correctness\n\n let mut d = DataBucket::new(CanisterId::from(42));\n\n d.set_range(&SHA256_DIGEST_MIN, &SHA256_DIGEST_MAX);\n\n for i in 0..100 as u8 {\n\n let key = format!(\"key-{}\", i).into_bytes();\n\n let value = vec![i; 200_000];\n\n\n\n d.put(&key, &value, false).expect(\"DataBucket put failed\");\n\n assert!(d.used_bytes() >= i as usize * 200_000);\n\n }\n\n\n\n for i in 0..100 as u8 {\n\n let key = format!(\"key-{}\", i).into_bytes();\n\n let value 
= vec![i; 200_000];\n\n\n", "file_path": "src/data/tests.rs", "rank": 86, "score": 5.814578232076362 }, { "content": " }\n\n\n\n for can_id in self.search_canisters.iter() {\n\n self.ucall_s_can_remove_from_search_index(can_id, key).await\n\n }\n\n }\n\n\n\n pub async fn search(&self, search_query: &String) -> (u64, Vec<(Key, Val)>) {\n\n if self.search_canisters.is_empty() {\n\n return (0, Vec::new());\n\n }\n\n\n\n let mut results = Vec::new();\n\n let mut results_len = 0;\n\n\n\n for can_id in self.search_canisters.iter() {\n\n let results_per_canister = self\n\n .qcall_s_can_search_keys_by_query(can_id, search_query)\n\n .await;\n\n results_len += results_per_canister.len() as u64;\n", "file_path": "src/index.rs", "rank": 87, "score": 5.801019580614045 }, { "content": " // The destination canister doesn't have the key but it's currently rebalancing.\n\n // The key may not have been moved yet from the source canister\n\n\n\n let can_id = self.can_ptr_to_canister_id(&rebalance_dst_ptr);\n\n if self.qcall_dcan_holds_key(&can_id, key).await {\n\n println!(\n\n \"BigMap Index: lookup_get @key {} from a relocation destination {}\",\n\n String::from_utf8_lossy(key),\n\n can_id\n\n );\n\n return Some(can_id);\n\n }\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n // Find the data bucket canister into which the object with the provided key should go\n\n pub fn lookup_put(&self, key: &Key) -> Option<CanisterId> {\n", "file_path": "src/index.rs", "rank": 88, "score": 5.772658101326881 }, { "content": "There are two major parts of the BigMap architecture: 1) Index canister, and 2) Data Bucket canisters. In the current implementation there is only one Index canister and there can be a large number of Data Bucket canisters, to which the Index canister has pointers.\n\n\n\nThe *Index* canister is implemented as a [Hash Ring](https://en.wikipedia.org/wiki/Consistent_hashing) for which the code is in [this Rust file](../src/hashring_sha256.rs). 
The implementation is based using a Rust vector with Sha256 hashing of keys and Canister Ids.\n\n\n\n![](./images/hashring.svg)<br>\n\n*Hash Ring with Sha256 hashing. Sha256 hashes are shortened to 8 characters to simplify the illustration. The full hash (32 bytes) is used in the implementation.*\n\n\n\nIn the illustration, the entry's hash is `b6cdd8c1`, and the Hash Ring does not have that exact value. To determine the Data Bucket CanisterId that handles the particular entry is: the Hash Ring is searched for the *first hash which is larger or equal to the entry's hash*. Since the Hash Ring holds `34f444af` as the first entry, that entry is skipped since it's less than `b6cdd8c1`). After that, the Hash Ring holds `bda57583` -- which is the first hash larger or equal to the entry's hash `b6cdd8c1`, and therefore it's determined that `CanId1` handles the entry. Note that Hash Ring is maintained in a sorted order and due to this it is possible to perform Hash Ring lookups with binary search. Therefore, Hash Ring lookups take approximately `log(N)` time for `N` entries (Data Bucket canisters).\n\n\n\nBeside the Index canister, there is a variable number of Data Bucket canisters. BigMap starts with a single Data Bucket canister and these canisters are added one at a time as the capacity of a Data Bucket canister is exhausted. The ultimate objective of the `User Agent` is to get data from or to store data at a Data Bucket canister. It can achieve this in two ways:\n\n\n\n1. By communicating through the BigMap Index, or\n\n2. By communicating directly with the Data Bucket canisters, and using the BigMap Index only for lookups.\n\n\n\nIn the following sections we describe these two options.\n\n\n", "file_path": "docs/bigmap.md", "rank": 89, "score": 5.768913110225878 }, { "content": "//! This module contains all mechanisms required to enable asynchronous\n\n//! programming in Rust, based on native async Rust capabilities:\n\n//!\n\n//! 
- the future returned by the asynchronous System API call, and\n\n//! - the kickstarting/waker implementations to advance top level futures on\n\n//! every inter-canister callback call.\n\n\n\nuse std::{\n\n cell::{RefCell, RefMut},\n\n future::Future,\n\n pin::Pin,\n\n rc::Rc,\n\n task::{Context, Poll, Waker},\n\n};\n\n\n\n/// A reference counter wrapper we use with the CallFuture.\n\n/// This is required, because the future we return from the `call` method can\n\n/// either have two owners (the callback closure and the canister runtime) if\n\n/// the underlying system call succeeded, or just one (the canister runtime) it\n\n/// the system call failed.\n", "file_path": "src/dfn_futures.rs", "rank": 90, "score": 5.754992150174095 }, { "content": "#[derive(Default)]\n\npub(super) struct CallFuture {\n\n /// result of the canister call\n\n pub result: Option<FutureResult>,\n\n /// waker (callback)\n\n pub waker: Option<Waker>,\n\n}\n\n\n\nimpl CallFuture {\n\n pub fn new() -> Self {\n\n CallFuture::default()\n\n }\n\n}\n\n\n\nimpl Future for CallFuture {\n\n type Output = FutureResult;\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if let Some(result) = self.result.take() {\n\n return Poll::Ready(result);\n\n }\n\n self.waker = Some(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n}\n\n\n", "file_path": "src/dfn_core.rs", "rank": 91, "score": 5.737797716719763 }, { "content": "# (opt vec { 4; 5; 6; })\n\n```\n\n\n\nIt is also possible to talk directly to the data bucket canisters, but this is likely only useful during development or debugging.\n\nIn this case it's necessary to know the CanisterId, which is printed in the replica debug logs during creation.\n\n\n\nFor example:\n\n```bash\n\ndfx canister call tup4c-ks6aa-aaaaa-aaaaa-aaaaa-aaaaa-aaaaa-q get '(vec { 97; 98; 99; })'\n\n# (null)\n\ndfx canister call tup4c-ks6aa-aaaaa-aaaaa-aaaaa-aaaaa-aaaaa-q put '(vec { 97; 98; 99; }, vec { 100; 101; 102; })'\n\n# ()\n\ndfx canister 
call tup4c-ks6aa-aaaaa-aaaaa-aaaaa-aaaaa-aaaaa-q get '(vec { 97; 98; 99; })'\n\n# (opt vec { 4; 5; 6; })\n\n```\n", "file_path": "README.md", "rank": 92, "score": 5.707129795481315 }, { "content": " name_src: u32,\n\n name_size: u32,\n\n reply_fun: usize,\n\n reply_env: u32,\n\n reject_fun: usize,\n\n reject_env: u32,\n\n data_src: u32,\n\n data_size: u32,\n\n ) -> i32;\n\n pub fn stable_size() -> u32;\n\n pub fn ic0_stable_grow(additional_pages: u32) -> i32;\n\n pub fn ic0_stable_read(dst: u32, offset: u32, size: u32) -> ();\n\n pub fn ic0_stable_write(offset: u32, src: u32, size: u32) -> ();\n\n }\n\n}\n\n\n\n/*\n\nThese stubs exist for when you're compiling this code not on a canister. If you\n", "file_path": "src/dfn_core.rs", "rank": 93, "score": 5.618737878868154 }, { "content": " self.used_bytes -= 32; // for the Sha256 of the key (=32 bytes)\n\n }\n\n None => {}\n\n }\n\n }\n\n }\n\n\n\n pub fn get(&self, key: Key) -> Result<&Val, String> {\n\n // println!(\n\n // \"BigMap Data: get {}\",\n\n // String::from_utf8_lossy(&key)\n\n // );\n\n let key_sha2 = calc_sha256(&key);\n\n match self.entries.get(&key_sha2) {\n\n Some((_, v)) => Ok(v),\n\n None => Err(\"Entry not found\".to_string()),\n\n }\n\n }\n\n\n\n pub fn list(&self, key_prefix: &Key) -> Vec<Key> {\n", "file_path": "src/data.rs", "rank": 94, "score": 5.58247468700766 }, { "content": " pub unsafe fn debug_print(_offset: u32, _size: u32) {\n\n wrong_arch(\"debug_print\")\n\n }\n\n pub unsafe fn msg_arg_data_copy(_dst: u32, _offset: u32, _size: u32) {\n\n wrong_arch(\"msg_arg_data_copy\")\n\n }\n\n pub unsafe fn msg_arg_data_size() -> u32 {\n\n wrong_arch(\"canister_self_copy\")\n\n }\n\n pub unsafe fn msg_caller_copy(_dst: u32, _offset: u32, _size: u32) {\n\n wrong_arch(\"msg_caller_copy\")\n\n }\n\n pub unsafe fn msg_caller_size() -> u32 {\n\n wrong_arch(\"msg_caller_size\")\n\n }\n\n pub unsafe fn msg_reject(_src: u32, _size: u32) {\n\n wrong_arch(\"msg_reject\")\n\n }\n\n pub unsafe fn 
msg_reject_code() -> i32 {\n\n wrong_arch(\"msg_reject_code\")\n", "file_path": "src/dfn_core.rs", "rank": 95, "score": 5.507571279895802 }, { "content": "use ::bigmap::{index::BigmapIdx, CanisterId, Key, Val};\n\n#[cfg(target_arch = \"wasm32\")]\n\nuse ic_cdk::println;\n\nuse ic_cdk::storage;\n\nuse ic_cdk_macros::*;\n\n\n\n#[query]\n\nasync fn get(key: Key) -> Option<Val> {\n\n let bigmap_idx = storage::get::<BigmapIdx>();\n\n\n\n bigmap_idx.get(&key).await\n\n}\n\n\n\n#[update]\n\nasync fn put(key: Key, value: Val) -> u64 {\n\n let bigmap_idx = storage::get_mut::<BigmapIdx>();\n\n\n\n println!(\"BigMap Index: put key {}\", String::from_utf8_lossy(&key));\n\n\n\n bigmap_idx.put(&key, &value).await\n", "file_path": "src/bigmap_index.rs", "rank": 96, "score": 5.350625850631953 }, { "content": "impl DataBucket {\n\n pub fn new(id: CanisterId) -> Self {\n\n // println!(\"BigMap Data {}: new\", id);\n\n Self {\n\n id,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn set_range(&mut self, range_start: &Sha256Digest, range_end: &Sha256Digest) {\n\n println!(\n\n \"BigMap Data: set_range {} .. 
{}\",\n\n hex::encode(range_start),\n\n hex::encode(range_end)\n\n );\n\n self.range_start = range_start.clone();\n\n self.range_end = range_end.clone();\n\n }\n\n\n\n pub fn is_in_range(&self, key_sha2: &Sha256Digest) -> bool {\n", "file_path": "src/data.rs", "rank": 97, "score": 5.322656424767349 }, { "content": " Some(v) => v,\n\n None => return hex::encode(rand_key),\n\n };\n\n\n\n let can_id = self.can_ptr_to_canister_id(can_ptr);\n\n\n\n let key_is_used = self\n\n .qcall_dcan_holds_key(&can_id, &Vec::from(rand_key.as_slice()))\n\n .await;\n\n\n\n if !key_is_used {\n\n let result = hex::encode(rand_key);\n\n println!(\n\n \"get_random_key: after {} attempts found {} which maps to {}\",\n\n i, result, can_id\n\n );\n\n return result;\n\n }\n\n\n\n rand_key = rand_key_hash;\n", "file_path": "src/index.rs", "rank": 98, "score": 5.30669293670331 }, { "content": " .collect();\n\n\n\n self.batch_put(&batch_as_bytes).await;\n\n\n\n // FIXME: Ensure the search canister has enough space and allocate a new one if necessary\n\n let search_can_id = &self.search_canisters[0].clone();\n\n self.ucall_s_can_batch_add_to_search_index(&search_can_id, batch)\n\n .await;\n\n\n\n batch.len() as u64\n\n }\n\n\n\n pub async fn remove_from_fts_index(&mut self, key: &Key) {\n\n if let Err(err) = self.ensure_at_least_one_search_canister().await {\n\n println!(\n\n \"Error removing key {} => {}\",\n\n String::from_utf8_lossy(key),\n\n err\n\n );\n\n return;\n", "file_path": "src/index.rs", "rank": 99, "score": 5.2806497120673965 } ]
Rust
src/k8-client/src/client/config_rustls.rs
simlay/k8-api
3a69671c469c46757e489f4ab6919f6601e0e514
use std::io::{Error as IoError, ErrorKind, Result as IoResult}; use std::net::ToSocketAddrs; use std::path::Path; use std::pin::Pin; use std::sync::Arc; use std::task::{Context, Poll}; use futures_util::future::Future; use futures_util::io::{AsyncRead as StdAsyncRead, AsyncWrite as StdAsyncWrite}; use http::Uri; use tracing::debug; use hyper::client::connect::{Connected, Connection}; use hyper::service::Service; use hyper::Body; use hyper::Client; use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; use fluvio_future::net::TcpStream; use fluvio_future::rust_tls::{ConnectorBuilder, DefaultClientTlsStream, TlsConnector}; use super::executor::FluvioHyperExecutor; use crate::cert::{ClientConfigBuilder, ConfigBuilder}; use crate::ClientError; pub type HyperClient = Client<TlsHyperConnector, Body>; pub type HyperConfigBuilder = ClientConfigBuilder<HyperClientBuilder>; pub struct HyperTlsStream(DefaultClientTlsStream); impl Connection for HyperTlsStream { fn connected(&self) -> Connected { Connected::new() } } impl AsyncRead for HyperTlsStream { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<IoResult<()>> { match Pin::new(&mut self.0).poll_read(cx, buf.initialize_unfilled())? 
{ Poll::Ready(bytes_read) => { buf.advance(bytes_read); Poll::Ready(Ok(())) } Poll::Pending => Poll::Pending, } } } impl AsyncWrite for HyperTlsStream { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<IoResult<usize>> { Pin::new(&mut self.0).poll_write(cx, buf) } fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<IoResult<()>> { Pin::new(&mut self.0).poll_flush(cx) } fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<IoResult<()>> { Pin::new(&mut self.0).poll_close(cx) } } #[derive(Clone)] pub struct TlsHyperConnector(Arc<TlsConnector>); impl TlsHyperConnector { fn new(connector: TlsConnector) -> Self { Self(Arc::new(connector)) } } impl Service<Uri> for TlsHyperConnector { type Response = HyperTlsStream; type Error = ClientError; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } fn call(&mut self, uri: Uri) -> Self::Future { let connector = self.0.clone(); Box::pin(async move { let host = match uri.host() { Some(h) => h, None => return Err(ClientError::Other("no host".to_string())), }; match uri.scheme_str() { Some("http") => Err(ClientError::Other("http not supported".to_string())), Some("https") => { let socket_addr = { let host = host.to_string(); let port = uri.port_u16().unwrap_or(443); match (host.as_str(), port).to_socket_addrs()?.next() { Some(addr) => addr, None => { return Err(ClientError::Other(format!( "host resolution: {} failed", host ))) } } }; debug!("socket address to: {}", socket_addr); let tcp_stream = TcpStream::connect(&socket_addr).await?; let stream = connector.connect(host, tcp_stream).await.map_err(|err| { IoError::new(ErrorKind::Other, format!("tls handshake: {}", err)) })?; Ok(HyperTlsStream(stream)) } scheme => Err(ClientError::Other(format!("{:?}", scheme))), } }) } } pub struct HyperClientBuilder(ConnectorBuilder); 
impl ConfigBuilder for HyperClientBuilder { type Client = HyperClient; fn new() -> Self { Self(ConnectorBuilder::new()) } fn build(self) -> Result<Self::Client, ClientError> { let connector = self.0.build(); Ok(Client::builder() .executor(FluvioHyperExecutor) .build::<_, Body>(TlsHyperConnector::new(connector))) } fn load_ca_certificate(self, ca_path: impl AsRef<Path>) -> Result<Self, IoError> { Ok(Self(self.0.load_ca_cert(ca_path)?)) } fn load_client_certificate<P: AsRef<Path>>( self, client_crt_path: P, client_key_path: P, ) -> Result<Self, IoError> { Ok(Self( self.0.load_client_certs(client_crt_path, client_key_path)?, )) } }
use std::io::{Error as IoError, ErrorKind, Result as IoResult}; use std::net::ToSocketAddrs; use std::path::Path; use std::pin::Pin; use std::sync::Arc; use std::task::{Context, Poll}; use futures_util::future::Future; use futures_util::io::{AsyncRead as StdAsyncRead, AsyncWrite as StdAsyncWrite}; use http::Uri; use tracing::debug; use hyper::client::connect::{Connected, Connection}; use hyper::service::Service; use hyper::Body; use hyper::Client; use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; use fluvio_future::net::TcpStream; use fluvio_future::rust_tls::{ConnectorBuilder, DefaultClientTlsStream, TlsConnector}; use super::executor::FluvioHyperExecutor; use crate::cert::{ClientConfigBuilder, ConfigBuilder}; use crate::ClientError; pub type HyperClient = Client<TlsHyperConnector, Body>; pub type HyperConfigBuilder = ClientConfigBuilder<HyperClientBuilder>; pub struct HyperTlsStream(DefaultClientTlsStream); impl Connection for HyperTlsStream { fn connected(&self) -> Connected { Connected::new() } } impl AsyncRead for HyperTlsStream { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<IoResult<()>> { match Pin::new(&mut self.0).poll_read(cx, buf.initialize_unfilled())? {
} impl AsyncWrite for HyperTlsStream { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<IoResult<usize>> { Pin::new(&mut self.0).poll_write(cx, buf) } fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<IoResult<()>> { Pin::new(&mut self.0).poll_flush(cx) } fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<IoResult<()>> { Pin::new(&mut self.0).poll_close(cx) } } #[derive(Clone)] pub struct TlsHyperConnector(Arc<TlsConnector>); impl TlsHyperConnector { fn new(connector: TlsConnector) -> Self { Self(Arc::new(connector)) } } impl Service<Uri> for TlsHyperConnector { type Response = HyperTlsStream; type Error = ClientError; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } fn call(&mut self, uri: Uri) -> Self::Future { let connector = self.0.clone(); Box::pin(async move { let host = match uri.host() { Some(h) => h, None => return Err(ClientError::Other("no host".to_string())), }; match uri.scheme_str() { Some("http") => Err(ClientError::Other("http not supported".to_string())), Some("https") => { let socket_addr = { let host = host.to_string(); let port = uri.port_u16().unwrap_or(443); match (host.as_str(), port).to_socket_addrs()?.next() { Some(addr) => addr, None => { return Err(ClientError::Other(format!( "host resolution: {} failed", host ))) } } }; debug!("socket address to: {}", socket_addr); let tcp_stream = TcpStream::connect(&socket_addr).await?; let stream = connector.connect(host, tcp_stream).await.map_err(|err| { IoError::new(ErrorKind::Other, format!("tls handshake: {}", err)) })?; Ok(HyperTlsStream(stream)) } scheme => Err(ClientError::Other(format!("{:?}", scheme))), } }) } } pub struct HyperClientBuilder(ConnectorBuilder); impl ConfigBuilder for HyperClientBuilder { type Client = HyperClient; fn new() -> Self { 
Self(ConnectorBuilder::new()) } fn build(self) -> Result<Self::Client, ClientError> { let connector = self.0.build(); Ok(Client::builder() .executor(FluvioHyperExecutor) .build::<_, Body>(TlsHyperConnector::new(connector))) } fn load_ca_certificate(self, ca_path: impl AsRef<Path>) -> Result<Self, IoError> { Ok(Self(self.0.load_ca_cert(ca_path)?)) } fn load_client_certificate<P: AsRef<Path>>( self, client_crt_path: P, client_key_path: P, ) -> Result<Self, IoError> { Ok(Self( self.0.load_client_certs(client_crt_path, client_key_path)?, )) } }
Poll::Ready(bytes_read) => { buf.advance(bytes_read); Poll::Ready(Ok(())) } Poll::Pending => Poll::Pending, } }
function_block-function_prefix_line
[ { "content": "pub fn create_topic_stream_result(\n\n ttw_list: &TestTopicWatchList,\n\n) -> TokenStreamResult<TopicSpec, TopicStatus,ClientError> {\n\n let mut topic_watch_list = vec![];\n\n for ttw in ttw_list {\n\n topic_watch_list.push(Ok(create_topic_watch(&ttw)));\n\n }\n\n Ok(topic_watch_list)\n\n}\n\n\n\n//\n\n// Utility APIs\n\n//\n\n\n", "file_path": "src/k8-client/k8-fixtures/src/test_fixtures.rs", "rank": 0, "score": 111312.28505213931 }, { "content": "fn run() -> Result<(), ConfigError> {\n\n let context = MinikubeContext::try_from_system()?;\n\n context.save()?;\n\n Ok(())\n\n}\n", "file_path": "src/k8-ctx-util/src/main.rs", "rank": 1, "score": 91249.77959039006 }, { "content": "#[allow(clippy::redundant_closure)]\n\npub fn as_token_stream_result<S, E>(events: Vec<K8Watch<S>>) -> TokenStreamResult<S, E>\n\nwhere\n\n S: Spec,\n\n S::Status: Serialize + DeserializeOwned,\n\n S::Header: Serialize + DeserializeOwned,\n\n{\n\n Ok(events.into_iter().map(|event| Ok(event)).collect())\n\n}\n\n\n", "file_path": "src/k8-metadata-client/src/client.rs", "rank": 2, "score": 89832.7100231961 }, { "content": "#[proc_macro_derive(Difference)]\n\npub fn diff(input: TokenStream1) -> TokenStream1 {\n\n\n\n // Parse the string representation\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = diff::geneate_diff_trait(&ast);\n\n expanded.into()\n\n}\n\n\n\n\n", "file_path": "src/k8-diff/k8-dderive/src/lib.rs", "rank": 3, "score": 77195.16997633707 }, { "content": " /// Kubernetes Spec\n\n pub trait Spec:\n\n Sized + Debug + Clone + Default + Serialize + DeserializeOwned + Send + Sync\n\n {\n\n type Status: Status;\n\n\n\n type Header: Header;\n\n\n\n /// if true, spec is namespaced\n\n const NAME_SPACED: bool = true;\n\n\n\n /// return uri for single instance\n\n fn metadata() -> &'static Crd;\n\n\n\n fn label() -> &'static str {\n\n Self::metadata().names.kind\n\n }\n\n\n\n fn api_version() -> String {\n\n let metadata = Self::metadata();\n\n 
if metadata.group == \"core\" {\n", "file_path": "src/k8-types/src/lib.rs", "rank": 4, "score": 76313.4596724619 }, { "content": " pub trait Status:\n\n Sized + Debug + Clone + Default + Serialize + DeserializeOwned + Send + Sync\n\n {\n\n }\n\n\n", "file_path": "src/k8-types/src/lib.rs", "rank": 5, "score": 76313.4596724619 }, { "content": " pub trait Header:\n\n Sized + Debug + Clone + Default + Serialize + DeserializeOwned + Send + Sync\n\n {\n\n }\n\n\n", "file_path": "src/k8-types/src/lib.rs", "rank": 6, "score": 76313.4596724619 }, { "content": "pub fn geneate_diff_trait(input: &DeriveInput) -> TokenStream {\n\n let name = &input.ident;\n\n let decoded_field_tokens = decode_fields(&input.data);\n\n\n\n quote! {\n\n\n\n impl <'a>k8_diff::Changes<'a> for #name {\n\n\n\n fn diff(&self, new: &'a Self) -> k8_diff::Diff {\n\n\n\n let mut s_diff = k8_diff::DiffStruct::new();\n\n\n\n #decoded_field_tokens\n\n \n\n if s_diff.no_change() {\n\n return k8_diff::Diff::None\n\n }\n\n \n\n k8_diff::Diff::Change(k8_diff::DiffValue::Struct(s_diff))\n\n }\n\n }\n\n\n\n }\n\n}\n\n\n", "file_path": "src/k8-diff/k8-dderive/src/diff.rs", "rank": 7, "score": 74593.48713464057 }, { "content": "pub trait K8Meta {\n\n /// resource name\n\n fn name(&self) -> &str;\n\n\n\n /// namespace\n\n fn namespace(&self) -> &str;\n\n}\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 8, "score": 74320.8383027833 }, { "content": "pub trait DeserializeWith: Sized {\n\n fn deserialize_with<'de, D>(de: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>;\n\n}\n\n\n\n#[allow(clippy::upper_case_acronyms)]\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(tag = \"type\", content = \"object\")]\n\n#[serde(bound(serialize = \"K8Obj<S>: Serialize\"))]\n\n#[serde(bound(deserialize = \"K8Obj<S>: DeserializeOwned\"))]\n\npub enum K8Watch<S>\n\nwhere\n\n S: Spec,\n\n{\n\n ADDED(K8Obj<S>),\n\n MODIFIED(K8Obj<S>),\n\n DELETED(K8Obj<S>),\n\n}\n\n\n", "file_path": 
"src/k8-types/src/metadata.rs", "rank": 9, "score": 72936.74817911725 }, { "content": "pub trait LabelProvider: Sized {\n\n fn set_label_map(self, labels: HashMap<String, String>) -> Self;\n\n\n\n /// helper for setting list of labels\n\n fn set_labels<T: ToString>(self, labels: Vec<(T, T)>) -> Self {\n\n let mut label_map = HashMap::new();\n\n for (key, value) in labels {\n\n label_map.insert(key.to_string(), value.to_string());\n\n }\n\n self.set_label_map(label_map)\n\n }\n\n}\n\n\n\n/// metadata associated with object when returned\n\n/// here name and namespace must be populated\n\n#[derive(Deserialize, Serialize, PartialEq, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\", default)]\n\npub struct ObjectMeta {\n\n // mandatory fields\n\n pub name: String,\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 10, "score": 71074.35258810218 }, { "content": "type K8ListImpl<'a, S> =\n\n Option<Pin<Box<dyn Future<Output = Result<K8List<S>, ClientError>> + Send + 'a>>>;\n\n\n\npub struct ListStream<'a, S>\n\nwhere\n\n S: Spec,\n\n{\n\n arg: Option<ListArg>,\n\n limit: u32,\n\n done: bool,\n\n namespace: NameSpace,\n\n client: SharedK8Client,\n\n inner: K8ListImpl<'a, S>,\n\n data1: PhantomData<S>,\n\n}\n\n\n\nimpl<S> ListStream<'_, S>\n\nwhere\n\n S: Spec,\n\n{\n", "file_path": "src/k8-client/src/client/list_stream.rs", "rank": 11, "score": 70788.7058916614 }, { "content": "// Get absolute path to the \"target\" directory (\"build\" dir)\n\nfn get_target_dir() -> PathBuf {\n\n let bin = env::current_exe().expect(\"exe path\");\n\n let mut target_dir = PathBuf::from(bin.parent().expect(\"bin parent\"));\n\n while target_dir.file_name() != Some(OsStr::new(\"target\")) {\n\n target_dir.pop();\n\n }\n\n target_dir\n\n}\n\n\n", "file_path": "src/k8-client/k8-fixtures/src/test_fixtures.rs", "rank": 12, "score": 68890.03992445287 }, { "content": "/// Gets the current entry for a given host in `/etc/hosts` if there is one\n\nfn get_host_entry(hostname: 
&str) -> Result<std::option::Option<IpAddr>, ConfigError> {\n\n // Get all of the host entries\n\n let hosts = hostfile::parse_hostfile()\n\n .map_err(|e| ConfigError::Other(format!(\"failed to get /etc/hosts entries: {}\", e)))?;\n\n // Try to find a host entry with the given hostname\n\n let minikube_entry = hosts\n\n .into_iter()\n\n .find(|entry| entry.names.iter().any(|name| name == hostname));\n\n Ok(minikube_entry.map(|entry| entry.ip))\n\n}\n\n\n\n/// *Deprecated*: use [`MinikubeContext`] instead\n\n///\n\n/// Updates kubectl context settings\n\n///\n\n/// [`MinikubeContext`]: ./struct.MinikubeContext\n\npub mod v1 {\n\n use std::env;\n\n use std::fs::OpenOptions;\n\n use std::io;\n", "file_path": "src/k8-config/src/context.rs", "rank": 13, "score": 68434.2308678135 }, { "content": "pub fn create_topic_watch(ttw: &TestTopicWatch) -> K8Watch<TopicSpec, TopicStatus> {\n\n let target_dir = get_target_dir();\n\n let path = get_top_dir(&target_dir);\n\n let mut contents = String::new();\n\n let (filename, file_has_options) = if ttw.ignore_rack_assignment.is_none() {\n\n (\n\n String::from(\"k8-client/k8-fixtures/data/topic_no_options.tmpl\"),\n\n false,\n\n )\n\n } else {\n\n (\n\n String::from(\"k8-client/k8-fixtures/data/topic_all.tmpl\"),\n\n true,\n\n )\n\n };\n\n let f = File::open(path.join(filename));\n\n f.unwrap().read_to_string(&mut contents).unwrap();\n\n\n\n contents = contents.replace(\"{type}\", &*ttw.operation);\n\n contents = contents.replace(\"{name}\", &*ttw.name);\n", "file_path": "src/k8-client/k8-fixtures/src/test_fixtures.rs", "rank": 14, "score": 64796.4136535382 }, { "content": "// Spec that can store in meta store\n\npub trait StoreSpec: Sized + Default + Debug + Clone {\n\n type K8Spec: Spec;\n\n type Status: Sized + Clone + Default + Debug;\n\n type Key: Ord + Clone + Debug + ToString;\n\n type Owner: StoreSpec;\n\n\n\n const LABEL: &'static str;\n\n\n\n // convert kubernetes objects into KV value\n\n fn convert_from_k8(k8_obj: 
K8Obj<Self::K8Spec>) -> Result<Option<MetaItem<Self>>, IoError>;\n\n}\n\n\n\n/// Metadata object. Used to be KVObject int sc-core\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct MetaItem<S>\n\nwhere\n\n S: StoreSpec,\n\n{\n\n pub spec: S,\n\n pub status: S::Status,\n", "file_path": "src/k8-types/src/store.rs", "rank": 15, "score": 63070.26091228848 }, { "content": "/// items uri\n\npub fn items_uri<S>(host: &str, namespace: NameSpace, list_options: Option<ListOptions>) -> Uri\n\nwhere\n\n S: Spec,\n\n{\n\n let ns = if S::NAME_SPACED {\n\n namespace\n\n } else {\n\n NameSpace::All\n\n };\n\n let crd = S::metadata();\n\n let uri_value = prefix_uri(crd, host, ns, list_options);\n\n let uri: Uri = uri_value.parse().unwrap();\n\n uri\n\n}\n\n\n\n/// related to query parameters and uri\n\n///\n\n///\n\n///\n\n/// generate prefix for given crd\n\n/// if crd group is core then /api is used otherwise /apis + group\n\n\n", "file_path": "src/k8-client/src/uri.rs", "rank": 16, "score": 61870.900118868216 }, { "content": "/// items uri\n\npub fn item_uri<S>(host: &str, name: &str, namespace: &str, sub_resource: Option<&str>) -> Uri\n\nwhere\n\n S: Spec,\n\n{\n\n let ns = if S::NAME_SPACED {\n\n NameSpace::Named(namespace.to_owned())\n\n } else {\n\n NameSpace::All\n\n };\n\n let crd = S::metadata();\n\n let prefix = prefix_uri(crd, host, ns, None);\n\n let uri_value = format!(\"{}/{}{}\", prefix, name, sub_resource.unwrap_or(\"\"));\n\n let uri: Uri = uri_value.parse().unwrap();\n\n uri\n\n}\n\n\n", "file_path": "src/k8-client/src/uri.rs", "rank": 17, "score": 60093.64851747533 }, { "content": "pub fn prefix_uri<N>(crd: &Crd, host: &str, ns: N, options: Option<ListOptions>) -> String\n\nwhere\n\n N: Into<NameSpace>,\n\n{\n\n let namespace = ns.into();\n\n let version = crd.version;\n\n let plural = crd.names.plural;\n\n let group = crd.group;\n\n let api_prefix = match group {\n\n \"core\" => \"api\".to_owned(),\n\n _ => format!(\"apis/{}\", group),\n\n };\n\n\n\n let query 
= if let Some(opt) = options {\n\n let mut query = \"?\".to_owned();\n\n let qs = serde_qs::to_string(&opt).unwrap();\n\n query.push_str(&qs);\n\n query\n\n } else {\n\n \"\".to_owned()\n", "file_path": "src/k8-client/src/uri.rs", "rank": 18, "score": 60093.64851747533 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct MinikubeConfig {\n\n #[serde(rename = \"Name\")]\n\n name: String,\n\n #[serde(rename = \"Nodes\")]\n\n nodes: Vec<MinikubeNode>,\n\n}\n\n\n", "file_path": "src/k8-config/src/context.rs", "rank": 19, "score": 52603.177180075654 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct MinikubeNode {\n\n #[serde(rename = \"IP\")]\n\n ip: IpAddr,\n\n #[serde(rename = \"Port\")]\n\n port: u16,\n\n}\n\n\n", "file_path": "src/k8-config/src/context.rs", "rank": 20, "score": 52603.177180075654 }, { "content": "#[derive(Debug)]\n\nstruct MinikubeProfile {\n\n /// The name of the minikube profile, usually \"minikube\"\n\n name: String,\n\n /// The active minikube node, with IP and port\n\n node: MinikubeNode,\n\n}\n\n\n\nimpl MinikubeProfile {\n\n /// Gets minikube's current profile\n\n fn load() -> Result<MinikubeProfile, ConfigError> {\n\n let output = Command::new(\"minikube\")\n\n .args(&[\"profile\", \"list\", \"-o\", \"json\"])\n\n .output()?;\n\n let output_string = String::from_utf8(output.stdout).map_err(|e| {\n\n ConfigError::Other(format!(\n\n \"`minikube profile list -o json` did not give UTF-8: {}\",\n\n e\n\n ))\n\n })?;\n\n let profiles: MinikubeProfileWrapper =\n", "file_path": "src/k8-config/src/context.rs", "rank": 21, "score": 52603.177180075654 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct MinikubeProfileJson {\n\n #[serde(rename = \"Name\")]\n\n name: String,\n\n #[serde(rename = \"Status\")]\n\n status: String,\n\n #[serde(rename = \"Config\")]\n\n config: MinikubeConfig,\n\n}\n\n\n\n/// A description of the active Minikube instance, including IP and port\n", "file_path": "src/k8-config/src/context.rs", "rank": 22, 
"score": 51568.77419488529 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct MinikubeProfileWrapper {\n\n valid: Vec<MinikubeProfileJson>,\n\n}\n\n\n", "file_path": "src/k8-config/src/context.rs", "rank": 23, "score": 51568.77419488529 }, { "content": "/// Performs following\n\n/// add minikube IP address to /etc/host\n\n/// create new kubectl cluster and context which uses minikube name\n\nfn main() {\n\n if let Err(e) = run() {\n\n println!(\"{}\", e);\n\n }\n\n}\n\n\n", "file_path": "src/k8-ctx-util/src/main.rs", "rank": 24, "score": 50539.781489451634 }, { "content": "struct FluvioHyperExecutor;\n\n\n\nimpl<F: Future + Send + 'static> Executor<F> for FluvioHyperExecutor {\n\n fn execute(&self, fut: F) {\n\n spawn(async { drop(fut.await) });\n\n }\n\n}\n\n\n\n/// hyper connector that uses fluvio TLS\n\n#[derive(Clone)]\n\npub struct TlsHyperConnector(Arc<TlsConnector>);\n\n\n\nimpl TlsHyperConnector {\n\n fn new(connector: TlsConnector) -> Self {\n\n Self(Arc::new(connector))\n\n }\n\n}\n\n\n\n#[allow(clippy::type_complexity)]\n\nimpl Service<Uri> for TlsHyperConnector {\n", "file_path": "src/k8-client/src/client/config_native.rs", "rank": 25, "score": 49696.357919055896 }, { "content": "pub trait Changes {\n\n type Replace;\n\n type Patch;\n\n\n\n fn diff(&self, new: &Self) -> Result<Diff<Self::Replace, Self::Patch>, DiffError>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum DiffError {\n\n DiffValue, // json values are different\n\n}\n\n\n\nimpl std::fmt::Display for DiffError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"JSON value types are different\")\n\n }\n\n}\n\n\n\nimpl std::error::Error for DiffError {}\n\n\n", "file_path": "src/k8-diff/src/lib.rs", "rank": 26, "score": 47737.00958970103 }, { "content": "pub trait ConfigBuilder: Sized {\n\n type Client;\n\n\n\n fn new() -> Self;\n\n\n\n fn build(self) -> Result<Self::Client, ClientError>;\n\n\n\n fn load_ca_certificate(self, ca_path: impl AsRef<Path>) -> 
Result<Self, IoError>;\n\n\n\n // load from ca data\n\n fn load_ca_cert_with_data(self, data: Vec<u8>) -> Result<Self, IoError>;\n\n\n\n // load client certificate (crt) and private key\n\n fn load_client_certificate<P: AsRef<Path>>(\n\n self,\n\n client_crt_path: P,\n\n client_key_path: P,\n\n ) -> Result<Self, IoError>;\n\n\n\n fn load_client_certificate_with_data(\n", "file_path": "src/k8-client/src/cert.rs", "rank": 27, "score": 44365.48784799347 }, { "content": "type SerdeObj = Map<String, Value>;\n\npub type JsonDiff = Diff<Value, PatchObject>;\n\n\n\n#[derive(Debug)]\n\npub struct PatchObject(HashMap<String, JsonDiff>);\n\n\n\nimpl PatchObject {\n\n // diff { \"a\": 1,\"b\": 2}, { \"a\": 3, \"b\": 2} => { \"a\": 1}\n\n fn diff(old: &SerdeObj, new: &SerdeObj) -> Result<Self, DiffError> {\n\n let mut map: HashMap<String, JsonDiff> = HashMap::new();\n\n\n\n for (key, new_val) in new.iter() {\n\n match old.get(key) {\n\n Some(old_val) => {\n\n if old_val != new_val {\n\n let diff_value = old_val.diff(new_val)?;\n\n map.insert(key.clone(), diff_value);\n\n }\n\n }\n\n _ => {\n", "file_path": "src/k8-diff/src/json/mod.rs", "rank": 28, "score": 42727.992900325204 }, { "content": "#[async_trait]\n\npub trait MetadataClient: Send + Sync {\n\n type MetadataClientError: MetadataClientError\n\n + Send\n\n + Display\n\n + From<IoError>\n\n + From<DiffError>\n\n + From<SerdeJsonError>;\n\n\n\n /// retrieval a single item\n\n async fn retrieve_item<S, M>(\n\n &self,\n\n metadata: &M,\n\n ) -> Result<K8Obj<S>, Self::MetadataClientError>\n\n where\n\n S: Spec,\n\n M: K8Meta + Send + Sync;\n\n\n\n /// retrieve all items a single chunk\n\n /// this may cause client to hang if there are too many items\n\n async fn retrieve_items<S, N>(\n", "file_path": "src/k8-metadata-client/src/client.rs", "rank": 29, "score": 41442.573065625955 }, { "content": "// read file\n\nfn read_file(name: &str) -> Option<String> {\n\n let full_path = format!(\"{}/{}\", BASE_DIR, name);\n\n match 
read_to_string(&full_path) {\n\n Ok(value) => Some(value),\n\n Err(err) => {\n\n error!(\"no {} founded as pod in {}\", name, full_path);\n\n trace!(\"unable to read pod: {} value: {}\", name, err);\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/k8-config/src/pod.rs", "rank": 30, "score": 41270.48395798693 }, { "content": "fn decode_fields(data: &Data) -> TokenStream {\n\n match *data {\n\n Data::Struct(ref data) => {\n\n match data.fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| {\n\n let fname = &f.ident;\n\n \n\n quote! {\n\n // s_diff.insert(\"replicas\".to_owned(), self.replicas.diff(&new.replicas));\n\n s_diff.insert(stringify!(#fname).to_owned(), self.#fname.diff(&new.#fname));\n\n \n\n }\n\n \n\n });\n\n\n\n quote! {\n\n #(#recurse)*\n\n }\n\n }\n\n _ => unimplemented!(),\n\n }\n\n }\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\n\n", "file_path": "src/k8-diff/k8-dderive/src/diff.rs", "rank": 31, "score": 40800.21698931014 }, { "content": "/// trait for metadata client\n\npub trait MetadataClientError: Debug + Display {\n\n /// is not founded\n\n fn not_founded(&self) -> bool;\n\n\n\n // create new patch error\n\n fn patch_error() -> Self;\n\n}\n\n\n\n// For error mapping: see: https://doc.rust-lang.org/nightly/core/convert/trait.From.html\n\n\n\npub type TokenStreamResult<S, E> = Result<Vec<Result<K8Watch<S>, E>>, E>;\n\n\n", "file_path": "src/k8-metadata-client/src/client.rs", "rank": 32, "score": 40648.60907259954 }, { "content": "// Get absolute path to the project's top dir, given target dir\n\nfn get_top_dir<'a>(target_dir: &'a Path) -> &'a Path {\n\n target_dir.parent().expect(\"target parent\")\n\n}\n", "file_path": "src/k8-client/k8-fixtures/src/test_fixtures.rs", "rank": 33, "score": 36588.94995819749 }, { "content": " pub name: String,\n\n pub labels: HashMap<String, String>,\n\n pub namespace: String,\n\n pub owner_references: Vec<OwnerReferences>,\n\n pub finalizers: Vec<String>,\n\n pub annotations: 
HashMap<String, String>,\n\n}\n\n\n\nimpl LabelProvider for InputObjectMeta {\n\n fn set_label_map(mut self, labels: HashMap<String, String>) -> Self {\n\n self.labels = labels;\n\n self\n\n }\n\n}\n\n\n\nimpl fmt::Display for InputObjectMeta {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}:{}\", self.name, self.namespace)\n\n }\n\n}\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 34, "score": 30739.540383184172 }, { "content": "#[derive(Deserialize, Serialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ListMetadata {\n\n pub _continue: Option<String>,\n\n pub resource_version: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Default, Debug, PartialEq, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct LabelSelector {\n\n pub match_labels: HashMap<String, String>,\n\n}\n\n\n\nimpl LabelSelector {\n\n pub fn new_labels<T: Into<String>>(labels: Vec<(T, T)>) -> Self {\n\n let mut match_labels = HashMap::new();\n\n for (key, value) in labels {\n\n match_labels.insert(key.into(), value.into());\n\n }\n\n LabelSelector { match_labels }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 35, "score": 30738.67281548348 }, { "content": "pub struct MetaItemContext {\n\n pub item_ctx: Option<ObjectMeta>,\n\n pub parent_ctx: Option<ObjectMeta>,\n\n}\n\n\n\nimpl MetaItemContext {\n\n pub fn with_ctx(mut self, ctx: ObjectMeta) -> Self {\n\n self.item_ctx = Some(ctx);\n\n self\n\n }\n\n\n\n pub fn with_parent_ctx(mut self, ctx: ObjectMeta) -> Self {\n\n self.parent_ctx = Some(ctx);\n\n self\n\n }\n\n\n\n pub fn make_parent_ctx(&self) -> Self {\n\n if self.item_ctx.is_some() {\n\n Self::default().with_parent_ctx(self.item_ctx.as_ref().unwrap().clone())\n\n } else {\n", "file_path": "src/k8-types/src/store.rs", "rank": 36, "score": 30738.55506451868 }, { "content": "impl From<ObjectMeta> for ItemMeta {\n\n fn from(meta: ObjectMeta) -> Self {\n\n Self {\n\n name: meta.name,\n\n namespace: 
meta.namespace,\n\n }\n\n }\n\n}\n\n\n\n/// used for updating item\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UpdateItemMeta {\n\n pub name: String,\n\n pub namespace: String,\n\n pub resource_version: String,\n\n}\n\n\n\nimpl From<ObjectMeta> for UpdateItemMeta {\n\n fn from(meta: ObjectMeta) -> Self {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 37, "score": 30737.953773522997 }, { "content": " fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where D: Deserializer<'de>,\n\n {\n\n use serde::de::{ Visitor, MapAccess};\n\n\n\n struct StatusVisitor<S: Spec>(PhantomData<fn() -> S>);\n\n\n\n impl<'de,S> Visitor<'de> for StatusVisitor<S>\n\n where\n\n S: Spec,\n\n DeleteResponse<S>: Deserialize<'de>,\n\n {\n\n type Value = DeleteResponse<S>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or json\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 38, "score": 30737.73590184691 }, { "content": "}\n\n\n\nimpl From<ObjectMeta> for InputObjectMeta {\n\n fn from(meta: ObjectMeta) -> Self {\n\n Self {\n\n name: meta.name,\n\n namespace: meta.namespace,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n/// used for retrieving,updating and deleting item\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ItemMeta {\n\n pub name: String,\n\n pub namespace: String,\n\n}\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 39, "score": 30737.0165415251 }, { "content": "impl<S> K8Obj<S>\n\nwhere\n\n S: Spec,\n\n{\n\n #[allow(dead_code)]\n\n pub fn new<N>(name: N, spec: S) -> Self\n\n where\n\n N: Into<String>,\n\n {\n\n Self {\n\n api_version: S::api_version(),\n\n kind: S::kind(),\n\n metadata: ObjectMeta::named(name),\n\n spec,\n\n ..Default::default()\n\n 
}\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn set_status(mut self, status: S::Status) -> Self {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 40, "score": 30736.701645400542 }, { "content": "}\n\n\n\n/// name is optional for template\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\", default)]\n\npub struct TemplateMeta {\n\n pub name: Option<String>,\n\n pub creation_timestamp: Option<String>,\n\n pub labels: HashMap<String, String>,\n\n}\n\n\n\nimpl LabelProvider for TemplateMeta {\n\n fn set_label_map(mut self, labels: HashMap<String, String>) -> Self {\n\n self.labels = labels;\n\n self\n\n }\n\n}\n\n\n\nimpl TemplateMeta {\n\n /// create with name and default namespace\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 41, "score": 30736.242170680674 }, { "content": " return metadata.version.to_owned();\n\n }\n\n format!(\"{}/{}\", metadata.group, metadata.version)\n\n }\n\n\n\n fn kind() -> String {\n\n Self::metadata().names.kind.to_owned()\n\n }\n\n\n\n /// in case of applying, we have some fields that are generated\n\n /// or override. 
So need to special logic to reset them so we can do proper comparison\n\n fn make_same(&mut self, _other: &Self) {}\n\n }\n\n\n\n #[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n pub struct DefaultHeader {}\n\n\n\n impl Header for DefaultHeader {}\n\n}\n", "file_path": "src/k8-types/src/lib.rs", "rank": 42, "score": 30735.840343571683 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::marker::PhantomData;\n\n\n\nuse serde::de::{DeserializeOwned, Deserializer};\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\n\n\nuse crate::Spec;\n\n\n\npub const DEFAULT_NS: &str = \"default\";\n\npub const TYPE_OPAQUE: &str = \"Opaque\";\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 43, "score": 30735.519955589494 }, { "content": " ..Default::default()\n\n }\n\n }\n\n\n\n /// provide builder pattern setter\n\n pub fn set_labels<T: Into<String>>(mut self, labels: Vec<(T, T)>) -> Self {\n\n let mut label_map = HashMap::new();\n\n for (key, value) in labels {\n\n label_map.insert(key.into(), value.into());\n\n }\n\n self.labels = label_map;\n\n self\n\n }\n\n\n\n /// create with name and default namespace\n\n pub fn named<S>(name: S) -> Self\n\n where\n\n S: Into<String>,\n\n {\n\n Self {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 44, "score": 30735.339093994397 }, { "content": " pub fn named<S>(name: S) -> Self\n\n where\n\n S: Into<String>,\n\n {\n\n Self {\n\n name: Some(name.into()),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TemplateSpec<S> {\n\n pub metadata: Option<TemplateMeta>,\n\n pub spec: S,\n\n}\n\n\n\nimpl<S> TemplateSpec<S> {\n\n pub fn new(spec: S) -> Self {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 45, "score": 30735.27827500885 }, { "content": " pub namespace: String,\n\n pub uid: String,\n\n pub creation_timestamp: String,\n\n pub 
generation: Option<i32>,\n\n pub resource_version: String,\n\n // optional\n\n pub cluster_name: Option<String>,\n\n pub deletion_timestamp: Option<String>,\n\n pub deletion_grace_period_seconds: Option<u32>,\n\n pub labels: HashMap<String, String>,\n\n pub owner_references: Vec<OwnerReferences>,\n\n pub annotations: HashMap<String, String>,\n\n pub finalizers: Vec<String>,\n\n}\n\n\n\nimpl LabelProvider for ObjectMeta {\n\n fn set_label_map(mut self, labels: HashMap<String, String>) -> Self {\n\n self.labels = labels;\n\n self\n\n }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 46, "score": 30735.22614960466 }, { "content": " pub fn with_ctx(mut self, ctx: MetaItemContext) -> Self {\n\n self.ctx = ctx;\n\n self\n\n }\n\n\n\n pub fn key(&self) -> &S::Key {\n\n &self.key\n\n }\n\n\n\n pub fn key_owned(&self) -> S::Key {\n\n self.key.clone()\n\n }\n\n\n\n pub fn my_key(self) -> S::Key {\n\n self.key\n\n }\n\n\n\n pub fn spec(&self) -> &S {\n\n &self.spec\n\n }\n", "file_path": "src/k8-types/src/store.rs", "rank": 47, "score": 30734.98902594919 }, { "content": "}\n\n\n\nimpl K8Meta for ObjectMeta {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn namespace(&self) -> &str {\n\n &self.namespace\n\n }\n\n}\n\n\n\nimpl ObjectMeta {\n\n pub fn new<S>(name: S, name_space: S) -> Self\n\n where\n\n S: Into<String>,\n\n {\n\n Self {\n\n name: name.into(),\n\n namespace: name_space.into(),\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 48, "score": 30734.975549138417 }, { "content": " pub kind: String,\n\n pub metadata: M,\n\n pub spec: S,\n\n #[serde(flatten)]\n\n pub header: S::Header,\n\n}\n\n\n\nimpl<S, M> K8SpecObj<S, M>\n\nwhere\n\n S: Spec,\n\n{\n\n pub fn new(spec: S, metadata: M) -> Self\n\n where\n\n M: Default,\n\n {\n\n Self {\n\n api_version: S::api_version(),\n\n kind: S::kind(),\n\n metadata,\n\n spec,\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 49, "score": 30734.89946538372 }, { "content": "impl<S> fmt::Display for 
MetaItem<S>\n\nwhere\n\n S: StoreSpec,\n\n S::Key: Display,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"MetaItem {} key: {}\", S::LABEL, self.key())\n\n }\n\n}\n\n\n\nimpl<S> From<MetaItem<S>> for (S::Key, S, S::Status)\n\nwhere\n\n S: StoreSpec,\n\n{\n\n fn from(val: MetaItem<S>) -> Self {\n\n (val.key, val.spec, val.status)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n", "file_path": "src/k8-types/src/store.rs", "rank": 50, "score": 30734.837686964176 }, { "content": " pub key: S::Key,\n\n pub ctx: MetaItemContext,\n\n}\n\n\n\nimpl<S> MetaItem<S>\n\nwhere\n\n S: StoreSpec,\n\n{\n\n pub fn new<J>(key: J, spec: S, status: S::Status, ctx: MetaItemContext) -> Self\n\n where\n\n J: Into<S::Key>,\n\n {\n\n Self {\n\n key: key.into(),\n\n spec,\n\n status,\n\n ctx,\n\n }\n\n }\n\n\n", "file_path": "src/k8-types/src/store.rs", "rank": 51, "score": 30734.666219566832 }, { "content": " pub fn is_owned(&self, uid: &str) -> bool {\n\n match &self.ctx.parent_ctx {\n\n Some(parent) => parent.uid == uid,\n\n None => false,\n\n }\n\n }\n\n\n\n pub fn with_spec<J>(key: J, spec: S) -> Self\n\n where\n\n J: Into<S::Key>,\n\n {\n\n Self::new(\n\n key.into(),\n\n spec,\n\n S::Status::default(),\n\n MetaItemContext::default(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/k8-types/src/store.rs", "rank": 52, "score": 30734.5766708061 }, { "content": "\n\nimpl<S> K8List<S>\n\nwhere\n\n S: Spec,\n\n{\n\n #[allow(dead_code)]\n\n pub fn new() -> Self {\n\n K8List {\n\n api_version: S::api_version(),\n\n items: vec![],\n\n kind: S::kind(),\n\n metadata: ListMetadata {\n\n _continue: None,\n\n resource_version: S::api_version(),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl<S> Default for K8List<S>\n\nwhere\n\n S: Spec,\n\n{\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 53, "score": 30734.333537546394 }, { "content": "}\n\n\n\nimpl<S> UpdateK8ObjStatus<S>\n\nwhere\n\n S: Spec,\n\n{\n\n 
pub fn new(status: S::Status, metadata: UpdateItemMeta) -> Self {\n\n Self {\n\n api_version: S::api_version(),\n\n kind: S::kind(),\n\n metadata,\n\n status,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl<S> From<UpdateK8Obj<S>> for InputK8Obj<S>\n\nwhere\n\n S: Spec,\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 54, "score": 30734.033072491125 }, { "content": "pub struct DeleteOptions {\n\n pub kind: &'static str,\n\n pub api_version: &'static str,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub pretty: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub dry_run: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub grace_period_seconds: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub propagation_policy: Option<PropogationPolicy>,\n\n}\n\n\n\nimpl Default for DeleteOptions {\n\n fn default() -> Self {\n\n Self {\n\n kind: \"DeleteOptions\",\n\n api_version: \"v1\",\n\n pretty: None,\n\n dry_run: None,\n", "file_path": "src/k8-types/src/options.rs", "rank": 55, "score": 30733.928766177363 }, { "content": " }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Default, Debug, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Env {\n\n pub name: String,\n\n pub value: Option<String>,\n\n pub value_from: Option<EnvVarSource>,\n\n}\n\n\n\nimpl Env {\n\n pub fn key_value<T: Into<String>>(name: T, value: T) -> Self {\n\n Env {\n\n name: name.into(),\n\n value: Some(value.into()),\n\n value_from: None,\n\n }\n\n }\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 56, "score": 30733.913783039086 }, { "content": "mod crd;\n\nmod metadata;\n\npub mod options;\n\npub mod store;\n\n#[cfg(feature = \"core\")]\n\npub mod core;\n\n#[cfg(feature = \"app\")]\n\npub mod app;\n\n#[cfg(feature = \"storage\")]\n\npub mod storage;\n\n#[cfg(feature = \"batch\")]\n\npub mod batch;\n\n\n\npub use self::crd::*;\n\npub use self::metadata::*;\n\npub 
use self::spec_def::*;\n\n\n\nmod spec_def {\n\n\n\n use std::fmt::Debug;\n\n\n\n use serde::de::DeserializeOwned;\n\n use serde::Deserialize;\n\n use serde::Serialize;\n\n\n\n use super::Crd;\n\n\n", "file_path": "src/k8-types/src/lib.rs", "rank": 57, "score": 30733.78785751604 }, { "content": "\n\nimpl K8Meta for InputObjectMeta {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn namespace(&self) -> &str {\n\n &self.namespace\n\n }\n\n}\n\n\n\nimpl InputObjectMeta {\n\n // shorthand to create just with name and metadata\n\n pub fn named<S: Into<String>>(name: S, namespace: S) -> Self {\n\n InputObjectMeta {\n\n name: name.into(),\n\n namespace: namespace.into(),\n\n ..Default::default()\n\n }\n\n }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 58, "score": 30733.455603332113 }, { "content": "impl Default for OwnerReferences {\n\n fn default() -> Self {\n\n Self {\n\n api_version: \"v1\".to_owned(),\n\n block_owner_deletion: false,\n\n controller: None,\n\n kind: \"\".to_owned(),\n\n uid: \"\".to_owned(),\n\n name: \"\".to_owned(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum DeleteStatus<S>\n\nwhere\n\n S: Spec,\n\n{\n\n Deleted(DeletedStatus),\n\n ForegroundDelete(K8Obj<S>),\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 59, "score": 30733.384961331925 }, { "content": " pub fn status(&self) -> &S::Status {\n\n &self.status\n\n }\n\n\n\n pub fn set_status(&mut self, status: S::Status) {\n\n self.status = status;\n\n }\n\n\n\n pub fn ctx(&self) -> &MetaItemContext {\n\n &self.ctx\n\n }\n\n\n\n pub fn set_ctx(&mut self, ctx: MetaItemContext) {\n\n self.ctx = ctx;\n\n }\n\n\n\n pub fn parts(self) -> (S::Key, S, MetaItemContext) {\n\n (self.key, self.spec, self.ctx)\n\n }\n\n\n", "file_path": "src/k8-types/src/store.rs", "rank": 60, "score": 30732.702459431544 }, { "content": " Self::default()\n\n }\n\n }\n\n}\n\n\n\nimpl ::std::default::Default for MetaItemContext {\n\n fn default() -> Self {\n\n Self {\n\n item_ctx: 
None,\n\n parent_ctx: None,\n\n }\n\n }\n\n}\n\n\n\n/// define default store spec assuming key is string\n\n#[macro_export]\n\nmacro_rules! default_store_spec {\n\n ($spec:ident,$status:ident,$name:expr) => {\n\n impl crate::store::StoreSpec for $spec {\n\n const LABEL: &'static str = $name;\n", "file_path": "src/k8-types/src/store.rs", "rank": 61, "score": 30731.96679488353 }, { "content": " Self {\n\n name: meta.name,\n\n namespace: meta.namespace,\n\n resource_version: meta.resource_version,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct OwnerReferences {\n\n pub api_version: String,\n\n #[serde(default)]\n\n pub block_owner_deletion: bool,\n\n pub controller: Option<bool>,\n\n pub kind: String,\n\n pub name: String,\n\n pub uid: String,\n\n}\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 62, "score": 30731.614124288968 }, { "content": "{\n\n fn from(update: UpdateK8Obj<S>) -> Self {\n\n Self {\n\n api_version: update.api_version,\n\n kind: update.kind,\n\n metadata: update.metadata.into(),\n\n spec: update.spec,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl From<ItemMeta> for InputObjectMeta {\n\n fn from(update: ItemMeta) -> Self {\n\n Self {\n\n name: update.name,\n\n namespace: update.namespace,\n\n ..Default::default()\n\n }\n\n }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 63, "score": 30731.578737781125 }, { "content": " E: de::Error,\n\n {\n\n match value {\n\n \"Success\" => Ok(DeleteResponse::OkStatus(StatusEnum::SUCCESS)),\n\n \"Failure\" => Ok(DeleteResponse::OkStatus(StatusEnum::FAILURE)),\n\n _ => Err(de::Error::custom(format!(\"unrecognized status: {}\",value)))\n\n }\n\n\n\n\n\n }\n\n\n\n fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>\n\n where\n\n M: MapAccess<'de>,\n\n {\n\n Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))\n\n }\n\n }\n\n\n\n 
deserializer.deserialize_any(StatusVisitor(PhantomData))\n\n }\n\n\n\n}\n\n*/\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 64, "score": 30731.39421833043 }, { "content": "use serde::Serialize;\n\n\n\n/// goes as query parameter\n\n#[derive(Serialize, Default, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ListOptions {\n\n pub pretty: Option<bool>,\n\n #[serde(rename = \"continue\")]\n\n pub continu: Option<String>,\n\n pub field_selector: Option<String>,\n\n pub include_uninitialized: Option<bool>,\n\n pub label_selector: Option<String>,\n\n pub limit: Option<u32>,\n\n pub resource_version: Option<String>,\n\n pub timeout_seconds: Option<u32>,\n\n pub watch: Option<bool>,\n\n}\n\n\n\n#[derive(Serialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/k8-types/src/options.rs", "rank": 65, "score": 30731.355977690648 }, { "content": " ..Default::default()\n\n }\n\n }\n\n}\n\n\n\npub type InputK8Obj<S> = K8SpecObj<S, InputObjectMeta>;\n\npub type UpdateK8Obj<S> = K8SpecObj<S, ItemMeta>;\n\n\n\n/// Used for updating k8obj\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UpdateK8ObjStatus<S>\n\nwhere\n\n S: Spec,\n\n{\n\n pub api_version: String,\n\n pub kind: String,\n\n pub metadata: UpdateItemMeta,\n\n pub status: S::Status,\n\n pub data: PhantomData<S>,\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 66, "score": 30731.223825409037 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\npub struct ObjectFieldSelector {\n\n pub field_path: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::Env;\n\n use super::ObjectMeta;\n\n\n\n #[test]\n\n fn test_metadata_label() {\n\n let metadata =\n\n ObjectMeta::default().set_labels(vec![(\"app\".to_owned(), \"test\".to_owned())]);\n\n\n\n let maps = metadata.labels;\n\n assert_eq!(maps.len(), 1);\n\n assert_eq!(maps.get(\"app\").unwrap(), \"test\");\n\n }\n", "file_path": 
"src/k8-types/src/metadata.rs", "rank": 67, "score": 30731.189865587978 }, { "content": "\n\n type K8Spec = Self;\n\n type Status = $status;\n\n type Key = String;\n\n type Owner = Self;\n\n\n\n fn convert_from_k8(\n\n k8_obj: crate::K8Obj<Self::K8Spec>,\n\n ) -> Result<Option<crate::store::MetaItem<Self>>, std::io::Error> {\n\n let ctx =\n\n crate::store::MetaItemContext::default().with_ctx(k8_obj.metadata.clone());\n\n Ok(Some(crate::store::MetaItem::new(\n\n k8_obj.metadata.name,\n\n k8_obj.spec,\n\n k8_obj.status,\n\n ctx,\n\n )))\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/k8-types/src/store.rs", "rank": 68, "score": 30731.01969596139 }, { "content": " self.status = status;\n\n self\n\n }\n\n\n\n pub fn as_status_update(&self, status: S::Status) -> UpdateK8ObjStatus<S> {\n\n UpdateK8ObjStatus {\n\n api_version: S::api_version(),\n\n kind: S::kind(),\n\n metadata: self.metadata.as_update(),\n\n status,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl<S> K8Obj<S>\n\nwhere\n\n S: Spec,\n\n{\n\n pub fn as_input(&self) -> InputK8Obj<S> {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 69, "score": 30730.605684508322 }, { "content": " pub fn key_field_ref<T: Into<String>>(name: T, field_path: T) -> Self {\n\n Env {\n\n name: name.into(),\n\n value: None,\n\n value_from: Some(EnvVarSource {\n\n field_ref: Some(ObjectFieldSelector {\n\n field_path: field_path.into(),\n\n }),\n\n }),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Default, Debug, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct EnvVarSource {\n\n field_ref: Option<ObjectFieldSelector>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Default, Debug, Clone, PartialEq)]\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 70, "score": 30729.985378842364 }, { "content": "use std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::fmt::Display;\n\nuse std::io::Error as IoError;\n\n\n\nuse crate::K8Obj;\n\nuse crate::ObjectMeta;\n\nuse crate::Spec;\n\n\n\n// Spec that 
can store in meta store\n", "file_path": "src/k8-types/src/store.rs", "rank": 71, "score": 30729.587633750838 }, { "content": "//!\n\n//! # CRD Definition\n\n//!\n\n//! Interface to the CRD header definition in K8 key value store\n\n//!\n\n#[derive(Debug)]\n\npub struct Crd {\n\n pub group: &'static str,\n\n pub version: &'static str,\n\n pub names: CrdNames,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CrdNames {\n\n pub kind: &'static str,\n\n pub plural: &'static str,\n\n pub singular: &'static str,\n\n}\n\n\n\npub const GROUP: &str = \"fluvio.infinyon.com\";\n\npub const V1: &str = \"v1\";\n", "file_path": "src/k8-types/src/crd.rs", "rank": 72, "score": 30729.467827444078 }, { "content": "}\n\n\n\n/// status for actual deletion\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct DeletedStatus {\n\n pub api_version: String,\n\n pub code: Option<u16>,\n\n pub details: Option<StatusDetails>,\n\n pub kind: String,\n\n pub message: Option<String>,\n\n pub reason: Option<String>,\n\n pub status: StatusEnum,\n\n}\n\n\n\n/// Default status implementation\n\n#[allow(clippy::upper_case_acronyms)]\n\n#[derive(Deserialize, Debug, Eq, PartialEq, Clone)]\n\npub enum StatusEnum {\n\n #[serde(rename = \"Success\")]\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 73, "score": 30729.318546093928 }, { "content": " SUCCESS,\n\n #[serde(rename = \"Failure\")]\n\n FAILURE,\n\n}\n\n\n\n/*\n\n#[serde(deserialize_with = \"StatusEnum::deserialize_with\")]\n\n pub status: StatusEnum,\n\n*/\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\npub struct StatusDetails {\n\n pub name: String,\n\n pub group: Option<String>,\n\n pub kind: String,\n\n pub uid: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 74, "score": 30729.218716421405 }, { "content": "#[serde(bound(serialize = \"S: 
Serialize\"))]\n\n#[serde(bound(deserialize = \"S: DeserializeOwned\"))]\n\npub struct K8Obj<S>\n\nwhere\n\n S: Spec,\n\n{\n\n #[serde(default = \"S::api_version\")]\n\n pub api_version: String,\n\n #[serde(default = \"S::kind\")]\n\n pub kind: String,\n\n #[serde(default)]\n\n pub metadata: ObjectMeta,\n\n #[serde(default)]\n\n pub spec: S,\n\n #[serde(flatten)]\n\n pub header: S::Header,\n\n #[serde(default)]\n\n pub status: S::Status,\n\n}\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 75, "score": 30729.20764585802 }, { "content": "\n\n #[test]\n\n fn test_env() {\n\n let env = Env::key_value(\"lang\", \"english\");\n\n assert_eq!(env.name, \"lang\");\n\n assert_eq!(env.value, Some(\"english\".to_owned()));\n\n }\n\n}\n\n\n\n/*\n\n#[cfg(test)]\n\nmod test_delete {\n\n\n\n\n\n\n\n use serde_json;\n\n use serde::{ Serialize,Deserialize};\n\n\n\n use crate::{ Spec,Status, DefaultHeader, Crd, CrdNames};\n\n use super::DeleteResponse;\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 76, "score": 30728.986542946157 }, { "content": " TemplateSpec {\n\n metadata: None,\n\n spec,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(bound(serialize = \"K8Obj<S>: Serialize\"))]\n\n#[serde(bound(deserialize = \"K8Obj<S>: DeserializeOwned\"))]\n\npub struct K8List<S>\n\nwhere\n\n S: Spec,\n\n{\n\n pub api_version: String,\n\n pub kind: String,\n\n pub metadata: ListMetadata,\n\n pub items: Vec<K8Obj<S>>,\n\n}\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 77, "score": 30728.98115569321 }, { "content": " grace_period_seconds: None,\n\n propagation_policy: None,\n\n }\n\n }\n\n}\n\n#[derive(Serialize, Debug)]\n\npub enum PropogationPolicy {\n\n Orphan,\n\n Background,\n\n Foreground,\n\n}\n\n\n\n#[derive(Serialize, Default)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Precondition {\n\n pub uid: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": 
"src/k8-types/src/options.rs", "rank": 78, "score": 30728.83862324118 }, { "content": "\n\n pub fn as_item(&self) -> ItemMeta {\n\n ItemMeta {\n\n name: self.name.clone(),\n\n namespace: self.namespace.clone(),\n\n }\n\n }\n\n\n\n pub fn as_update(&self) -> UpdateItemMeta {\n\n UpdateItemMeta {\n\n name: self.name.clone(),\n\n namespace: self.namespace.clone(),\n\n resource_version: self.resource_version.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct InputObjectMeta {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 79, "score": 30728.587778460464 }, { "content": " name: name.into(),\n\n ..Default::default()\n\n }\n\n }\n\n\n\n /// create owner references point to this metadata\n\n /// if name or uid doesn't exists return none\n\n pub fn make_owner_reference<S: Spec>(&self) -> OwnerReferences {\n\n OwnerReferences {\n\n kind: S::kind(),\n\n name: self.name.clone(),\n\n uid: self.uid.clone(),\n\n // controller: Some(true),\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn namespace(&self) -> &str {\n\n &self.namespace\n\n }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 80, "score": 30728.2229799188 }, { "content": "\n\n /// create child references that points to this\n\n pub fn make_child_input_metadata<S: Spec>(&self, childname: String) -> InputObjectMeta {\n\n let owner_references: Vec<OwnerReferences> = vec![self.make_owner_reference::<S>()];\n\n\n\n InputObjectMeta {\n\n name: childname,\n\n namespace: self.namespace().to_owned(),\n\n owner_references,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn as_input(&self) -> InputObjectMeta {\n\n InputObjectMeta {\n\n name: self.name.clone(),\n\n namespace: self.namespace.clone(),\n\n ..Default::default()\n\n }\n\n }\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 81, "score": 30727.9772027376 }, { "content": " K8SpecObj {\n\n api_version: self.api_version.clone(),\n\n kind: 
self.kind.clone(),\n\n metadata: self.metadata.as_input(),\n\n spec: self.spec.clone(),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n/// For creating, only need spec\n\n#[derive(Deserialize, Serialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(bound(serialize = \"S: Serialize, M: Serialize\"))]\n\n#[serde(bound(deserialize = \"S: DeserializeOwned, M: DeserializeOwned\"))]\n\npub struct K8SpecObj<S, M>\n\nwhere\n\n S: Spec,\n\n{\n\n pub api_version: String,\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 82, "score": 30727.832318882545 }, { "content": " &TEST_API\n\n }\n\n }\n\n\n\n #[derive(Deserialize, Serialize,Debug, Default,Clone)]\n\n struct TestStatus(bool);\n\n\n\n impl Status for TestStatus{}\n\n\n\n #[test]\n\n fn test_deserialize_test_options() {\n\n let data = r#\"\n\n {\n\n \"kind\": \"Status\",\n\n \"apiVersion\": \"v1\",\n\n \"metadata\": {\n\n\n\n },\n\n \"status\": \"Success\",\n\n \"details\": {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 83, "score": 30727.685785831705 }, { "content": "\n\n use super::ListOptions;\n\n\n\n #[test]\n\n fn test_list_query() {\n\n let opt = ListOptions {\n\n pretty: Some(true),\n\n watch: Some(true),\n\n ..Default::default()\n\n };\n\n\n\n let qs = serde_qs::to_string(&opt).unwrap();\n\n assert_eq!(qs, \"pretty=true&watch=true\")\n\n }\n\n}\n", "file_path": "src/k8-types/src/options.rs", "rank": 84, "score": 30727.671774351515 }, { "content": "\n\n const TEST_API: Crd = Crd {\n\n group: \"test\",\n\n version: \"v1\",\n\n names: CrdNames {\n\n kind: \"test\",\n\n plural: \"test\",\n\n singular: \"test\",\n\n },\n\n };\n\n\n\n\n\n #[derive(Deserialize, Serialize, Default, Debug, Clone)]\n\n struct TestSpec {}\n\n\n\n impl Spec for TestSpec {\n\n type Status = TestStatus;\n\n type Header = DefaultHeader;\n\n\n\n fn metadata() -> &'static Crd {\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 85, "score": 30727.462664714938 }, { "content": " \"name\": 
\"test\",\n\n \"group\": \"test.infinyon.com\",\n\n \"kind\": \"test\",\n\n \"uid\": \"62fc6733-c505-40c1-9dbb-dcd71e93528f\"\n\n }\"#;\n\n\n\n // Parse the string of data into serde_json::Value.\n\n let _status: DeleteResponse<TestSpec> = serde_json::from_str(data).expect(\"response\");\n\n }\n\n}\n\n*/\n\n\n\n/*\n\n\n\n\n\nimpl<'de, S> Deserialize<'de> for DeleteResponse<S>\n\n where\n\n S: Spec\n\n{\n\n\n", "file_path": "src/k8-types/src/metadata.rs", "rank": 86, "score": 30727.38778157146 }, { "content": " debug!(\"using k8 token: {:#?}\", token);\n\n Ok(Self {\n\n client,\n\n host,\n\n token,\n\n })\n\n }\n\n\n\n fn hostname(&self) -> &str {\n\n &self.host\n\n }\n\n\n\n fn finish_request<B>(&self, request: &mut Request<B>) -> Result<(), ClientError>\n\n where\n\n B: Into<Body>,\n\n {\n\n if let Some(ref token) = self.token {\n\n let full_token = format!(\"Bearer {}\", token);\n\n request\n\n .headers_mut()\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 87, "score": 30391.678686210642 }, { "content": "/// K8 Cluster accessible thru API\n\n#[derive(Debug)]\n\npub struct K8Client {\n\n client: HyperClient,\n\n host: String,\n\n token: Option<String>,\n\n}\n\n\n\nimpl K8Client {\n\n // load using default k8 config\n\n pub fn default() -> Result<Self, ClientError> {\n\n let config = K8Config::load()?;\n\n Self::new(config)\n\n }\n\n\n\n pub fn new(config: K8Config) -> Result<Self, ClientError> {\n\n let helper = HyperConfigBuilder::new(config)?;\n\n let host = helper.host();\n\n let token = helper.token();\n\n let client = helper.build()?;\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 88, "score": 30391.402448571487 }, { "content": "use std::fmt::Debug;\n\nuse std::fmt::Display;\n\nuse std::sync::Arc;\n\n\n\nuse async_trait::async_trait;\n\nuse bytes::Buf;\n\nuse futures_util::future::FutureExt;\n\nuse futures_util::stream::empty;\n\nuse futures_util::stream::BoxStream;\n\nuse futures_util::stream::Stream;\n\nuse 
futures_util::stream::StreamExt;\n\nuse futures_util::stream::TryStreamExt;\n\nuse hyper::body::aggregate;\n\nuse hyper::body::Bytes;\n\nuse hyper::header::HeaderValue;\n\nuse hyper::header::ACCEPT;\n\nuse hyper::header::AUTHORIZATION;\n\nuse hyper::header::CONTENT_TYPE;\n\nuse hyper::Body;\n\nuse hyper::Request;\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 89, "score": 30389.11823791845 }, { "content": " .insert(AUTHORIZATION, HeaderValue::from_str(&full_token)?);\n\n }\n\n Ok(())\n\n }\n\n\n\n /// handle request. this is async function\n\n async fn handle_request<T>(&self, mut request: Request<Body>) -> Result<T, ClientError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n use std::io::Read;\n\n\n\n self.finish_request(&mut request)?;\n\n\n\n let resp = self.client.request(request).await?;\n\n\n\n let status = resp.status();\n\n debug!(\"response status: {:#?}\", status);\n\n\n\n if status.is_success() {\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 90, "score": 30387.770633088207 }, { "content": "\n\n /// return stream of chunks, chunk is a bytes that are stream thru http channel\n\n #[allow(clippy::useless_conversion)]\n\n fn stream_of_chunks<S>(&self, uri: Uri) -> impl Stream<Item = Bytes> + '_\n\n where\n\n S: Spec,\n\n K8Watch<S>: DeserializeOwned,\n\n {\n\n debug!(\"streaming: {}\", uri);\n\n\n\n let ft = async move {\n\n let mut request = match http::Request::get(uri).body(Body::empty()) {\n\n Ok(req) => req,\n\n Err(err) => {\n\n error!(\"error uri err: {}\", err);\n\n return empty().right_stream();\n\n }\n\n };\n\n\n\n if let Err(err) = self.finish_request(&mut request) {\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 91, "score": 30387.73884881246 }, { "content": " &self,\n\n metadata: &M,\n\n option: Option<DeleteOptions>,\n\n ) -> Result<DeleteStatus<S>, ClientError>\n\n where\n\n S: Spec,\n\n M: K8Meta + Send + Sync,\n\n {\n\n use crate::k8_types::DeletedStatus;\n\n\n\n let uri = 
item_uri::<S>(self.hostname(), metadata.name(), metadata.namespace(), None);\n\n debug!(\"{}: delete item on url: {}\", S::label(), uri);\n\n\n\n let body = if let Some(option_value) = option {\n\n let bytes = serde_json::to_vec(&option_value)?;\n\n trace!(\"delete raw : {}\", String::from_utf8_lossy(&bytes));\n\n\n\n bytes.into()\n\n } else {\n\n Body::empty()\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 92, "score": 30386.81527696335 }, { "content": "use hyper::Uri;\n\nuse serde::de::DeserializeOwned;\n\nuse serde_json::Value;\n\nuse tracing::debug;\n\nuse tracing::error;\n\nuse tracing::trace;\n\n\n\nuse k8_config::K8Config;\n\nuse crate::meta_client::{ListArg, MetadataClient, NameSpace, PatchMergeType, TokenStreamResult};\n\nuse crate::k8_types::{\n\n InputK8Obj, K8List, K8Meta, K8Obj, DeleteStatus, K8Watch, Spec, UpdateK8ObjStatus,\n\n};\n\nuse crate::k8_types::options::{ListOptions, DeleteOptions};\n\n\n\nuse crate::uri::{item_uri, items_uri};\n\nuse crate::ClientError;\n\n\n\nuse super::wstream::WatchStream;\n\nuse super::{HyperClient, HyperConfigBuilder, ListStream};\n\n\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 93, "score": 30386.664804760545 }, { "content": " );\n\n err\n\n });\n\n Ok(vec![match result {\n\n Ok(obj) => {\n\n trace!(\"de serialized: {:#?}\", obj);\n\n Ok(obj)\n\n }\n\n Err(err) => Err(err.into()),\n\n }])\n\n })\n\n }\n\n\n\n pub async fn retrieve_items_inner<S, N>(\n\n &self,\n\n namespace: N,\n\n options: Option<ListOptions>,\n\n ) -> Result<K8List<S>, ClientError>\n\n where\n\n S: Spec,\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 94, "score": 30385.962187560515 }, { "content": " let bytes = serde_json::to_vec(&value)?;\n\n trace!(\n\n \"update raw: {}\",\n\n String::from_utf8_lossy(&bytes).to_string()\n\n );\n\n\n\n let request = Request::put(uri)\n\n .header(CONTENT_TYPE, \"application/json\")\n\n .body(bytes.into())?;\n\n\n\n 
self.handle_request(request).await\n\n }\n\n\n\n /// patch existing with spec\n\n async fn patch<S, M>(\n\n &self,\n\n metadata: &M,\n\n patch: &Value,\n\n merge_type: PatchMergeType,\n\n ) -> Result<K8Obj<S>, ClientError>\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 95, "score": 30385.91696484715 }, { "content": " N: Into<NameSpace> + Send + Sync,\n\n {\n\n let uri = items_uri::<S>(self.hostname(), namespace.into(), options);\n\n debug!(\"{}: retrieving items: {}\", S::label(), uri);\n\n let items = self\n\n .handle_request(Request::get(uri).body(Body::empty())?)\n\n .await?;\n\n trace!(\"items retrieved: {:#?}\", items);\n\n Ok(items)\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl MetadataClient for K8Client {\n\n type MetadataClientError = ClientError;\n\n\n\n /// retrieval a single item\n\n async fn retrieve_item<S, M>(&self, metadata: &M) -> Result<K8Obj<S>, ClientError>\n\n where\n\n S: Spec,\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 96, "score": 30384.51603506919 }, { "content": " .header(CONTENT_TYPE, \"application/json\")\n\n .body(bytes.into())?;\n\n\n\n self.handle_request(request).await\n\n }\n\n\n\n /// update status\n\n async fn update_status<S>(&self, value: &UpdateK8ObjStatus<S>) -> Result<K8Obj<S>, ClientError>\n\n where\n\n S: Spec,\n\n {\n\n let uri = item_uri::<S>(\n\n self.hostname(),\n\n &value.metadata.name,\n\n &value.metadata.namespace,\n\n Some(\"/status\"),\n\n );\n\n debug!(\"updating '{}' status - uri: {}\", value.metadata.name, uri);\n\n trace!(\"update status: {:#?}\", &value);\n\n\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 97, "score": 30384.120343108218 }, { "content": " where\n\n S: Spec,\n\n M: K8Meta + Display + Send + Sync,\n\n {\n\n debug!(\"patching item at '{}'\", metadata);\n\n trace!(\"patch json value: {:#?}\", patch);\n\n let uri = item_uri::<S>(self.hostname(), metadata.name(), metadata.namespace(), None);\n\n\n\n let bytes = 
serde_json::to_vec(&patch)?;\n\n\n\n trace!(\n\n \"patch uri: {}, raw: {}\",\n\n uri,\n\n String::from_utf8_lossy(&bytes).to_string()\n\n );\n\n\n\n let request = Request::patch(uri)\n\n .header(ACCEPT, \"application/json\")\n\n .header(CONTENT_TYPE, merge_type.content_type())\n\n .body(bytes.into())?;\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 98, "score": 30383.21367020921 }, { "content": " /// return get stream of uri\n\n fn stream<S>(&self, uri: Uri) -> impl Stream<Item = TokenStreamResult<S, ClientError>> + '_\n\n where\n\n K8Watch<S>: DeserializeOwned,\n\n S: Spec + 'static,\n\n S::Status: 'static,\n\n S::Header: 'static,\n\n {\n\n self.stream_of_chunks(uri).map(move |chunk| {\n\n trace!(\n\n \"decoding raw stream : {}\",\n\n String::from_utf8_lossy(&chunk).to_string()\n\n );\n\n\n\n let result: Result<K8Watch<S>, serde_json::Error> = serde_json::from_slice(&chunk)\n\n .map_err(|err| {\n\n error!(\"parsing error, chunk_len: {}, error: {}\", chunk.len(), err);\n\n error!(\n\n \"error raw stream {}\",\n\n String::from_utf8_lossy(&chunk).to_string()\n", "file_path": "src/k8-client/src/client/client_impl.rs", "rank": 99, "score": 30382.400673701522 } ]
Rust
samples/command_executer/src/lib.rs
costisin/aws-nitro-enclaves-cli
de77067677ef2287661063b4529e937b52115a60
pub mod command_parser; pub mod protocol_helpers; pub mod utils; use command_parser::{CommandOutput, FileArgs, ListenArgs, RunArgs}; use protocol_helpers::{recv_loop, recv_u64, send_loop, send_u64}; use nix::sys::socket::listen as listen_vsock; use nix::sys::socket::{accept, bind, connect, shutdown, socket}; use nix::sys::socket::{AddressFamily, Shutdown, SockAddr, SockFlag, SockType}; use nix::unistd::close; use num_derive::FromPrimitive; use num_traits::FromPrimitive; use std::cmp::min; use std::convert::TryInto; use std::fs::File; use std::io::{Read, Write}; use std::os::unix::io::{AsRawFd, RawFd}; use std::process::Command; pub const VMADDR_CID_ANY: u32 = 0xFFFFFFFF; pub const BUF_MAX_LEN: usize = 8192; pub const BACKLOG: usize = 128; const MAX_CONNECTION_ATTEMPTS: usize = 5; #[derive(Debug, Clone, FromPrimitive)] enum CmdId { RunCmd = 0, RecvFile, SendFile, RunCmdNoWait, } struct VsockSocket { socket_fd: RawFd, } impl VsockSocket { fn new(socket_fd: RawFd) -> Self { VsockSocket { socket_fd } } } impl Drop for VsockSocket { fn drop(&mut self) { shutdown(self.socket_fd, Shutdown::Both) .unwrap_or_else(|e| eprintln!("Failed to shut socket down: {:?}", e)); close(self.socket_fd).unwrap_or_else(|e| eprintln!("Failed to close socket: {:?}", e)); } } impl AsRawFd for VsockSocket { fn as_raw_fd(&self) -> RawFd { self.socket_fd } } fn vsock_connect(cid: u32, port: u32) -> Result<VsockSocket, String> { let sockaddr = SockAddr::new_vsock(cid, port); let mut err_msg = String::new(); for i in 0..MAX_CONNECTION_ATTEMPTS { let vsocket = VsockSocket::new( socket( AddressFamily::Vsock, SockType::Stream, SockFlag::empty(), None, ) .map_err(|err| format!("Failed to create the socket: {:?}", err))?, ); match connect(vsocket.as_raw_fd(), &sockaddr) { Ok(_) => return Ok(vsocket), Err(e) => err_msg = format!("Failed to connect: {}", e), } std::thread::sleep(std::time::Duration::from_secs(1 << i)); } Err(err_msg) } fn run_server(fd: RawFd, no_wait: bool) -> Result<(), String> { let 
len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let command = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let command_output = if no_wait { #[rustfmt::skip] let output = Command::new("sh") .arg("-c") .arg(command) .spawn(); if output.is_err() { CommandOutput::new( String::new(), format!("Could not execute the command {}", command), 1, ) } else { CommandOutput::new(String::new(), String::new(), 0) } } else { let output = Command::new("sh") .arg("-c") .arg(command) .output() .map_err(|err| format!("Could not execute the command {}: {:?}", command, err))?; CommandOutput::new_from(output)? }; let json_output = serde_json::to_string(&command_output) .map_err(|err| format!("Could not serialize the output: {:?}", err))?; let buf = json_output.as_bytes(); let len: u64 = buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(fd, len)?; send_loop(fd, &buf, len)?; Ok(()) } fn recv_file_server(fd: RawFd) -> Result<(), String> { let len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let mut file = File::open(path).map_err(|err| format!("Could not open file {:?}", err))?; let filesize = file .metadata() .map_err(|err| format!("Could not get file metadata {:?}", err))? 
.len(); send_u64(fd, filesize)?; println!("Sending file {} - size {}", path, filesize); let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); file.read_exact(&mut buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not read {:?}", err))?; send_loop(fd, &buf, tmpsize)?; progress += tmpsize } Ok(()) } fn send_file_server(fd: RawFd) -> Result<(), String> { let len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let mut file = File::create(path).map_err(|err| format!("Could not open file {:?}", err))?; let filesize = recv_u64(fd)?; println!("Receiving file {} - size {}", path, filesize); let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); recv_loop(fd, &mut buf, tmpsize)?; file.write_all(&buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not write {:?}", err))?; progress += tmpsize } Ok(()) } pub fn listen(args: ListenArgs) -> Result<(), String> { let socket_fd = socket( AddressFamily::Vsock, SockType::Stream, SockFlag::empty(), None, ) .map_err(|err| format!("Create socket failed: {:?}", err))?; let sockaddr = SockAddr::new_vsock(VMADDR_CID_ANY, args.port); bind(socket_fd, &sockaddr).map_err(|err| format!("Bind failed: {:?}", err))?; listen_vsock(socket_fd, BACKLOG).map_err(|err| format!("Listen failed: {:?}", err))?; loop { let fd = accept(socket_fd).map_err(|err| format!("Accept failed: {:?}", err))?; let cmdid = match recv_u64(fd) { Ok(id_u64) => match CmdId::from_u64(id_u64) { Some(c) => c, _ => { eprintln!("Error no 
such command"); continue; } }, Err(e) => { eprintln!("Error {}", e); continue; } }; match cmdid { CmdId::RunCmd => { if let Err(e) = run_server(fd, false) { eprintln!("Error {}", e); } } CmdId::RecvFile => { if let Err(e) = recv_file_server(fd) { eprintln!("Error {}", e); } } CmdId::SendFile => { if let Err(e) = send_file_server(fd) { eprintln!("Error {}", e); } } CmdId::RunCmdNoWait => { if let Err(e) = run_server(fd, true) { eprintln!("Error {}", e); } } } } } pub fn run(args: RunArgs) -> Result<i32, String> { let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); if args.no_wait { send_u64(socket_fd, CmdId::RunCmdNoWait as u64)?; } else { send_u64(socket_fd, CmdId::RunCmd as u64)?; } let buf = args.command.as_bytes(); let len: u64 = buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let mut buf = [0u8; BUF_MAX_LEN]; let len = recv_u64(socket_fd)?; let mut json_output = String::new(); let mut to_recv = len; while to_recv > 0 { let recv_len = min(BUF_MAX_LEN as u64, to_recv); recv_loop(socket_fd, &mut buf, recv_len)?; to_recv -= recv_len; let to_recv_usize: usize = recv_len.try_into().map_err(|err| format!("{:?}", err))?; json_output.push_str( std::str::from_utf8(&buf[0..to_recv_usize]).map_err(|err| format!("{:?}", err))?, ); } let output: CommandOutput = serde_json::from_str(json_output.as_str()) .map_err(|err| format!("Could not deserialize the output: {:?}", err))?; print!("{}", output.stdout); eprint!("{}", output.stderr); let rc = match output.rc { Some(code) => code, _ => 0, }; Ok(rc) } pub fn recv_file(args: FileArgs) -> Result<(), String> { let mut file = File::create(&args.localfile) .map_err(|err| format!("Could not open localfile {:?}", err))?; let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); send_u64(socket_fd, CmdId::RecvFile as u64)?; let buf = args.remotefile.as_bytes(); let len: u64 = 
buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let filesize = recv_u64(socket_fd)?; println!( "Receiving file {}(saving to {}) - size {}", &args.remotefile, &args.localfile[..], filesize ); let mut progress: u64 = 0; let mut tmpsize: u64; let mut buf = [0u8; BUF_MAX_LEN]; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); recv_loop(socket_fd, &mut buf, tmpsize)?; file.write_all(&buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not write {:?}", err))?; progress += tmpsize } Ok(()) } pub fn send_file(args: FileArgs) -> Result<(), String> { let mut file = File::open(&args.localfile).map_err(|err| format!("Could not open localfile {:?}", err))?; let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); send_u64(socket_fd, CmdId::SendFile as u64)?; let buf = args.remotefile.as_bytes(); let len: u64 = buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let filesize = file .metadata() .map_err(|err| format!("Could not get file metadate {:?}", err))? .len(); send_u64(socket_fd, filesize)?; println!( "Sending file {}(sending to {}) - size {}", &args.localfile, &args.remotefile[..], filesize ); let mut buf = [0u8; BUF_MAX_LEN]; let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); file.read_exact(&mut buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not read {:?}", err))?; send_loop(socket_fd, &buf, tmpsize)?; progress += tmpsize } Ok(()) }
pub mod command_parser; pub mod protocol_helpers; pub mod utils; use command_parser::{CommandOutput, FileArgs, ListenArgs, RunArgs}; use protocol_helpers::{recv_loop, recv_u64, send_loop, send_u64}; use nix::sys::socket::listen as listen_vsock; use nix::sys::socket::{accept, bind, connect, shutdown, socket}; use nix::sys::socket::{AddressFamily, Shutdown, SockAddr, SockFlag, SockType}; use nix::unistd::close; use num_derive::FromPrimitive; use num_traits::FromPrimitive; use std::cmp::min; use std::convert::TryInto; use std::fs::File; use std::io::{Read, Write}; use std::os::unix::io::{AsRawFd, RawFd}; use std::process::Command; pub const VMADDR_CID_ANY: u32 = 0xFFFFFFFF; pub const BUF_MAX_LEN: usize = 8192; pub const BACKLOG: usize = 128; const MAX_CONNECTION_ATTEMPTS: usize = 5; #[derive(Debug, Clone, FromPrimitive)] enum CmdId { RunCmd = 0, RecvFile, SendFile, RunCmdNoWait, } struct VsockSocket { socket_fd: RawFd, } impl VsockSocket { fn new(socket_fd: RawFd) -> Self { VsockSocket { socket_fd } } } impl Drop for VsockSocket { fn drop(&mut self) { shutdown(self.socket_fd, Shutdown::Both) .unwrap_or_else(|e| eprintln!("Failed to shut socket down: {:?}", e)); close(self.socket_fd).unwrap_or_else(|e| eprintln!("Failed to close socket: {:?}", e)); } } impl AsRawFd for VsockSocket { fn as_raw_fd(&self) -> RawFd { self.socket_fd } } fn vsock_connect(cid: u32, port: u32) -> Result<VsockSocket, String> { let sockaddr = SockAddr::new_vsock(cid, port); let mut err_msg = String::new(); for i in 0..MAX_CONNECTION_ATTEMPTS { let vsocket = VsockSocket::new( socket( AddressFamily::Vsock, SockType::Stream, SockFlag::empty(), None, ) .map_err(|err| format!("Failed to create the socket: {:?}", err))?, ); match connect(vsocket.as_raw_fd(), &sockaddr) { Ok(_) => return Ok(vsocket), Err(e) => err_msg = format!("Failed to connect: {}", e), } std::thread::sleep(std::time::Duration::from_secs(1 << i)); } Err(err_msg) } fn run_server(fd: RawFd, no_wait: bool) -> Result<(), String> { let 
len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let command = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let command_output = if no_wait { #[rustfmt::skip] let output = Command::new("sh") .arg("-c") .arg(command) .spawn(); if output.is_err() { CommandOutput::new( String::new(), format!("Could not execute the command {}", command), 1, ) } else { CommandOutput::new(String::new(), String::new(), 0) } } else { let output = Command::new("sh") .arg("-c") .arg(command) .output() .map_err(|err| format!("Could not execute the command {}: {:?}", command, err))?; CommandOutput::new_from(output)? }; let json_output = serde_json::to_string(&command_output) .map_err(|err| format!("Could not serialize the output: {:?}", err))?; let buf = json_output.as_bytes(); let len: u64 = buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(fd, len)?; send_loop(fd, &buf, len)?; Ok(()) } fn recv_file_server(fd: RawFd) -> Result<(), String> { let len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let mut file = File::open(path).map_err(|err| format!("Could not open file {:?}", err))?; let filesize = file .metadata() .map_err(|err| format!("Could not get file metadata {:?}", err))? 
.len(); send_u64(fd, filesize)?; println!("Sending file {} - size {}", path, filesize); let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); file.read_exact(&mut buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not read {:?}", err))?; send_loop(fd, &buf, tmpsize)?; progress += tmpsize } Ok(()) } fn send_file_server(fd: RawFd) -> Result<(), String> { let len = recv_u64(fd)?; let mut buf = [0u8; BUF_MAX_LEN]; recv_loop(fd, &mut buf, len)?; let len_usize = len.try_into().map_err(|err| format!("{:?}", err))?; let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!("{:?}", err))?; let mut file = File::create(path).map_err(|err| format!("Could not open file {:?}", err))?; let filesize = recv_u64(fd)?; println!("Receiving file {} - size {}", path, filesize); let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); recv_loop(fd, &mut buf, tmpsize)?; file.write_all(&buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not write {:?}", err))?; progress += tmpsize } Ok(()) } pub fn listen(args: ListenArgs) -> Result<(), String> { let socket_fd = socket( AddressFamily::Vsock, SockType::Stream, SockFlag::empty(), None, ) .map_err(|err| format!("Create socket failed: {:?}", err))?; let sockaddr = SockAddr::new_vsock(VMADDR_CID_ANY, args.port); bind(socket_fd, &sockaddr).map_err(|err| format!("Bind failed: {:?}", err))?; listen_vsock(socket_fd, BACKLOG).map_err(|err| format!("Listen failed: {:?}", err))?; loop { let fd = accept(socket_fd).map_err(|err| format!("Accept failed: {:?}", err))?; let cmdid = match recv_u64(fd) { Ok(id_u64) => match CmdId::from_u64(id_u64) { Some(c) => c, _ => { eprintln!("Error no 
such command"); continue; } }, Err(e) => { eprintln!("Error {}", e); continue; } }; match cmdid { CmdId::RunCmd => { if let Err(e) = run_server(fd, false) { eprintln!("Error {}", e); } } CmdId::RecvFile => { if let Err(e) = recv_file_server(fd) { eprintln!("Error {}", e); } } CmdId::SendFile => { if let Err(e) = send_file_server(fd) { eprintln!("Error {}", e); } } CmdId::RunCmdNoWait => { if let Err(e) = run_server(fd, true) { eprintln!("Error {}", e); } } } } } pub fn run(args: RunArgs) -> Result<i32, String> { let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); if args.no_wait { send_u64(socket_fd, CmdId::RunCmdNoWait as u64)?; } else { send_u64(socket_fd, CmdId::RunCmd as u64)?; } let buf = args.command.as_bytes(); let len: u64 = buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let mut buf = [0u8; BUF_MAX_LEN]; let len = recv_u64(socket_fd)?; let mut json_output = String::new(); let mut to_recv = len; while to_recv > 0 { let recv_len = min(BUF_MAX_LEN as u64, to_recv); recv_loop(socket_fd, &mut buf, recv_len)?; to_recv -= recv_len; let to_recv_usize: usize = recv_len.try_into().map_err(|err| format!("{:?}", err))?; json_output.push_str( std::str::from_utf8(&buf[0..to_recv_usize]).map_err(|err| format!("{:?}", err))?, ); } let output: CommandOutput = serde_json::from_str(json_output.as_str()) .map_err(|err| format!("Could not deserialize the output: {:?}", err))?; print!("{}", output.stdout); eprint!("{}", output.stderr); let rc = match output.rc { Some(code) => code, _ => 0, }; Ok(rc) } pub fn recv_file(args: FileArgs) -> Result<(), String> { let mut file = File::create(&args.localfile) .map_err(|err| format!("Could not open localfile {:?}", err))?; let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); send_u64(socket_fd, CmdId::RecvFile as u64)?; let buf = args.remotefile.as_bytes(); let len: u64 = 
buf.len().try_into().map_err(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let filesize = recv_u64(socket_fd)?; println!( "Receiving file {}(saving to {}) - size {}", &args.remotefile, &args.localfile[..], filesize ); let mut progress: u64 = 0; let mut tmpsize: u64; let mut buf = [0u8; BUF_MAX_LEN]; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); recv_loop(socket_fd, &mut buf, tmpsize)?; file.write_all(&buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could n
r(|err| format!("{:?}", err))?; send_u64(socket_fd, len)?; send_loop(socket_fd, &buf, len)?; let filesize = file .metadata() .map_err(|err| format!("Could not get file metadate {:?}", err))? .len(); send_u64(socket_fd, filesize)?; println!( "Sending file {}(sending to {}) - size {}", &args.localfile, &args.remotefile[..], filesize ); let mut buf = [0u8; BUF_MAX_LEN]; let mut progress: u64 = 0; let mut tmpsize: u64; while progress < filesize { tmpsize = buf.len().try_into().map_err(|err| format!("{:?}", err))?; tmpsize = min(tmpsize, filesize - progress); file.read_exact(&mut buf[..tmpsize.try_into().map_err(|err| format!("{:?}", err))?]) .map_err(|err| format!("Could not read {:?}", err))?; send_loop(socket_fd, &buf, tmpsize)?; progress += tmpsize } Ok(()) }
ot write {:?}", err))?; progress += tmpsize } Ok(()) } pub fn send_file(args: FileArgs) -> Result<(), String> { let mut file = File::open(&args.localfile).map_err(|err| format!("Could not open localfile {:?}", err))?; let vsocket = vsock_connect(args.cid, args.port)?; let socket_fd = vsocket.as_raw_fd(); send_u64(socket_fd, CmdId::SendFile as u64)?; let buf = args.remotefile.as_bytes(); let len: u64 = buf.len().try_into().map_er
random
[ { "content": "pub fn recv_loop(fd: RawFd, buf: &mut [u8], len: u64) -> Result<(), String> {\n\n let len: usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let mut recv_bytes = 0;\n\n\n\n while recv_bytes < len {\n\n let size = match recv(fd, &mut buf[recv_bytes..len], MsgFlags::empty()) {\n\n Ok(size) => size,\n\n Err(nix::Error::Sys(EINTR)) => 0,\n\n Err(err) => return Err(format!(\"{:?}\", err)),\n\n };\n\n recv_bytes += size;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 0, "score": 456102.1243507443 }, { "content": "pub fn send_loop(fd: RawFd, buf: &[u8], len: u64) -> Result<(), String> {\n\n let len: usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let mut send_bytes = 0;\n\n\n\n while send_bytes < len {\n\n let size = match send(fd, &buf[send_bytes..len], MsgFlags::empty()) {\n\n Ok(size) => size,\n\n Err(nix::Error::Sys(EINTR)) => 0,\n\n Err(err) => return Err(format!(\"{:?}\", err)),\n\n };\n\n send_bytes += size;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 1, "score": 445150.83098310913 }, { "content": "pub fn recv_u64(fd: RawFd) -> Result<u64, String> {\n\n let mut buf = [0u8; 9];\n\n recv_loop(fd, &mut buf, 9)?;\n\n let val = LittleEndian::read_u64(&buf);\n\n Ok(val)\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 2, "score": 390323.5686168078 }, { "content": "pub fn send_u64(fd: RawFd, val: u64) -> Result<(), String> {\n\n let mut buf = [0u8; 9];\n\n LittleEndian::write_u64(&mut buf, val);\n\n send_loop(fd, &mut buf, 9)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 4, "score": 374686.06376070296 }, { "content": "/// Read a LE-encoded 64-bit unsigned value from a socket.\n\npub fn read_u64_le(socket: &mut dyn Read) -> NitroCliResult<u64> {\n\n let mut bytes = [0u8; std::mem::size_of::<u64>()];\n\n 
socket.read_exact(&mut bytes).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to read {} bytes from the given socket: {:?}\",\n\n std::mem::size_of::<u64>(),\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n\n\n Ok(u64::from_le_bytes(bytes))\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 7, "score": 349482.77920560504 }, { "content": "/// Write a LE-encoded 64-bit unsigned value to a socket.\n\npub fn write_u64_le(socket: &mut dyn Write, value: u64) -> NitroCliResult<()> {\n\n let bytes = value.to_le_bytes();\n\n socket.write_all(&bytes).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to write {} bytes to the given socket: {:?}\",\n\n std::mem::size_of::<u64>(),\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 11, "score": 337240.1688863947 }, { "content": "fn parse_port(args: &ArgMatches) -> Result<u32, String> {\n\n let port = args\n\n .value_of(\"port\")\n\n .ok_or(\"Could not find port argument\")?;\n\n port.parse()\n\n .map_err(|_err| \"port is not a number\".to_string())\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 12, "score": 321514.9474942394 }, { "content": "pub fn recv_i32(fd: RawFd) -> Result<i32, String> {\n\n let mut buf = [0u8; 4];\n\n recv_loop(fd, &mut buf, 4)?;\n\n let val = LittleEndian::read_i32(&buf);\n\n Ok(val)\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 13, "score": 314959.92352139205 }, { "content": "/// Obtain an enclave's slot ID from its full ID.\n\npub fn get_slot_id(enclave_id: String) -> Result<u64, String> {\n\n let tokens: Vec<&str> = enclave_id.split(\"-enc\").collect();\n\n\n\n match tokens.get(1) {\n\n Some(slot_id) => u64::from_str_radix(*slot_id, 16)\n\n .map_err(|_err| \"Invalid enclave id format\".to_string()),\n\n None => Err(\"Invalid enclave_id.\".to_string()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n 
use super::*;\n\n\n\n #[test]\n\n fn test_generate_enclave_id() {\n\n let slot_id: u64 = 7;\n\n let enc_id = generate_enclave_id(slot_id);\n\n let file_path = \"/sys/devices/virtual/dmi/id/board_asset_tag\";\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 14, "score": 312612.61564890726 }, { "content": "/// Get the directory containing Nitro CLI related log files.\n\npub fn get_log_file_base_path() -> String {\n\n match env::var(LOGS_DIR_PATH_ENV_VAR) {\n\n Ok(env_path) => env_path,\n\n Err(_) => LOGS_DIR_PATH.to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/common/logger.rs", "rank": 15, "score": 305114.7756890998 }, { "content": "pub fn send_i32(fd: RawFd, val: i32) -> Result<(), String> {\n\n let mut buf = [0u8; 4];\n\n LittleEndian::write_i32(&mut buf, val);\n\n send_loop(fd, &mut buf, 4)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 16, "score": 302530.58571570984 }, { "content": "fn parse_localfile(args: &ArgMatches) -> Result<String, String> {\n\n let output = args\n\n .value_of(\"localpath\")\n\n .ok_or(\"Could not find localpath\")?;\n\n Ok(String::from(output))\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 17, "score": 294008.1266718526 }, { "content": "/// Get the path to the directory containing the Unix sockets owned by all enclave processes.\n\npub fn get_sockets_dir_path() -> PathBuf {\n\n let log_path = match env::var(SOCKETS_DIR_PATH_ENV_VAR) {\n\n Ok(env_path) => env_path,\n\n Err(_) => SOCKETS_DIR_PATH.to_string(),\n\n };\n\n Path::new(&log_path).to_path_buf()\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 18, "score": 291844.26005000225 }, { "content": "fn parse_cid(args: &ArgMatches) -> Result<u32, String> {\n\n let port = args.value_of(\"cid\").ok_or(\"Could not find cid argument\")?;\n\n port.parse()\n\n .map_err(|_err| \"cid is not a number\".to_string())\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", 
"rank": 19, "score": 291730.3607781278 }, { "content": "/// Open a file at a given location for writing and appending.\n\nfn open_log_file(file_path: &Path) -> NitroCliResult<File> {\n\n let file = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .read(false)\n\n .open(file_path)\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open log file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![\n\n file_path\n\n .to_str()\n\n .unwrap_or(\"Invalid unicode log file name\"),\n\n \"Open\",\n\n ])\n\n })?;\n\n\n\n let log_file_uid = Uid::from_raw(\n", "file_path": "src/common/logger.rs", "rank": 20, "score": 290522.19917322195 }, { "content": "/// Get the path to the Unix socket owned by an enclave process which also owns the enclave with the given ID.\n\npub fn get_socket_path(enclave_id: &str) -> NitroCliResult<PathBuf> {\n\n // The full enclave ID is \"i-(...)-enc<enc_id>\" and we want to extract only <enc_id>.\n\n let tokens: Vec<_> = enclave_id.rsplit(\"-enc\").collect();\n\n let sockets_path = get_sockets_dir_path();\n\n Ok(sockets_path.join(tokens[0]).with_extension(\"sock\"))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[allow(unused_imports)]\n\n use super::*;\n\n\n\n use crate::common::commands_parser::EmptyArgs;\n\n\n\n const TMP_DIR_STR: &str = \"./tmp_sock_dir\";\n\n\n\n fn unset_envvar(varname: &String) {\n\n let _ = unsafe {\n\n libc::unsetenv(varname.as_ptr() as *const i8);\n\n };\n", "file_path": "src/common/mod.rs", "rank": 21, "score": 278296.61879317404 }, { "content": "fn parse_command(args: &ArgMatches) -> Result<String, String> {\n\n let command = args\n\n .value_of(\"command\")\n\n .ok_or(\"Could not find command argument\")?;\n\n Ok(String::from(command))\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 22, "score": 270324.7679897451 }, { "content": "fn parse_remotefile(args: &ArgMatches) -> Result<String, String> {\n\n let output = args\n\n 
.value_of(\"remotepath\")\n\n .ok_or(\"Could not find remotepath\")?;\n\n Ok(String::from(output))\n\n}\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 23, "score": 264208.377040709 }, { "content": "/// Get a string representation of the bit-mask which holds the enclave launch flags.\n\npub fn flags_to_string(flags: u64) -> String {\n\n if flags & NE_ENCLAVE_DEBUG_MODE == NE_ENCLAVE_DEBUG_MODE {\n\n \"DEBUG_MODE\"\n\n } else {\n\n \"NONE\"\n\n }\n\n .to_string()\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 25, "score": 256821.1862585495 }, { "content": "/// Receive an object of a specified type from an input stream.\n\npub fn receive_from_stream<T>(input_stream: &mut dyn Read) -> NitroCliResult<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let size = read_u64_le(input_stream)\n\n .map_err(|e| e.add_subaction(\"Failed to receive data size\".to_string()))?\n\n as usize;\n\n let mut raw_data: Vec<u8> = vec![0; size];\n\n input_stream.read_exact(&mut raw_data[..]).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to receive data: {:?}\", e),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n let data: T = serde_cbor::from_slice(&raw_data[..]).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to decode received data: {:?}\", e),\n\n NitroCliErrorEnum::SerdeError\n\n )\n\n })?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 26, "score": 255605.49167782674 }, { "content": "/// Generate a unique ID for a new enclave with the specified slot ID.\n\npub fn generate_enclave_id(slot_id: u64) -> NitroCliResult<String> {\n\n let file_path = \"/sys/devices/virtual/dmi/id/board_asset_tag\";\n\n if metadata(file_path).is_ok() {\n\n let mut file = File::open(file_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![file_path, \"Open\"])\n\n })?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to read from file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![file_path, \"Read\"])\n\n })?;\n\n contents.retain(|c| !c.is_whitespace());\n\n return Ok(format!(\"{}-enc{:x}\", contents, slot_id));\n\n }\n\n Ok(format!(\"i-0000000000000000-enc{:x}\", slot_id))\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 28, "score": 245744.39163337369 }, { "content": "/// Get the path to the log file.\n\nfn get_log_file_path() -> PathBuf {\n\n Path::new(&get_log_file_base_path()).join(LOG_FILE_NAME)\n\n}\n\n\n", "file_path": "src/common/logger.rs", "rank": 29, "score": 243701.76184568394 }, { "content": "/// Transfers a chunck of maximum 4KB from src to dst\n\n/// If no error occurs, returns true if the source disconnects and false otherwise\n\nfn transfer(src: &mut dyn Read, dst: &mut dyn Write) -> bool {\n\n let mut buffer = [0u8; BUFF_SIZE];\n\n\n\n let nbytes = src.read(&mut buffer);\n\n let nbytes = nbytes.unwrap_or(0);\n\n\n\n if nbytes == 0 {\n\n return true;\n\n }\n\n\n\n dst.write_all(&buffer[..nbytes]).is_err()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use rand;\n\n use std::fs;\n\n use std::fs::File;\n\n use std::io::Write;\n\n use std::process::Command;\n", "file_path": "vsock_proxy/src/starter.rs", "rank": 30, "score": 238601.75498446682 }, { "content": "fn write_config(config: Vec<String>) -> Result<NamedTempFile, DockerError> {\n\n let mut file = NamedTempFile::new().map_err(|_| DockerError::TempfileError)?;\n\n\n\n for line in config {\n\n file.write_fmt(format_args!(\"{}\\n\", line))\n\n .map_err(|_| DockerError::TempfileError)?;\n\n }\n\n\n\n Ok(file)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Read;\n\n\n\n /// Test extracted configuration is as expected\n\n #[test]\n\n fn test_config() {\n\n let docker = DockerUtil::new(String::from(\n", "file_path": 
"enclave_build/src/docker.rs", "rank": 31, "score": 223651.1808087624 }, { "content": "/// Returns detailed error information based on supplied arguments.\n\npub fn get_detailed_info(error_code_str: String, additional_info: &[String]) -> String {\n\n let mut ret = format!(\"[ {} ] \", error_code_str);\n\n let info_placeholder = \"MISSING_INFO\".to_string();\n\n\n\n match error_code_str.as_str() {\n\n \"E00\" => {\n\n ret.push_str(\"Unspecified error. This is used as a catch-all error and should not be used in the code.\");\n\n }\n\n \"E01\" => {\n\n ret.push_str(\n\n format!(\n\n \"Missing mandatory argument. User did not provide the `{}` argument.\",\n\n additional_info.get(0).unwrap_or(&info_placeholder)\n\n )\n\n .as_str(),\n\n );\n\n }\n\n \"E02\" => {\n\n ret.push_str(\n\n format!(\n", "file_path": "src/common/document_errors.rs", "rank": 32, "score": 223649.79785453266 }, { "content": "/// Parse the enclave image file path from the command-line arguments.\n\nfn parse_eif_path(args: &ArgMatches) -> NitroCliResult<String> {\n\n let eif_path = args.value_of(\"eif-path\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`eif-path` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(eif_path.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 33, "score": 216462.43441406902 }, { "content": "/// Computes the ceil of `lhs / rhs`. 
Used for reporting the lower\n\n/// limit of enclave memory based on the EIF file size.\n\npub fn ceil_div(lhs: u64, rhs: u64) -> u64 {\n\n if rhs == 0 {\n\n return std::u64::MAX;\n\n }\n\n\n\n lhs / rhs\n\n + match lhs % rhs {\n\n 0 => 0,\n\n _ => 1,\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 34, "score": 215401.6304980404 }, { "content": "/// Set a timeout on a vsock connection.\n\nfn vsock_set_connect_timeout(fd: RawFd, millis: i64) -> NitroCliResult<()> {\n\n let timeval = TimeVal::milliseconds(millis);\n\n let ret = unsafe {\n\n libc::setsockopt(\n\n fd as i32,\n\n libc::AF_VSOCK,\n\n SO_VM_SOCKETS_CONNECT_TIMEOUT,\n\n &timeval as *const _ as *const c_void,\n\n size_of::<TimeVal>() as u32,\n\n )\n\n };\n\n\n\n match ret {\n\n 0 => Ok(()),\n\n _ => Err(new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to configure SO_VM_SOCKETS_CONNECT_TIMEOUT: {:?}\",\n\n ret\n\n ),\n\n NitroCliErrorEnum::SocketConnectTimeoutError\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 35, "score": 214504.51179054595 }, { "content": "fn parse_error_code_str(args: &ArgMatches) -> NitroCliResult<String> {\n\n let error_code_str = args.value_of(\"error-code\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`error-code` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(error_code_str.to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::common::construct_error_message;\n\n use crate::create_app;\n\n\n\n use clap::{App, AppSettings, Arg, SubCommand};\n\n\n\n /// Parse the path of the JSON config file\n", "file_path": "src/common/commands_parser.rs", "rank": 36, "score": 211812.37595081294 }, { "content": "fn parse_no_wait(args: &ArgMatches) -> bool {\n\n if args.is_present(\"no-wait\") {\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 37, "score": 211104.9929373604 }, { "content": "pub trait ExitGracefully<T, E> {\n\n fn 
ok_or_exit(self, message: &str) -> T;\n\n}\n\n\n\nimpl<T, E: std::fmt::Debug> ExitGracefully<T, E> for Result<T, E> {\n\n fn ok_or_exit(self, message: &str) -> T {\n\n match self {\n\n Ok(val) => val,\n\n Err(err) => {\n\n error!(\"{:?}: {}\", err, message);\n\n std::process::exit(1);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! create_app {\n\n () => {\n\n App::new(\"Vsock Tool\")\n", "file_path": "samples/command_executer/src/utils.rs", "rank": 39, "score": 210145.411365818 }, { "content": "fn vsock_connect(port: u32) -> VsockStream {\n\n let sockaddr = SockAddr::new_vsock(vsock_proxy::starter::VSOCK_PROXY_CID, port);\n\n VsockStream::connect(&sockaddr).expect(\"Could not connect\")\n\n}\n\n\n\n/// Test connection with both client and server sending each other messages\n", "file_path": "vsock_proxy/tests/connection_test.rs", "rank": 40, "score": 206599.2234807593 }, { "content": "/// Get the action associated with `cmd` as a String.\n\nfn get_command_action(cmd: EnclaveProcessCommandType) -> String {\n\n match cmd {\n\n EnclaveProcessCommandType::Run => \"Run Enclave\".to_string(),\n\n EnclaveProcessCommandType::Terminate | EnclaveProcessCommandType::TerminateComplete => {\n\n \"Terminate Enclave\".to_string()\n\n }\n\n EnclaveProcessCommandType::Describe => \"Describe Enclaves\".to_string(),\n\n EnclaveProcessCommandType::GetEnclaveCID => \"Get Enclave CID\".to_string(),\n\n EnclaveProcessCommandType::GetEnclaveFlags => \"Get Enclave Flags\".to_string(),\n\n EnclaveProcessCommandType::ConnectionListenerStop => \"Stop Connection Listener\".to_string(),\n\n _ => \"Unknown Command\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 41, "score": 206117.48345520865 }, { "content": "/// Notify both the user and the logger of an error.\n\npub fn notify_error(err_msg: &str) {\n\n eprintln!(\"{}\", err_msg);\n\n error!(\"{}\", err_msg);\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 42, "score": 202411.94333331526 }, 
{ "content": "/// Returns a link with more detailed information regarding a specific error.\n\npub fn construct_help_link(error_code_str: String) -> String {\n\n format!(\n\n \"https://docs.aws.amazon.com/enclaves/latest/user/cli-errors.html#{}\",\n\n error_code_str\n\n )\n\n}\n\n\n", "file_path": "src/common/document_errors.rs", "rank": 43, "score": 202147.8855068369 }, { "content": "/// Parse the path of an output file from the command-line arguments.\n\nfn parse_output(args: &ArgMatches) -> Option<String> {\n\n args.value_of(\"output-file\").map(|val| val.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 44, "score": 199975.10828951874 }, { "content": "/// Logs the given backtrace string to a separate, backtrace-specific file.\n\n/// Returns a string denoting the path to the corresponding log file.\n\nfn log_backtrace(backtrace: String) -> Result<String, &'static str> {\n\n let log_path_base = get_log_file_base_path();\n\n\n\n // Check if backtrace logs location exists and create it if necessary.\n\n if !Path::new(&log_path_base).exists() {\n\n let create_logs_dir = std::fs::create_dir_all(&log_path_base);\n\n if create_logs_dir.is_err() {\n\n return Err(\"Could not create backtrace logs directory\");\n\n }\n\n }\n\n\n\n let utc_time_now = Utc::now().to_rfc3339();\n\n let log_path_str = format!(\"{}/err{}.log\", &log_path_base, utc_time_now);\n\n let log_path = Path::new(&log_path_str);\n\n let log_file = std::fs::File::create(log_path);\n\n if log_file.is_err() {\n\n return Err(\"Could not create backtrace log file\");\n\n }\n\n\n\n let write_result = log_file.unwrap().write_all(&backtrace.as_bytes());\n\n if write_result.is_err() {\n\n return Err(\"Could not write to backtrace log file\");\n\n }\n\n\n\n match log_path.to_str() {\n\n Some(log_path) => Ok(log_path.to_string()),\n\n None => Err(\"Could not return log file path\"),\n\n }\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 45, "score": 199689.47802320786 }, { 
"content": "/// Obtain an enclave's CID given its full ID.\n\npub fn enclave_proc_get_cid(enclave_id: &str) -> NitroCliResult<u64> {\n\n let mut comm = enclave_proc_connect_to_single(enclave_id)\n\n .map_err(|e| e.add_subaction(\"Failed to connect to enclave process\".to_string()))?;\n\n // TODO: Replicate output of old CLI on invalid enclave IDs.\n\n enclave_proc_command_send_single::<EmptyArgs>(\n\n EnclaveProcessCommandType::GetEnclaveCID,\n\n None,\n\n &mut comm,\n\n )\n\n .map_err(|e| e.add_subaction(\"Failed to send CID request to enclave process\".to_string()))?;\n\n\n\n info!(\"Sent command: GetEnclaveCID\");\n\n let enclave_cid = read_u64_le(&mut comm)\n\n .map_err(|e| e.add_subaction(String::from(\"Failed to read CID from enclave process\")))?;\n\n\n\n // We got the CID, so shut the connection down.\n\n comm.shutdown(std::net::Shutdown::Both).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to shut down connection after obtaining enclave CID: {:?}\",\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n\n\n Ok(enclave_cid)\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 46, "score": 194958.5113902629 }, { "content": "/// Obtain an enclave's flags given its full ID.\n\npub fn enclave_proc_get_flags(enclave_id: &str) -> NitroCliResult<u64> {\n\n let mut comm = enclave_proc_connect_to_single(enclave_id)\n\n .map_err(|e| e.add_subaction(\"Failed to connect to enclave process\".to_string()))?;\n\n // TODO: Replicate output of old CLI on invalid enclave IDs.\n\n enclave_proc_command_send_single::<EmptyArgs>(\n\n EnclaveProcessCommandType::GetEnclaveFlags,\n\n None,\n\n &mut comm,\n\n )\n\n .map_err(|e| e.add_subaction(\"Failed to send flags request to enclave process\".to_string()))?;\n\n\n\n info!(\"Sent command: GetEnclaveFlags\");\n\n let enclave_flags = read_u64_le(&mut comm)\n\n .map_err(|e| e.add_subaction(String::from(\"Failed to read flags from enclave process\")))?;\n\n\n\n // We got the flags, so shut the 
connection down.\n\n comm.shutdown(std::net::Shutdown::Both).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to shut down connection after obtaining enclave flags: {:?}\",\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n\n\n Ok(enclave_flags)\n\n}\n", "file_path": "src/enclave_proc_comm.rs", "rank": 47, "score": 194958.5113902629 }, { "content": "/// Detailed information based on user-provided error code.\n\npub fn explain_error(error_code_str: String) {\n\n match error_code_str.as_str() {\n\n \"E00\" => {\n\n eprintln!(\"Unspecified error. This is used as a catch-all error and should not be used in the code.\");\n\n }\n\n \"E01\" => {\n\n eprintln!(\"Missing mandatory argument. Such error appears when the Nitro CLI is requested to perform an operation, but not all of the mandatory arguments were supplied.\\n\\tExample: `nitro-cli run-enclave --cpu-count 2 --eif-path /path/to/my/eif`. Note that in this case, the mandatory parameter `--memory` is missing a value.\");\n\n }\n\n \"E02\" => {\n\n eprintln!(\"CLI conflicting arguments. Such error appears when the Nitro CLI is supplied two contradicting arguments at the same time, such as `--cpu-count` and `--cpu-ids`.\\nIn this case, only one of the parameters should be supplied.\");\n\n }\n\n \"E03\" => {\n\n eprintln!(\"Invalid argument provided. Such error appears when the type of at least one of the arguments provided to the Nitro CLI does not match the expected type of that parameter.\\n\\tExample: `nitro-cli run-enclave --cpu-count 1z --memory 80 --eif-path /path/to/my/eif`. In this case, `cpu-count` is not a valid integer value.\" );\n\n }\n\n \"E04\" => {\n\n eprintln!(\"Socket pair creation failure. Such error apears when the Nitro CLI process attempts to open a stream pair in order to send a command to the enclave process, but the stream initialization fails.\");\n\n }\n\n \"E05\" => {\n\n eprintln!(\"Process spawn failure. 
Such error appears when the main Nitro CLI process failed to spawn the enclave process, in order to complete a `run-enclave` command.\");\n\n }\n", "file_path": "src/common/document_errors.rs", "rank": 48, "score": 193970.88513532546 }, { "content": "/// Parse the requested amount of enclave memory from the command-line arguments.\n\nfn parse_memory(args: &ArgMatches) -> NitroCliResult<u64> {\n\n let memory = args.value_of(\"memory\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`memory` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n memory.parse().map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"`memory` is not a number\",\n\n NitroCliErrorEnum::InvalidArgument\n\n )\n\n .add_info(vec![\"memory\", memory])\n\n })\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 49, "score": 191218.07820984133 }, { "content": "/// Obtain the enclave information requested by the `describe-enclaves` command.\n\npub fn get_enclave_describe_info(\n\n enclave_manager: &EnclaveManager,\n\n) -> NitroCliResult<EnclaveDescribeInfo> {\n\n let (slot_uid, enclave_cid, cpus_count, cpu_ids, memory_mib, flags, state) =\n\n enclave_manager.get_description_resources()?;\n\n let info = EnclaveDescribeInfo::new(\n\n generate_enclave_id(slot_uid)?,\n\n enclave_cid,\n\n cpus_count,\n\n cpu_ids,\n\n memory_mib,\n\n state.to_string(),\n\n flags_to_string(flags),\n\n );\n\n Ok(info)\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 50, "score": 190436.16568337823 }, { "content": "/// Obtain the enclave information requested by the `run-enclaves` command.\n\npub fn get_run_enclaves_info(\n\n enclave_cid: u64,\n\n slot_id: u64,\n\n cpu_ids: Vec<u32>,\n\n memory: u64,\n\n) -> NitroCliResult<EnclaveRunInfo> {\n\n let info = EnclaveRunInfo::new(\n\n generate_enclave_id(slot_id)?,\n\n enclave_cid,\n\n cpu_ids.len(),\n\n cpu_ids,\n\n memory,\n\n );\n\n Ok(info)\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 51, "score": 
190436.16568337823 }, { "content": "/// Returns the value of the `NITRO_CLI_ARTIFACTS` environment variable.\n\n///\n\n/// This variable configures the path where the build artifacts should be saved.\n\nfn artifacts_path() -> NitroCliResult<String> {\n\n if let Ok(artifacts) = std::env::var(\"NITRO_CLI_ARTIFACTS\") {\n\n std::fs::create_dir_all(artifacts.clone()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create artifacts path {}: {:?}\", artifacts, e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![&artifacts, \"Create\"])\n\n })?;\n\n Ok(artifacts)\n\n } else if let Ok(home) = std::env::var(\"HOME\") {\n\n let artifacts = format!(\"{}/.nitro_cli/\", home);\n\n std::fs::create_dir_all(artifacts.clone()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create artifacts path {}: {:?}\", artifacts, e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![&artifacts, \"Create\"])\n\n })?;\n\n Ok(artifacts)\n\n } else {\n\n Err(new_nitro_cli_failure!(\n\n \"Could not find a folder for the CLI artifacts, set either HOME or NITRO_CLI_ARTIFACTS\",\n\n NitroCliErrorEnum::ArtifactsPathNotSet\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 52, "score": 189192.02763545213 }, { "content": "/// Returns the value of the `NITRO_CLI_BLOBS` environment variable.\n\n///\n\n/// This variable specifies where all the blobs necessary for building\n\n/// an enclave image are stored. 
As of now the blobs are:\n\n/// - *bzImage*: A kernel image.\n\n/// - *init*: The initial init process that is bootstraping the environment.\n\n/// - *linuxkit*: A slightly modified version of linuxkit.\n\n/// - *cmdline*: A file containing the kernel commandline.\n\nfn blobs_path() -> NitroCliResult<String> {\n\n // TODO Improve error message with a suggestion to the user\n\n // consider using the default path used by rpm install\n\n let blobs_res = std::env::var(\"NITRO_CLI_BLOBS\");\n\n\n\n Ok(blobs_res.unwrap_or_else(|_| DEFAULT_BLOBS_PATH.to_string()))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 53, "score": 189188.95472537228 }, { "content": "/// Connects to the enclave console and prints it continously.\n\npub fn enclave_console(enclave_cid: u64) -> NitroCliResult<()> {\n\n let console = Console::new(\n\n VMADDR_CID_HYPERVISOR,\n\n u32::try_from(enclave_cid).map_err(|err| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to parse enclave CID: {:?}\", err),\n\n NitroCliErrorEnum::IntegerParsingError\n\n )\n\n })? 
+ CID_TO_CONSOLE_PORT_OFFSET,\n\n )\n\n .map_err(|e| e.add_subaction(\"Connect to enclave console\".to_string()))?;\n\n println!(\"Successfully connected to the console.\");\n\n console\n\n .read_to(io::stdout().by_ref())\n\n .map_err(|e| e.add_subaction(\"Connect to enclave console\".to_string()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 188229.86357751844 }, { "content": "/// Wrapper over the console connection function.\n\npub fn console_enclaves(enclave_cid: u64) -> NitroCliResult<()> {\n\n debug!(\"console_enclaves\");\n\n println!(\"Connecting to the console for enclave {}...\", enclave_cid);\n\n enclave_console(enclave_cid)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 55, "score": 188225.23326100764 }, { "content": "/// Send the given command, then close the channel that was used for sending it.\n\nfn send_command_and_close(cmd: EnclaveProcessCommandType, stream: &mut UnixStream) {\n\n let action_str = &get_command_action(cmd);\n\n\n\n enclave_proc_command_send_single::<EmptyArgs>(cmd, None, stream)\n\n .ok_or_exit_with_errno(Some(\"Failed to send command\"));\n\n stream\n\n .shutdown(std::net::Shutdown::Both)\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to close stream after sending command: {:?}\", e),\n\n NitroCliErrorEnum::SocketCloseError\n\n )\n\n .set_action(action_str.to_string())\n\n })\n\n .ok_or_exit_with_errno(Some(\"Failed to shut down stream\"));\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 56, "score": 187923.6834556615 }, { "content": "/// Parse the enclave's ID from the command-line arguments.\n\nfn parse_enclave_id(args: &ArgMatches) -> NitroCliResult<String> {\n\n let enclave_id = args.value_of(\"enclave-id\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`enclave-id` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(enclave_id.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 57, 
"score": 186703.15246554997 }, { "content": "/// Assembles the error message which gets displayed to the user.\n\npub fn construct_error_message(failure: &NitroCliFailure) -> String {\n\n // Suggestive error description comes first.\n\n let error_info: String = document_errors::get_detailed_info(\n\n (*ERROR_CODES.get(&failure.error_code).unwrap_or(&\"E00\")).to_string(),\n\n &failure.additional_info,\n\n );\n\n\n\n // Include a link to the documentation page.\n\n let help_link: String = document_errors::construct_help_link(\n\n (*ERROR_CODES.get(&failure.error_code).unwrap_or(&\"E00\")).to_string(),\n\n );\n\n let backtrace: String = document_errors::construct_backtrace(&failure);\n\n\n\n // Write backtrace to a log file.\n\n let log_path = log_backtrace(backtrace.clone());\n\n\n\n // Return final output, depending on whether the user requested the backtrace or not.\n\n match std::env::var(BACKTRACE_VAR) {\n\n Ok(display_backtrace) => match display_backtrace.as_str() {\n\n \"1\" => {\n", "file_path": "src/common/mod.rs", "rank": 58, "score": 184099.7492710174 }, { "content": "/// Print a message to a connection's standard output, if the connection is available.\n\npub fn safe_conn_println(conn: Option<&Connection>, msg: &str) -> NitroCliResult<()> {\n\n if conn.is_none() {\n\n return Ok(());\n\n }\n\n\n\n conn.unwrap().println(msg)\n\n}\n\n\n", "file_path": "src/enclave_proc/connection.rs", "rank": 59, "score": 182576.37163121035 }, { "content": "/// The default POSIX signal handling function, which notifies the enclave process to shut down gracefully.\n\nfn enclave_proc_handle_signals(comm_fd: RawFd, signal: Signal) -> bool {\n\n let mut stream = unsafe { UnixStream::from_raw_fd(comm_fd) };\n\n\n\n warn!(\n\n \"Received signal {:?}. 
The enclave process will now close.\",\n\n signal\n\n );\n\n send_command_and_close(\n\n EnclaveProcessCommandType::ConnectionListenerStop,\n\n &mut stream,\n\n );\n\n\n\n true\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 60, "score": 182203.23211186967 }, { "content": "/// Parse the requested number of CPUs from the command-line arguments.\n\nfn parse_cpu_count(args: &ArgMatches) -> NitroCliResult<Option<u32>> {\n\n let cpu_count = if let Some(cpu_count) = args.value_of(\"cpu-count\") {\n\n let cpu_count: u32 = cpu_count.parse().map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"`cpu-count` is not a number\",\n\n NitroCliErrorEnum::InvalidArgument\n\n )\n\n .add_info(vec![\"cpu-count\", cpu_count])\n\n })?;\n\n Some(cpu_count)\n\n } else {\n\n None\n\n };\n\n Ok(cpu_count)\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 61, "score": 180083.07049423349 }, { "content": "/// Parse the enclave's required CID from the command-line arguments.\n\nfn parse_enclave_cid(args: &ArgMatches) -> NitroCliResult<Option<u64>> {\n\n let enclave_cid = if let Some(enclave_cid) = args.value_of(\"enclave-cid\") {\n\n let enclave_cid: u64 = enclave_cid.parse().map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"`enclave-cid` is not a number\",\n\n NitroCliErrorEnum::InvalidArgument\n\n )\n\n .add_info(vec![\"enclave-cid\", enclave_cid])\n\n })?;\n\n\n\n // Do not use well-known CID values - 0, 1, 2 - as the enclave CID.\n\n // VMADDR_CID_ANY = -1U\n\n // VMADDR_CID_HYPERVISOR = 0\n\n // VMADDR_CID_LOCAL = 1\n\n // VMADDR_CID_HOST = 2\n\n // Note: 0 is used as a placeholder to auto-generate a CID.\n\n // <http://man7.org/linux/man-pages/man7/vsock.7.html>\n\n if enclave_cid == 0 {\n\n eprintln!(\"The enclave CID will be auto-generated as the provided CID is 0\");\n\n }\n", "file_path": "src/common/commands_parser.rs", "rank": 62, "score": 179940.22117352605 }, { "content": "/// Send a command to a single socket.\n\npub fn 
enclave_proc_command_send_single<T>(\n\n cmd: EnclaveProcessCommandType,\n\n args: Option<&T>,\n\n mut socket: &mut UnixStream,\n\n) -> NitroCliResult<()>\n\nwhere\n\n T: Serialize,\n\n{\n\n // Serialize the command type.\n\n let cmd_bytes = serde_cbor::to_vec(&cmd).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Invalid command format: {:?}\", e),\n\n NitroCliErrorEnum::InvalidCommand\n\n )\n\n })?;\n\n\n\n // The command is written twice. The first read is done by the connection listener to check if this is\n\n // a shut-down command. The second read is done by the enclave process for all non-shut-down commands.\n\n for _ in 0..2 {\n\n write_u64_le(&mut socket, cmd_bytes.len() as u64)\n", "file_path": "src/common/mod.rs", "rank": 63, "score": 179723.41636068115 }, { "content": "/// Parse the list of requested CPU IDs from the command-line arguments.\n\nfn parse_cpu_ids(args: &ArgMatches) -> NitroCliResult<Option<Vec<u32>>> {\n\n let cpu_ids_arg = args.values_of(\"cpu-ids\");\n\n match cpu_ids_arg {\n\n Some(iterator) => {\n\n let mut cpu_ids = Vec::new();\n\n for cpu_id in iterator {\n\n cpu_ids.push(cpu_id.parse().map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"`cpu-id` is not a number\",\n\n NitroCliErrorEnum::InvalidArgument\n\n )\n\n .add_info(vec![\"cpu-id\", cpu_id])\n\n })?);\n\n }\n\n Ok(Some(cpu_ids))\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 65, "score": 173600.94557532776 }, { "content": "/// Obtain the logger ID from the full enclave ID.\n\nfn get_logger_id(enclave_id: &str) -> String {\n\n // The full enclave ID is \"i-(...)-enc<enc_id>\" and we want to extract only <enc_id>.\n\n let tokens: Vec<_> = enclave_id.rsplit(\"-enc\").collect();\n\n format!(\"enc-{}:{}\", tokens[0], std::process::id())\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 66, "score": 171866.5851447145 }, { "content": "// Generate the sysroot path based on the architecture the program is built.\n\n// 
This is needed for the compiler to locate the architecture dependent\n\n// header files.\n\nfn get_gcc_sysroot() -> String {\n\n // Initialize the output because it might remain uninitialized with the\n\n // current control flow\n\n let mut output = Err(io::Error::from(io::ErrorKind::NotFound));\n\n\n\n // Use the `CC` environment variable if existing\n\n // which should give us directly the compiler's name\n\n let cc = env::var(\"CC\");\n\n if let Ok(cc) = cc {\n\n output = Command::new(cc).arg(\"--print-sysroot\").output();\n\n }\n\n\n\n if output.is_err() {\n\n // Otherwise, try to guess the compiler's name by the name\n\n // $TARGET-gcc, where $TARGET represents the target architecture\n\n // that was specified for build\n\n let target = env::var(\"TARGET\").unwrap();\n\n output = Command::new(format!(\"{}-gcc\", target))\n\n .arg(\"--print-sysroot\")\n\n .output();\n", "file_path": "build.rs", "rank": 67, "score": 171210.34214012095 }, { "content": "/// Notify that an error has occurred, also forwarding the error message to a connection.\n\nfn notify_error_with_conn(err_msg: &str, conn: &Connection, action: EnclaveProcessCommandType) {\n\n let action_str = &get_command_action(action);\n\n\n\n notify_error(err_msg);\n\n conn.eprintln(err_msg)\n\n .map_err(|e| e.set_action(action_str.to_string()))\n\n .ok_or_exit_with_errno(Some(\"Failed to forward error message to connection\"));\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 68, "score": 169920.20575970813 }, { "content": "/// Parse the debug-mode flag from the command-line arguments.\n\nfn debug_mode(args: &ArgMatches) -> Option<bool> {\n\n let val = args.is_present(\"debug-mode\");\n\n if val {\n\n Some(val)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 69, "score": 168185.5179805259 }, { "content": "/// Parse the Docker tag from the command-line arguments.\n\nfn parse_docker_tag(args: &ArgMatches) -> Option<String> {\n\n 
args.value_of(\"docker-uri\").map(|val| val.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 70, "score": 163904.774072956 }, { "content": "/// Parse the Docker directory from the command-line arguments.\n\nfn parse_docker_dir(args: &ArgMatches) -> Option<String> {\n\n args.value_of(\"docker-dir\").map(|val| val.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 71, "score": 163904.774072956 }, { "content": "fn parse_signing_certificate(args: &ArgMatches) -> Option<String> {\n\n args.value_of(\"signing-certificate\")\n\n .map(|val| val.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 72, "score": 163900.29502817985 }, { "content": "fn parse_private_key(args: &ArgMatches) -> Option<String> {\n\n args.value_of(\"private-key\").map(|val| val.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 73, "score": 163900.29502817985 }, { "content": "/// Obtain an enclave's description and provide it through the given connection.\n\npub fn describe_enclaves(\n\n enclave_manager: &EnclaveManager,\n\n connection: &Connection,\n\n) -> NitroCliResult<()> {\n\n debug!(\"describe_enclaves\");\n\n\n\n let info = get_enclave_describe_info(enclave_manager)\n\n .map_err(|e| e.add_subaction(String::from(\"Execute Describe Enclave command\")))?;\n\n connection.println(\n\n serde_json::to_string_pretty(&info)\n\n .map_err(|err| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to display enclave describe data: {:?}\", err),\n\n NitroCliErrorEnum::SerdeError\n\n )\n\n })?\n\n .as_str(),\n\n )\n\n}\n", "file_path": "src/enclave_proc/commands.rs", "rank": 74, "score": 162457.18201980533 }, { "content": "/// Terminate an enclave and provide the termination status through the given connection.\n\npub fn terminate_enclaves(\n\n enclave_manager: &mut EnclaveManager,\n\n connection: Option<&Connection>,\n\n) -> NitroCliResult<()> {\n\n let enclave_id = 
enclave_manager.enclave_id.clone();\n\n\n\n debug!(\"terminate_enclaves\");\n\n enclave_manager\n\n .update_state(EnclaveState::Terminating)\n\n .map_err(|e| e.add_subaction(\"Failed to update enclave state\".to_string()))?;\n\n if let Err(error_info) = enclave_manager.terminate_enclave() {\n\n safe_conn_eprintln(\n\n connection,\n\n format!(\n\n \"Warning: Failed to stop enclave {}\\nError message: {:?}\",\n\n enclave_manager.enclave_id,\n\n construct_error_message(&error_info).as_str()\n\n )\n\n .as_str(),\n\n )?;\n", "file_path": "src/enclave_proc/commands.rs", "rank": 75, "score": 162457.1173388776 }, { "content": "/// Launch an enclave with the specified arguments and provide the launch status through the given connection.\n\npub fn run_enclaves(\n\n args: &RunEnclavesArgs,\n\n connection: Option<&Connection>,\n\n) -> NitroCliResult<EnclaveManager> {\n\n debug!(\"run_enclaves\");\n\n\n\n let eif_file = File::open(&args.eif_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open the EIF file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![&args.eif_path, \"Open\"])\n\n })?;\n\n\n\n let cpu_ids = CpuInfo::new()\n\n .map_err(|e| e.add_subaction(\"Failed to construct CPU information\".to_string()))?\n\n .get_cpu_config(args)\n\n .map_err(|e| e.add_subaction(\"Failed to get CPU configuration\".to_string()))?;\n\n let mut enclave_manager = EnclaveManager::new(\n\n args.enclave_cid,\n", "file_path": "src/enclave_proc/commands.rs", "rank": 76, "score": 162456.99339108064 }, { "content": "/// Connect to all existing enclave processes, returning a connection to each.\n\npub fn enclave_proc_connect_to_all() -> NitroCliResult<Vec<UnixStream>> {\n\n let paths = fs::read_dir(get_sockets_dir_path()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to access sockets directory: {:?}\", e),\n\n NitroCliErrorEnum::ReadFromDiskFailure\n\n )\n\n })?;\n\n Ok(paths\n\n .filter_map(|path| path.ok())\n\n 
.map(|path| path.path())\n\n .filter(|path| !path.is_dir())\n\n .filter_map(|path| {\n\n // Get the file path string.\n\n if let Some(path_str) = path.to_str() {\n\n // Enclave process sockets are named \"<enclave_id>.sock\".\n\n if !path_str.ends_with(\".sock\") {\n\n return None;\n\n }\n\n\n\n // At this point we have found a potential socket.\n", "file_path": "src/enclave_proc_comm.rs", "rank": 77, "score": 157136.61555515556 }, { "content": "/// Print a message to a connection's standard error, if the connection is available.\n\npub fn safe_conn_eprintln(conn: Option<&Connection>, msg: &str) -> NitroCliResult<()> {\n\n if conn.is_none() {\n\n return Ok(());\n\n }\n\n\n\n conn.unwrap().eprintln(msg)\n\n}\n", "file_path": "src/enclave_proc/connection.rs", "rank": 78, "score": 154302.520015678 }, { "content": "/// Terminates all enclave instances belonging to the current user (or all\n\n/// instances, if the current user has `root` permissions).\n\npub fn terminate_all_enclaves() -> NitroCliResult<()> {\n\n let sockets_dir = get_sockets_dir_path();\n\n let mut replies: Vec<UnixStream> = vec![];\n\n let sockets = std::fs::read_dir(sockets_dir.as_path()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Error while accessing sockets directory: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![\n\n sockets_dir\n\n .as_path()\n\n .to_str()\n\n .unwrap_or(\"Invalid unicode directory name\"),\n\n \"Read\",\n\n ])\n\n })?;\n\n\n\n let mut err_socket_files: usize = 0;\n\n let mut failed_connections: Vec<PathBuf> = Vec::new();\n\n for socket in sockets {\n", "file_path": "src/lib.rs", "rank": 79, "score": 153442.62248582617 }, { "content": "/// Fetch JSON objects and statuses from all connected enclave processes.\n\npub fn enclave_proc_handle_outputs<T>(conns: &mut [UnixStream]) -> Vec<(T, i32)>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let mut objects: Vec<(T, i32)> = Vec::new();\n\n\n\n for conn in conns.iter_mut() {\n\n // We only 
count connections that have yielded a valid JSON object and a status\n\n let (object, status) = enclave_proc_handle_output::<T>(conn);\n\n if let Some(object) = object {\n\n if let Some(status) = status {\n\n objects.push((object, status));\n\n }\n\n }\n\n }\n\n\n\n objects\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 80, "score": 153375.00721340787 }, { "content": "/// Returns a string containing the backtrace recorded during propagating an error message\n\npub fn construct_backtrace(failure_info: &NitroCliFailure) -> String {\n\n let mut ret = String::new();\n\n let commit_id = env!(\"COMMIT_ID\");\n\n\n\n ret.push_str(&format!(\" Action: {}\\n Subactions:\", failure_info.action));\n\n for subaction in failure_info.subactions.iter().rev() {\n\n ret.push_str(&format!(\"\\n {}\", subaction));\n\n }\n\n ret.push_str(&format!(\"\\n Root error file: {}\", failure_info.file));\n\n ret.push_str(&format!(\"\\n Root error line: {}\", failure_info.line));\n\n\n\n ret.push_str(&format!(\n\n \"\\n Build commit: {}\",\n\n match commit_id.len() {\n\n 0 => \"not available\",\n\n _ => commit_id,\n\n }\n\n ));\n\n\n\n ret\n\n}\n\n\n", "file_path": "src/common/document_errors.rs", "rank": 81, "score": 150303.09045517482 }, { "content": "/// Broadcast a command to all available enclave processes.\n\npub fn enclave_proc_command_send_all<T>(\n\n cmd: EnclaveProcessCommandType,\n\n args: Option<&T>,\n\n) -> NitroCliResult<(Vec<UnixStream>, usize)>\n\nwhere\n\n T: Serialize,\n\n{\n\n // Open a connection to each valid socket.\n\n let mut replies: Vec<UnixStream> = vec![];\n\n let epoll_fd = epoll::epoll_create().map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to create epoll: {:?}\", e),\n\n NitroCliErrorEnum::EpollError\n\n )\n\n })?;\n\n let comms: Vec<NitroCliResult<()>> = enclave_proc_connect_to_all()\n\n .map_err(|e| {\n\n e.add_subaction(\"Failed to send command to all enclave processes\".to_string())\n\n })?\n\n .iter_mut()\n", "file_path": 
"src/enclave_proc_comm.rs", "rank": 82, "score": 146623.69795728917 }, { "content": "/// Open a connection to an enclave-specific socket.\n\npub fn enclave_proc_connect_to_single(enclave_id: &str) -> NitroCliResult<UnixStream> {\n\n let socket_path = get_socket_path(enclave_id).map_err(|e| {\n\n e.add_subaction(\"Connect to specific enclave process\".to_string())\n\n .set_error_code(NitroCliErrorEnum::SocketError)\n\n })?;\n\n UnixStream::connect(socket_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to connect to specific enclave process: {:?}\", e),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 83, "score": 145033.0655005389 }, { "content": "/// Spawn an enclave process and wait until it has detached and has\n\n/// taken ownership of its communication socket.\n\npub fn enclave_proc_spawn(logger: &EnclaveProcLogWriter) -> NitroCliResult<UnixStream> {\n\n let (cli_socket, enclave_proc_socket) = UnixStream::pair().map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create a socket pair: {:?}\", e),\n\n NitroCliErrorEnum::SocketPairCreationFailure\n\n )\n\n })?;\n\n\n\n // Prevent the descriptor from being closed when calling exec().\n\n let enclave_proc_fd = enclave_proc_socket.as_raw_fd();\n\n unsafe {\n\n let flags = libc::fcntl(enclave_proc_fd, libc::F_GETFD);\n\n libc::fcntl(enclave_proc_fd, libc::F_SETFD, flags & !libc::FD_CLOEXEC);\n\n }\n\n\n\n // Spawn an intermediate child process. 
This will fork again in order to\n\n // create the detached enclave process.\n\n let fork_status = fork();\n\n\n\n if let Ok(ForkResult::Child) = fork_status {\n", "file_path": "src/enclave_proc_comm.rs", "rank": 84, "score": 142648.9093656653 }, { "content": "/// Handle an event coming from an enclave.\n\nfn try_handle_enclave_event(connection: &Connection) -> NitroCliResult<HandledEnclaveEvent> {\n\n // Check if this is an enclave connection.\n\n if let Some(mut enc_events) = connection\n\n .get_enclave_event_flags()\n\n .map_err(|e| e.add_subaction(\"Failed to get enclave events flag\".to_string()))?\n\n {\n\n let enc_hup = enc_events.contains(EpollFlags::EPOLLHUP);\n\n\n\n // Check if non-hang-up events have occurred.\n\n enc_events.remove(EpollFlags::EPOLLHUP);\n\n if !enc_events.is_empty() {\n\n warn!(\"Received unexpected enclave event(s): {:?}\", enc_events);\n\n }\n\n\n\n // If we received the hang-up event we need to terminate cleanly.\n\n if enc_hup {\n\n warn!(\"Received hang-up event from the enclave. 
Enclave process will shut down.\");\n\n return Ok(HandledEnclaveEvent::HangUp);\n\n }\n\n\n\n // Non-hang-up enclave events are not fatal.\n\n return Ok(HandledEnclaveEvent::Unexpected);\n\n }\n\n\n\n Ok(HandledEnclaveEvent::None)\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 85, "score": 142642.147749677 }, { "content": "fn main() {\n\n let app = create_app!();\n\n let args = app.get_matches();\n\n\n\n match args.subcommand() {\n\n (\"listen\", Some(args)) => {\n\n let listen_args = ListenArgs::new_with(args).ok_or_exit(args.usage());\n\n listen(listen_args).ok_or_exit(args.usage());\n\n }\n\n (\"run\", Some(args)) => {\n\n let run_args = RunArgs::new_with(args).ok_or_exit(args.usage());\n\n let rc = run(run_args).ok_or_exit(args.usage());\n\n std::process::exit(rc);\n\n }\n\n (\"recv-file\", Some(args)) => {\n\n let subcmd_args = FileArgs::new_with(args).ok_or_exit(args.usage());\n\n recv_file(subcmd_args).ok_or_exit(args.usage());\n\n }\n\n (\"send-file\", Some(args)) => {\n\n let subcmd_args = FileArgs::new_with(args).ok_or_exit(args.usage());\n\n send_file(subcmd_args).ok_or_exit(args.usage());\n\n }\n\n (&_, _) => {}\n\n }\n\n}\n", "file_path": "samples/command_executer/src/main.rs", "rank": 86, "score": 142538.0422475175 }, { "content": "/// Handle a single command, returning whenever an error occurs.\n\nfn handle_command(\n\n cmd: EnclaveProcessCommandType,\n\n logger: &EnclaveProcLogWriter,\n\n connection: &Connection,\n\n conn_listener: &mut ConnectionListener,\n\n enclave_manager: &mut EnclaveManager,\n\n terminate_thread: &mut Option<std::thread::JoinHandle<()>>,\n\n) -> NitroCliResult<(i32, bool)> {\n\n Ok(match cmd {\n\n EnclaveProcessCommandType::Run => {\n\n // We should never receive a Run command if we are already running.\n\n if !enclave_manager.enclave_id.is_empty() {\n\n (libc::EEXIST, false)\n\n } else {\n\n let run_args = connection.read::<RunEnclavesArgs>().map_err(|e| {\n\n e.add_subaction(\"Failed to get run 
arguments\".to_string())\n\n .set_action(\"Run Enclave\".to_string())\n\n })?;\n\n info!(\"Run args = {:?}\", run_args);\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 87, "score": 142470.63699953142 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct FileTemplate {\n\n path: String,\n\n source: String,\n\n mode: String,\n\n}\n\n\n", "file_path": "enclave_build/src/yaml_generator.rs", "rank": 88, "score": 142274.7872550611 }, { "content": "/// Release the enclave descriptor.\n\nfn release_enclave_descriptor(enc_fd: RawFd) -> NitroCliResult<()> {\n\n // Close enclave descriptor.\n\n let rc = unsafe { libc::close(enc_fd) };\n\n if rc < 0 {\n\n return Err(new_nitro_cli_failure!(\n\n \"Failed to close enclave descriptor\",\n\n NitroCliErrorEnum::FileOperationFailure\n\n ));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 89, "score": 141631.91017884453 }, { "content": "#[derive(PartialEq, Eq, Hash)]\n\nenum CommandRequesterType {\n\n /// The requester is the user with the given UID.\n\n User(libc::uid_t),\n\n /// The requester is the group with the given GID.\n\n Group(libc::gid_t),\n\n /// The requester is any other user.\n\n Others,\n\n}\n\n\n", "file_path": "src/enclave_proc/connection.rs", "rank": 90, "score": 141333.74682927865 }, { "content": "/// The policy used to filter received commands based on the requester's type.\n\nstruct CommandRequesterPolicy {\n\n /// A mapping between a requester's type and all of its allowed commands.\n\n policy: HashMap<CommandRequesterType, Vec<EnclaveProcessCommandType>>,\n\n}\n\n\n", "file_path": "src/enclave_proc/connection.rs", "rank": 91, "score": 141213.28932311328 }, { "content": "/// The main event loop of the enclave process.\n\nfn process_event_loop(\n\n comm_stream: UnixStream,\n\n logger: &EnclaveProcLogWriter,\n\n) -> NitroCliResult<()> {\n\n let mut conn_listener = ConnectionListener::new()?;\n\n let mut enclave_manager = 
EnclaveManager::default();\n\n let mut terminate_thread: Option<std::thread::JoinHandle<()>> = None;\n\n let mut done = false;\n\n let mut ret_value = Ok(());\n\n\n\n // Start the signal handler before spawning any other threads. This is done since the\n\n // handler will mask all relevant signals from the current thread and this setting will\n\n // be automatically inherited by all threads spawned from this point on; we want this\n\n // because only the dedicated thread spawned by the handler should listen for signals.\n\n enclave_proc_configure_signal_handler(&conn_listener)\n\n .map_err(|e| e.add_subaction(\"Failed to configure signal handler\".to_string()))?;\n\n\n\n // Add the CLI communication channel to epoll.\n\n conn_listener\n\n .handle_new_connection(comm_stream)\n", "file_path": "src/enclave_proc/mod.rs", "rank": 92, "score": 139492.32476167358 }, { "content": "fn initial_digest(len: usize) -> Vec<u8> {\n\n vec![0; len]\n\n}\n\n\n\nimpl<T: Digest + Debug + Write + Clone> EifHasher<T> {\n\n pub fn new(block_size: usize, mut hasher: T) -> Result<Self, String> {\n\n let output_size = hasher.finalize_reset().len();\n\n if block_size > 0 && output_size * 2 > block_size {\n\n return Err(\"Invalid block_size\".to_string());\n\n }\n\n\n\n Ok(EifHasher {\n\n block: Vec::with_capacity(block_size),\n\n digest: initial_digest(output_size),\n\n block_size,\n\n output_size,\n\n hasher,\n\n })\n\n }\n\n\n", "file_path": "eif_defs/src/eif_hasher.rs", "rank": 93, "score": 136567.99369531134 }, { "content": "/// Create the enclave process.\n\nfn create_enclave_process(logger: &EnclaveProcLogWriter) -> NitroCliResult<()> {\n\n // To get a detached process, we first:\n\n // (1) Temporarily ignore specific signals (SIGHUP).\n\n // (2) Daemonize the current process.\n\n // (3) Wait until the detached process is orphaned.\n\n // (4) Restore signal handlers.\n\n let signal_handler = SignalHandler::new(&[SIGHUP])\n\n .mask_all()\n\n .map_err(|e| e.add_subaction(\"Failed to mask 
signals\".to_string()))?;\n\n let ppid = getpid();\n\n\n\n // Daemonize the current process. The working directory remains\n\n // unchanged and the standard descriptors are routed to '/dev/null'.\n\n daemon(true, false).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to daemonize enclave process: {:?}\", e),\n\n NitroCliErrorEnum::DaemonizeProcessFailure\n\n )\n\n })?;\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 94, "score": 136203.09893500863 }, { "content": "/// Initialize logging.\n\npub fn init_logger() -> NitroCliResult<EnclaveProcLogWriter> {\n\n // The log file is \"nitro-cli.log\" and is stored in the NPE resources directory.\n\n let log_writer = EnclaveProcLogWriter::new()?;\n\n\n\n // Initialize logging with the new log writer.\n\n flexi_logger::Logger::with_env_or_str(DEFAULT_LOG_LEVEL)\n\n .log_target(LogTarget::Writer(Box::new(log_writer.clone())))\n\n .start()\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to initialize enclave process logger: {:?}\", e),\n\n NitroCliErrorEnum::LoggerError\n\n )\n\n })?;\n\n\n\n // The log writer is provided for sharing between CLI-related processes.\n\n Ok(log_writer)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/common/logger.rs", "rank": 95, "score": 134003.50277494703 }, { "content": "/// The internal data type needed for describing an enclave.\n\ntype UnpackedHandle = (u64, u64, u64, Vec<u32>, u64, u64, EnclaveState);\n\n\n\n/// The bit indicating if an enclave has been launched in debug mode.\n\npub const NE_ENCLAVE_DEBUG_MODE: u64 = 0x1;\n\n\n\n/// Constant number used for computing the lower memory limit.\n\nconst ENCLAVE_MEMORY_EIF_SIZE_RATIO: u64 = 4;\n\n\n\n/// Enclave Image Format (EIF) flag.\n\nconst NE_EIF_IMAGE: u64 = 0x01;\n\n\n\n/// Flag indicating a memory region for enclave general usage.\n\nconst NE_DEFAULT_MEMORY_REGION: u64 = 0;\n\n\n\n/// Magic number for Nitro Enclave IOCTL codes.\n\nconst NE_MAGIC: u64 = 0xAE;\n\n\n\n/// Path corresponding to 
the Nitro Enclaves device file.\n\nconst NE_DEV_FILEPATH: &str = \"/dev/nitro_enclaves\";\n\n\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 96, "score": 133293.4558794103 }, { "content": "/// Launch the POSIX signal handler on a dedicated thread and ensure its events are accessible.\n\nfn enclave_proc_configure_signal_handler(conn_listener: &ConnectionListener) -> NitroCliResult<()> {\n\n let mut signal_handler = SignalHandler::new_with_defaults()\n\n .mask_all()\n\n .map_err(|e| e.add_subaction(\"Failed to configure signal handler\".to_string()))?;\n\n let (local_stream, thread_stream) = UnixStream::pair()\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to create stream pair: {:?}\", e),\n\n NitroCliErrorEnum::SocketPairCreationFailure\n\n )\n\n .set_action(\"Run Enclave\".to_string())\n\n })\n\n .ok_or_exit_with_errno(Some(\"Failed to create stream pair\"));\n\n\n\n conn_listener\n\n .add_stream_to_epoll(local_stream)\n\n .map_err(|e| {\n\n e.add_subaction(\n\n \"Failed to add stream to epoll when configuring signal handler\".to_string(),\n\n )\n\n })?;\n\n signal_handler.start_handler(thread_stream.into_raw_fd(), enclave_proc_handle_signals);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 97, "score": 132958.43294987327 }, { "content": "/// Build an enclave image file with the provided arguments.\n\npub fn build_enclaves(args: BuildEnclavesArgs) -> NitroCliResult<()> {\n\n debug!(\"build_enclaves\");\n\n eprintln!(\"Start building the Enclave Image...\");\n\n build_from_docker(\n\n &args.docker_uri,\n\n &args.docker_dir,\n\n &args.output,\n\n &args.signing_certificate,\n\n &args.private_key,\n\n )\n\n .map_err(|e| e.add_subaction(\"Failed to build EIF from docker\".to_string()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 98, "score": 132705.26185186612 }, { "content": "/// Build an enclave image file from a Docker image.\n\npub fn build_from_docker(\n\n docker_uri: &str,\n\n 
docker_dir: &Option<String>,\n\n output_path: &str,\n\n signing_certificate: &Option<String>,\n\n private_key: &Option<String>,\n\n) -> NitroCliResult<(File, BTreeMap<String, String>)> {\n\n let blobs_path =\n\n blobs_path().map_err(|e| e.add_subaction(\"Failed to retrieve blobs path\".to_string()))?;\n\n let cmdline_file_path = format!(\"{}/cmdline\", blobs_path);\n\n let mut cmdline_file = File::open(cmdline_file_path.clone()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not open kernel command line file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n .add_info(vec![&cmdline_file_path, \"Open\"])\n\n })?;\n\n\n\n let mut cmdline = String::new();\n\n cmdline_file.read_to_string(&mut cmdline).map_err(|e| {\n", "file_path": "src/lib.rs", "rank": 99, "score": 131675.36271220143 } ]
Rust
src/test/spec/unified_runner/mod.rs
awitten1/mongo-rust-driver
5a0192cf1ba11d9e44c453e14f90d32231689166
mod entity; mod matcher; mod operation; mod test_event; mod test_file; mod test_runner; use std::{convert::TryFrom, ffi::OsStr, fs::read_dir, path::PathBuf, time::Duration}; use futures::{future::FutureExt, stream::TryStreamExt}; use semver::Version; use tokio::sync::RwLockWriteGuard; use crate::{ bson::{doc, Document}, options::{CollectionOptions, FindOptions, ReadConcern, ReadPreference, SelectionCriteria}, test::{run_single_test, run_spec_test, LOCK}, RUNTIME, }; pub use self::{ entity::{ClientEntity, Entity, FindCursor, SessionEntity}, matcher::{events_match, results_match}, operation::{Operation, OperationObject}, test_event::{ExpectedCmapEvent, ExpectedCommandEvent, ExpectedEvent, ObserveEvent}, test_file::{ merge_uri_options, CollectionData, ExpectError, ExpectedEventType, TestFile, TestFileEntity, Topology, }, test_runner::{EntityMap, TestRunner}, }; use self::operation::Expectation; static SPEC_VERSIONS: &[Version] = &[ Version::new(1, 0, 0), Version::new(1, 1, 0), Version::new(1, 4, 0), Version::new(1, 5, 0), ]; const SKIPPED_OPERATIONS: &[&str] = &[ "bulkWrite", "count", "download", "download_by_name", "listCollectionObjects", "listDatabaseObjects", "mapReduce", "watch", ]; pub async fn run_unified_format_test(test_file: TestFile) { let version_matches = SPEC_VERSIONS.iter().any(|req| { if req.major != test_file.schema_version.major { return false; } if req.minor < test_file.schema_version.minor { return false; } true }); if !version_matches { panic!( "Test runner not compatible with specification version {}", &test_file.schema_version ); } let mut test_runner = TestRunner::new().await; if let Some(requirements) = test_file.run_on_requirements { let mut can_run_on = false; for requirement in requirements { if requirement.can_run_on(&test_runner.internal_client).await { can_run_on = true; } } if !can_run_on { println!("Client topology not compatible with test"); return; } } for test_case in test_file.tests { if let Some(skip_reason) = 
test_case.skip_reason { println!("Skipping {}: {}", &test_case.description, skip_reason); continue; } if test_case .operations .iter() .any(|op| SKIPPED_OPERATIONS.contains(&op.name.as_str())) { println!("Skipping {}", &test_case.description); continue; } println!("Running {}", &test_case.description); if let Some(requirements) = test_case.run_on_requirements { let mut can_run_on = false; for requirement in requirements { if requirement.can_run_on(&test_runner.internal_client).await { can_run_on = true; } } if !can_run_on { println!( "{}: client topology not compatible with test", &test_case.description ); return; } } if let Some(ref initial_data) = test_file.initial_data { for data in initial_data { test_runner.insert_initial_data(data).await; } } if let Some(ref create_entities) = test_file.create_entities { test_runner.populate_entity_map(create_entities).await; } for operation in test_case.operations { match operation.object { OperationObject::TestRunner => { operation .execute_test_runner_operation(&mut test_runner) .await; } OperationObject::Entity(ref id) => { let result = operation .execute_entity_operation(id, &mut test_runner) .await; match &operation.expectation { Expectation::Result { expected_value, save_as_entity, } => { let opt_entity = result.unwrap_or_else(|e| { panic!( "{} should succeed, but failed with the following error: {}", operation.name, e ) }); if expected_value.is_some() || save_as_entity.is_some() { let entity = opt_entity.unwrap_or_else(|| { panic!("{} did not return an entity", operation.name) }); if let Some(expected_bson) = expected_value { if let Entity::Bson(actual) = &entity { assert!( results_match( Some(actual), expected_bson, operation.returns_root_documents(), Some(&test_runner.entities), ), "result mismatch, expected = {:#?} actual = {:#?}", expected_bson, actual, ); } else { panic!( "Incorrect entity type returned from {}, expected BSON", operation.name ); } } if let Some(id) = save_as_entity { if 
test_runner.entities.insert(id.clone(), entity).is_some() { panic!( "Entity with id {} already present in entity map", id ); } } } } Expectation::Error(expect_error) => { let error = result .expect_err(&format!("{} should return an error", operation.name)); expect_error.verify_result(error); } Expectation::Ignore => (), } } } if test_case.description == "Server supports implicit sessions" { RUNTIME.delay_for(Duration::from_secs(1)).await; } } test_runner.fail_point_guards.clear(); if let Some(ref events) = test_case.expect_events { for expected in events { let entity = test_runner.entities.get(&expected.client).unwrap(); let client = entity.as_client(); client.sync_workers().await; let event_type = expected .event_type .unwrap_or(test_file::ExpectedEventType::Command); let actual_events: Vec<ExpectedEvent> = client .get_filtered_events(event_type) .into_iter() .map(Into::into) .collect(); let expected_events = &expected.events; assert_eq!( actual_events.len(), expected_events.len(), "actual:\n{:#?}\nexpected:\n{:#?}", actual_events, expected_events ); for (actual, expected) in actual_events.iter().zip(expected_events) { assert!( events_match(actual, expected, Some(&test_runner.entities)), "event mismatch: expected = {:#?}, actual = {:#?}", expected, actual, ); } } } if let Some(ref outcome) = test_case.outcome { for expected_data in outcome { let db_name = &expected_data.database_name; let coll_name = &expected_data.collection_name; let selection_criteria = SelectionCriteria::ReadPreference(ReadPreference::Primary); let read_concern = ReadConcern::local(); let options = CollectionOptions::builder() .selection_criteria(selection_criteria) .read_concern(read_concern) .build(); let collection = test_runner .internal_client .get_coll_with_options(db_name, coll_name, options); let options = FindOptions::builder().sort(doc! { "_id": 1 }).build(); let actual_data: Vec<Document> = collection .find(doc! 
{}, options) .await .unwrap() .try_collect() .await .unwrap(); assert_eq!(expected_data.documents, actual_data); } } println!("{} succeeded", &test_case.description); } } #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn test_examples() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; run_spec_test( &["unified-test-format", "examples"], run_unified_format_test, ) .await; } #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn valid_fail() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; let path: PathBuf = [ env!("CARGO_MANIFEST_DIR"), "src", "test", "spec", "json", "unified-test-format", "valid-fail", ] .iter() .collect(); for entry in read_dir(&path).unwrap() { let test_file_path = PathBuf::from(entry.unwrap().file_name()); let path = path.join(&test_file_path); let path_display = path.display().to_string(); std::panic::AssertUnwindSafe(run_single_test(path, &run_unified_format_test)) .catch_unwind() .await .expect_err(&format!("tests from {} should have failed", path_display)); } } #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn valid_pass() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; run_spec_test( &["unified-test-format", "valid-pass"], run_unified_format_test, ) .await; } const SKIPPED_INVALID_TESTS: &[&str] = &[ "expectedEventsForClient-events_conflicts_with_cmap_eventType.json", "expectedEventsForClient-events_conflicts_with_command_eventType.json", "expectedEventsForClient-events_conflicts_with_default_eventType.json", ]; #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn invalid() { let _guard: RwLockWriteGuard<_> = 
LOCK.run_exclusively().await; let path: PathBuf = [ env!("CARGO_MANIFEST_DIR"), "src", "test", "spec", "json", "unified-test-format", "invalid", ] .iter() .collect(); for entry in read_dir(&path).unwrap() { let test_file = entry.unwrap(); if !test_file.file_type().unwrap().is_file() { continue; } let test_file_path = PathBuf::from(test_file.file_name()); if test_file_path.extension().and_then(OsStr::to_str) != Some("json") { continue; } let test_file_str = test_file_path.as_os_str().to_str().unwrap(); if SKIPPED_INVALID_TESTS .iter() .any(|skip| *skip == test_file_str) { println!("Skipping {}", test_file_str); continue; } let path = path.join(&test_file_path); let path_display = path.display().to_string(); let json: serde_json::Value = serde_json::from_reader(std::fs::File::open(path.as_path()).unwrap()).unwrap(); let result: Result<TestFile, _> = bson::from_bson( bson::Bson::try_from(json).unwrap_or_else(|_| panic!("{}", path_display)), ); if let Ok(test_file) = result { panic!( "{}: should be invalid, parsed to:\n{:#?}", path_display, test_file ); } } }
mod entity; mod matcher; mod operation; mod test_event; mod test_file; mod test_runner; use std::{convert::TryFrom, ffi::OsStr, fs::read_dir, path::PathBuf, time::Duration}; use futures::{future::FutureExt, stream::TryStreamExt}; use semver::Version; use tokio::sync::RwLockWriteGuard; use crate::{ bson::{doc, Document}, options::{CollectionOptions, FindOptions, ReadConcern, ReadPreference, SelectionCriteria}, test::{run_single_test, run_spec_test, LOCK}, RUNTIME, }; pub use self::{ entity::{ClientEntity, Entity, FindCursor, SessionEntity}, matcher::{events_match, results_match}, operation::{Operation, OperationObject}, test_event::{ExpectedCmapEvent, ExpectedCommandEvent, ExpectedEvent, ObserveEvent}, test_file::{ merge_uri_options, CollectionData, ExpectError, ExpectedEventType, TestFile, TestFileEntity, Topology, }, test_runner::{EntityMap, TestRunner}, }; use self::operation::Expectation; static SPEC_VERSIONS: &[Version] = &[ Version::new(1, 0, 0), Version::new(1, 1, 0), Version::new(1, 4, 0), Version::new(1, 5, 0), ]; const SKIPPED_OPERATIONS: &[&str] = &[ "bulkWrite", "count", "download", "download_by_name", "listCollectionObjects", "listDatabaseObjects", "mapReduce", "watch", ]; pub async fn run_unified_format_test(test_file: TestFile) { let version_matches = SPEC_VERSIONS.iter().any(|req| { if req.major != test_file.schema_version.major { return false; } if req.minor < test_file.schema_version.minor { return false; } true }); if !version_matches { panic!( "Test runner not compatible with specification version {}", &test_file.schema_version ); } let mut test_runner = TestRunner::new().await; if let Some(requirements) = test_file.run_on_requirements { let mut can_run_on = false; for requirement in requirements { if requirement.can_run_on(&test_runner.internal_client).await { can_run_on = true; } } if !can_run_on { println!("Client topology not compatible with test"); return; } } for test_case in test_file.tests { if let Some(skip_reason) = 
test_case.skip_reason { println!("Skipping {}: {}", &test_case.description, skip_reason); continue; } if test_case .operations .iter() .any(|op| SKIPPED_OPERATIONS.contains(&op.name.as_str())) { println!("Skipping {}", &test_case.description); continue; } println!("Running {}", &test_case.description); if let Some(requirements) = test_case.run_on_requirements { let mut can_run_on = false; for requirement in requirements { if requirement.can_run_on(&test_runner.internal_client).await { can_run_on = true; } } if !can_run_on { println!( "{}: client topology not compatible with test", &test_case.description ); return; } } if let Some(ref initial_data) = test_file.initial_data { for data in initial_data { test_runner.insert_initial_data(data).await; } } if let Some(ref create_entities) = test_file.create_entities { test_runner.populate_entity_map(create_entities).await; } for operation in test_case.operations { match operation.object { OperationObject::TestRunner => { operation .execute_test_runner_operation(&mut test_runner) .await; } OperationObject::Entity(ref id) => { let result = operation .execute_entity_operation(id, &mut test_runner) .await; match &operation.expectation { Expectation::Result { expected_value, save_as_entity, } => { let opt_entity = result.unwrap_or_else(|e| { panic!( "{} should succeed, but failed with the following error: {}", operation.name, e ) }); if expected_value.is_some() || save_as_entity.is_some() { let entity = opt_entity.unwrap_or_else(|| { panic!("{} did not return an entity", operation.name) }); if let Some(expected_bson) = expected_value { if let Entity::Bson(actual) = &entity { assert!( results_match( Some(actual), expected_bson, operation.returns_root_documents(), Some(&test_runner.entities), ), "result mismatch, expected = {:#?} actual = {:#?}", expected_bson, actual, ); } else { panic!( "Incorrect entity type returned from {}, expected BSON", operation.name ); } } if let Some(id) = save_as_entity { if 
test_runner.entities.insert(id.clone(), entity).is_some() { panic!( "Entity with id {} already present in entity map", id ); } } } } Expectation::Error(expect_error) => { let error = result .expect_err(&format!("{} should return an error", operation.name)); expect_error.verify_result(error); } Expectation::Ignore => (), } } } if test_case.description == "Server supports implicit sessions" { RUNTIME.delay_for(Duration::from_secs(1)).await; } } test_runner.fail_point_guards.clear(); if let Some(ref events) = test_case.expect_events { for expected in events { let entity = test_runner.entities.get(&expected.client).unwrap(); let client = entity.as_client(); client.sync_workers().await; let event_type = expected .event_type .unwrap_or(test_file::ExpectedEventType::Command); let actual_events: Vec<ExpectedEvent> = client .get_filtered_events(event_type) .into_iter() .map(Into::into) .collect(); let expected_events = &expected.events; assert_eq!( actual_events.len(), expected_events.len(), "actual:\n{:#?}\nexpected:\n{:#?}", actual_events, expected_events ); for (actual, expected) in actual_events.iter().zip(expected_events) { assert!( events_match(actual, expected, Some(&test_runner.entities)), "event mismatch: expected = {:#?}, actual = {:#?}", expected, actual, ); } } } if let Some(ref outcome) = test_case.outcome { for expected_data in outcome { let db_name = &expected_data.database_name; let coll_name = &expected_data.collection_name; let selection_criteria = SelectionCriteria::ReadPreference(ReadPreference::Primary); let read_concern = ReadConcern::local(); let options = CollectionOptions::builder() .selection_criteria(selection_criteria) .read_concern(read_concern) .build(); let collection = test_runner .internal_client .get_coll_with_options(db_name, coll_name, options); let options = FindOptions::builder().sort(doc! { "_id": 1 }).build(); let actual_data: Vec<Document> = collection .find(doc! 
{}, options) .await .unwrap() .try_collect() .await .unwrap(); assert_eq!(expected_data.documents, actual_data); } } println!("{} succeeded", &test_case.description); } } #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn test_examples() { let _guar
#[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn valid_fail() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; let path: PathBuf = [ env!("CARGO_MANIFEST_DIR"), "src", "test", "spec", "json", "unified-test-format", "valid-fail", ] .iter() .collect(); for entry in read_dir(&path).unwrap() { let test_file_path = PathBuf::from(entry.unwrap().file_name()); let path = path.join(&test_file_path); let path_display = path.display().to_string(); std::panic::AssertUnwindSafe(run_single_test(path, &run_unified_format_test)) .catch_unwind() .await .expect_err(&format!("tests from {} should have failed", path_display)); } } #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn valid_pass() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; run_spec_test( &["unified-test-format", "valid-pass"], run_unified_format_test, ) .await; } const SKIPPED_INVALID_TESTS: &[&str] = &[ "expectedEventsForClient-events_conflicts_with_cmap_eventType.json", "expectedEventsForClient-events_conflicts_with_command_eventType.json", "expectedEventsForClient-events_conflicts_with_default_eventType.json", ]; #[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn invalid() { let _guard: RwLockWriteGuard<_> = LOCK.run_exclusively().await; let path: PathBuf = [ env!("CARGO_MANIFEST_DIR"), "src", "test", "spec", "json", "unified-test-format", "invalid", ] .iter() .collect(); for entry in read_dir(&path).unwrap() { let test_file = entry.unwrap(); if !test_file.file_type().unwrap().is_file() { continue; } let test_file_path = PathBuf::from(test_file.file_name()); if test_file_path.extension().and_then(OsStr::to_str) != Some("json") { continue; } let test_file_str = 
test_file_path.as_os_str().to_str().unwrap(); if SKIPPED_INVALID_TESTS .iter() .any(|skip| *skip == test_file_str) { println!("Skipping {}", test_file_str); continue; } let path = path.join(&test_file_path); let path_display = path.display().to_string(); let json: serde_json::Value = serde_json::from_reader(std::fs::File::open(path.as_path()).unwrap()).unwrap(); let result: Result<TestFile, _> = bson::from_bson( bson::Bson::try_from(json).unwrap_or_else(|_| panic!("{}", path_display)), ); if let Ok(test_file) = result { panic!( "{}: should be invalid, parsed to:\n{:#?}", path_display, test_file ); } } }
d: RwLockWriteGuard<_> = LOCK.run_exclusively().await; run_spec_test( &["unified-test-format", "examples"], run_unified_format_test, ) .await; }
function_block-function_prefixed
[ { "content": "fn entity_matches(id: &str, actual: Option<&Bson>, entities: &EntityMap) -> bool {\n\n let bson = entities.get(id).unwrap().as_bson();\n\n results_match_inner(actual, bson, false, false, Some(entities))\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 0, "score": 609087.4541693649 }, { "content": "fn type_matches(types: &Bson, actual: &Bson) -> bool {\n\n match types {\n\n Bson::Array(types) => types.iter().any(|t| type_matches(t, actual)),\n\n Bson::String(str) => match str.as_ref() {\n\n \"double\" => actual.element_type() == ElementType::Double,\n\n \"string\" => actual.element_type() == ElementType::String,\n\n \"object\" => actual.element_type() == ElementType::EmbeddedDocument,\n\n \"array\" => actual.element_type() == ElementType::Array,\n\n \"binData\" => actual.element_type() == ElementType::Binary,\n\n \"undefined\" => actual.element_type() == ElementType::Undefined,\n\n \"objectId\" => actual.element_type() == ElementType::ObjectId,\n\n \"bool\" => actual.element_type() == ElementType::Boolean,\n\n \"date\" => actual.element_type() == ElementType::DateTime,\n\n \"null\" => actual.element_type() == ElementType::Null,\n\n \"regex\" => actual.element_type() == ElementType::RegularExpression,\n\n \"dbPointer\" => actual.element_type() == ElementType::DbPointer,\n\n \"javascript\" => actual.element_type() == ElementType::JavaScriptCode,\n\n \"symbol\" => actual.element_type() == ElementType::Symbol,\n\n \"javascriptWithScope\" => actual.element_type() == ElementType::JavaScriptCodeWithScope,\n\n \"int\" => actual.element_type() == ElementType::Int32,\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 1, "score": 441339.26920310047 }, { "content": "fn numbers_match(actual: &Bson, expected: &Bson) -> bool {\n\n if actual.element_type() == expected.element_type() {\n\n return actual == expected;\n\n }\n\n\n\n match (get_int(actual), get_int(expected)) {\n\n (Some(actual), Some(expected)) => actual == 
expected,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 2, "score": 428527.8816444054 }, { "content": "pub fn results_match(\n\n actual: Option<&Bson>,\n\n expected: &Bson,\n\n returns_root_documents: bool,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n results_match_inner(actual, expected, returns_root_documents, true, entities)\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 3, "score": 411198.8854408264 }, { "content": "fn server_type_from_str(s: &str) -> Option<ServerType> {\n\n let t = match s {\n\n \"Standalone\" => ServerType::Standalone,\n\n \"Mongos\" => ServerType::Mongos,\n\n \"RSPrimary\" => ServerType::RsPrimary,\n\n \"RSSecondary\" => ServerType::RsSecondary,\n\n \"RSArbiter\" => ServerType::RsArbiter,\n\n \"RSOther\" => ServerType::RsOther,\n\n \"RSGhost\" => ServerType::RsGhost,\n\n \"LoadBalancer\" => ServerType::LoadBalancer,\n\n \"Unknown\" | \"PossiblePrimary\" => ServerType::Unknown,\n\n _ => return None,\n\n };\n\n\n\n Some(t)\n\n}\n\n\n\nasync fn run_test(test_file: TestFile) {\n\n let test_description = &test_file.description;\n\n\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 4, "score": 406006.9380474634 }, { "content": "pub fn merge_uri_options(given_uri: &str, uri_options: Option<&Document>) -> String {\n\n let uri_options = match uri_options {\n\n Some(opts) => opts,\n\n None => return given_uri.to_string(),\n\n };\n\n let mut given_uri_parts = given_uri.split('?');\n\n\n\n let mut uri = String::from(given_uri_parts.next().unwrap());\n\n // A connection string has two slashes before the host list and one slash before the auth db\n\n // name. 
If an auth db name is not provided the latter slash might not be present, so it needs\n\n // to be added manually.\n\n if uri.chars().filter(|c| *c == '/').count() < 3 {\n\n uri.push('/');\n\n }\n\n uri.push('?');\n\n\n\n if let Some(options) = given_uri_parts.next() {\n\n let options = options.split('&');\n\n for option in options {\n\n let key = option.split('=').next().unwrap();\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 5, "score": 398817.15797154745 }, { "content": "fn validate_userinfo(s: &str, userinfo_type: &str) -> Result<()> {\n\n if s.chars().any(|c| USERINFO_RESERVED_CHARACTERS.contains(&c)) {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: format!(\"{} must be URL encoded\", userinfo_type),\n\n }\n\n .into());\n\n }\n\n\n\n // All instances of '%' in the username must be part of an percent-encoded substring. This means\n\n // that there must be two hexidecimal digits following any '%' in the username.\n\n if s.split('%')\n\n .skip(1)\n\n .any(|part| part.len() < 2 || part[0..2].chars().any(|c| !c.is_ascii_hexdigit()))\n\n {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: \"username/password cannot contain unescaped %\".to_string(),\n\n }\n\n .into());\n\n }\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 6, "score": 398156.28527851764 }, { "content": "fn verify_max_staleness(max_staleness: Option<Duration>) -> crate::error::Result<()> {\n\n verify_max_staleness_inner(max_staleness)\n\n .map_err(|s| crate::error::ErrorKind::InvalidArgument { message: s }.into())\n\n}\n\n\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 7, "score": 394795.7629048139 }, { "content": "fn cmap_events_match(actual: &ExpectedCmapEvent, expected: &ExpectedCmapEvent) -> bool {\n\n match (actual, expected) {\n\n (\n\n ExpectedCmapEvent::PoolCleared {\n\n has_service_id: actual_has_service_id,\n\n },\n\n ExpectedCmapEvent::PoolCleared {\n\n has_service_id: expected_has_service_id,\n\n },\n\n ) => {\n\n 
expected_has_service_id.is_none() || (actual_has_service_id == expected_has_service_id)\n\n }\n\n (\n\n ExpectedCmapEvent::ConnectionClosed {\n\n reason: actual_reason,\n\n },\n\n ExpectedCmapEvent::ConnectionClosed {\n\n reason: expected_reason,\n\n },\n\n ) => expected_reason.is_none() || (actual_reason == expected_reason),\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 8, "score": 391597.10732761415 }, { "content": "fn init_db_and_coll(client: &Client, db_name: &str, coll_name: &str) -> Collection<Document> {\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 9, "score": 380733.3938628146 }, { "content": "pub fn events_match(\n\n actual: &ExpectedEvent,\n\n expected: &ExpectedEvent,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match (actual, expected) {\n\n (ExpectedEvent::Command(act), ExpectedEvent::Command(exp)) => {\n\n command_events_match(act, exp, entities)\n\n }\n\n (ExpectedEvent::Cmap(act), ExpectedEvent::Cmap(exp)) => cmap_events_match(act, exp),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 10, "score": 379340.78567052225 }, { "content": "fn assert_same_lsid_on_last_two_commands(client: &EventClient) {\n\n let events = client.get_all_command_started_events();\n\n let lsid1 = events[events.len() - 1].command.get(\"lsid\").unwrap();\n\n let lsid2 = events[events.len() - 2].command.get(\"lsid\").unwrap();\n\n assert_eq!(lsid1, lsid2);\n\n}\n", "file_path": "src/test/spec/v2_runner/mod.rs", "rank": 11, "score": 353694.8160360554 }, { "content": "fn command_write_concerns(client: &EventClient, key: &str) -> Vec<Document> {\n\n client\n\n .get_command_started_events(&[key])\n\n .into_iter()\n\n .map(|d| d.command.get_document(\"writeConcern\").unwrap().clone())\n\n .collect()\n\n}\n", "file_path": "src/concern/test.rs", "rank": 12, "score": 351606.9818659642 }, { 
"content": "fn is_master_response_from_server_type(server_type: ServerType) -> Option<IsMasterCommandResponse> {\n\n let mut response = IsMasterCommandResponse::default();\n\n\n\n match server_type {\n\n ServerType::Unknown => {\n\n return None;\n\n }\n\n ServerType::Mongos => {\n\n response.msg = Some(\"isdbgrid\".into());\n\n }\n\n ServerType::RsPrimary => {\n\n response.set_name = Some(\"foo\".into());\n\n response.is_writable_primary = Some(true);\n\n }\n\n ServerType::RsOther => {\n\n response.set_name = Some(\"foo\".into());\n\n response.hidden = Some(true);\n\n }\n\n ServerType::RsSecondary => {\n\n response.set_name = Some(\"foo\".into());\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 13, "score": 349090.741245168 }, { "content": "fn init_db_and_typed_coll<T>(client: &Client, db_name: &str, coll_name: &str) -> Collection<T>\n\nwhere\n\n T: Serialize + DeserializeOwned + Unpin + Debug,\n\n{\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 14, "score": 348320.2858046061 }, { "content": "fn assert_different_lsid_on_last_two_commands(client: &EventClient) {\n\n let events = client.get_all_command_started_events();\n\n let lsid1 = events[events.len() - 1].command.get(\"lsid\").unwrap();\n\n let lsid2 = events[events.len() - 2].command.get(\"lsid\").unwrap();\n\n assert_ne!(lsid1, lsid2);\n\n}\n\n\n", "file_path": "src/test/spec/v2_runner/mod.rs", "rank": 15, "score": 348289.0003839334 }, { "content": "fn percent_decode(s: &str, err_message: &str) -> Result<String> {\n\n match percent_encoding::percent_decode_str(s).decode_utf8() {\n\n Ok(result) => Ok(result.to_string()),\n\n Err(_) => Err(ErrorKind::InvalidArgument {\n\n message: err_message.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 16, "score": 344638.1478110225 }, { "content": "fn lists_eq(actual: 
&Option<Vec<String>>, expected: &[String]) -> bool {\n\n if let Some(actual) = actual {\n\n actual.as_slice() == expected\n\n } else {\n\n expected.is_empty()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct TestTopologyDescription {\n\n topology_type: String,\n\n set_name: Option<String>,\n\n servers: Vec<TestServerDescription>,\n\n}\n\n\n\nimpl PartialEq<TestTopologyDescription> for TopologyDescription {\n\n fn eq(&self, other: &TestTopologyDescription) -> bool {\n\n if self.topology_type.as_str() != other.topology_type.as_str()\n\n || self.set_name != other.set_name\n", "file_path": "src/sdam/description/topology/test/event.rs", "rank": 17, "score": 344578.73399559717 }, { "content": "fn deserialize_schema_version<'de, D>(deserializer: D) -> std::result::Result<Version, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let mut schema_version = String::deserialize(deserializer)?;\n\n // If the schema version does not contain a minor or patch version, append as necessary to\n\n // ensure the String parses correctly into a semver::Version.\n\n let count = schema_version.split('.').count();\n\n if count == 1 {\n\n schema_version.push_str(\".0.0\");\n\n } else if count == 2 {\n\n schema_version.push_str(\".0\");\n\n }\n\n Version::parse(&schema_version).map_err(|e| serde::de::Error::custom(format!(\"{}\", e)))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct RunOnRequirement {\n\n min_server_version: Option<String>,\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 18, "score": 340753.9156450671 }, { "content": "/// Splits a string into a section before a given index and a section exclusively after the index.\n\n/// Empty portions are returned as `None`.\n\nfn exclusive_split_at(s: &str, i: usize) -> (Option<&str>, Option<&str>) {\n\n let (l, r) = s.split_at(i);\n\n\n\n let lout = if !l.is_empty() { 
Some(l) } else { None };\n\n let rout = if r.len() > 1 { Some(&r[1..]) } else { None };\n\n\n\n (lout, rout)\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 19, "score": 333294.83651614044 }, { "content": "fn verify_max_await_time(max_await_time: Option<Duration>, cursor_type: Option<CursorType>) {\n\n let ns = Namespace::empty();\n\n let find = Find::<Document>::new(\n\n ns,\n\n None,\n\n Some(FindOptions {\n\n cursor_type,\n\n max_await_time,\n\n ..Default::default()\n\n }),\n\n );\n\n\n\n let spec = handle_response_test(\n\n &find,\n\n doc! {\n\n \"cursor\": {\n\n \"id\": 123,\n\n \"ns\": \"a.b\",\n\n \"firstBatch\": [],\n\n },\n", "file_path": "src/operation/find/test.rs", "rank": 20, "score": 319544.87818368414 }, { "content": "pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>(\n\n actual: &A,\n\n expected: &E,\n\n description: Option<&str>,\n\n) {\n\n let result = actual.matches(expected);\n\n assert!(\n\n result.is_ok(),\n\n \"{}\\n{:#?}\\n did not MATCH \\n{:#?}\\n MATCH failure: {}\",\n\n description.unwrap_or(\"\"),\n\n actual,\n\n expected,\n\n result.unwrap_err(),\n\n );\n\n}\n\n\n", "file_path": "src/test/util/matchable.rs", "rank": 21, "score": 319526.9317448953 }, { "content": "fn return_document_to_bool(return_document: Option<ReturnDocument>) -> Option<bool> {\n\n if let Some(return_document) = return_document {\n\n return match return_document {\n\n ReturnDocument::After => Some(true),\n\n ReturnDocument::Before => Some(false),\n\n };\n\n }\n\n None\n\n}\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 22, "score": 315706.8189558727 }, { "content": "fn serialize_true<S: Serializer>(s: S) -> std::result::Result<S::Ok, S::Error> {\n\n s.serialize_bool(true)\n\n}\n\n\n\n#[serde_with::skip_serializing_none]\n\n#[derive(Debug, TypedBuilder, Serialize)]\n\n#[builder(field_defaults(setter(into)))]\n\n#[serde(rename_all = \"camelCase\")]\n\npub(super) struct FindAndModifyOptions {\n\n 
#[serde(flatten)]\n\n pub(crate) modification: Modification,\n\n\n\n #[builder(default)]\n\n pub(crate) sort: Option<Document>,\n\n\n\n #[builder(default)]\n\n pub(crate) new: Option<bool>,\n\n\n\n #[builder(default)]\n\n pub(crate) upsert: Option<bool>,\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 23, "score": 308281.2752775359 }, { "content": "/// Validates that a `saslStart` or `saslContinue` command response is successful.\n\nfn validate_command_success(auth_mechanism: &str, response: &Document) -> Result<()> {\n\n let ok = match response.get(\"ok\") {\n\n Some(ok) => ok,\n\n None => return Ok(()),\n\n };\n\n\n\n match bson_util::get_int(ok) {\n\n Some(1) => Ok(()),\n\n Some(_) => Err(Error::authentication_error(\n\n auth_mechanism,\n\n response\n\n .get_str(\"errmsg\")\n\n .unwrap_or(\"Authentication failure\"),\n\n )),\n\n _ => Err(Error::invalid_authentication_response(auth_mechanism)),\n\n }\n\n}\n\n\n\n/// Encapsulates the parsing of the response to a `saslStart` or `saslContinue` command.\n\npub(super) struct SaslResponse {\n", "file_path": "src/client/auth/sasl.rs", "rank": 24, "score": 305110.2171570327 }, { "content": "fn results_match_inner(\n\n actual: Option<&Bson>,\n\n expected: &Bson,\n\n returns_root_documents: bool,\n\n root: bool,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match expected {\n\n Bson::Document(expected_doc) => {\n\n if let Some((key, value)) = expected_doc.iter().next() {\n\n if key.starts_with(\"$$\") && expected_doc.len() == 1 {\n\n return special_operator_matches((key, value), actual, entities);\n\n }\n\n }\n\n\n\n let actual_doc = match actual {\n\n Some(Bson::Document(actual)) => actual,\n\n // The only case in which None is an acceptable value is if the expected document\n\n // is a special operator; otherwise, the two documents do not match.\n\n _ => return false,\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 25, "score": 304289.13375166955 }, { "content": "fn 
command_events_match(\n\n actual: &ExpectedCommandEvent,\n\n expected: &ExpectedCommandEvent,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match (actual, expected) {\n\n (\n\n ExpectedCommandEvent::Started {\n\n command_name: actual_command_name,\n\n database_name: actual_database_name,\n\n command: actual_command,\n\n has_service_id: actual_has_service_id,\n\n },\n\n ExpectedCommandEvent::Started {\n\n command_name: expected_command_name,\n\n database_name: expected_database_name,\n\n command: expected_command,\n\n has_service_id: expected_has_service_id,\n\n },\n\n ) => {\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 26, "score": 304110.988217795 }, { "content": "fn special_operator_matches(\n\n (key, value): (&String, &Bson),\n\n actual: Option<&Bson>,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match key.as_ref() {\n\n \"$$exists\" => value.as_bool().unwrap() == actual.is_some(),\n\n \"$$type\" => type_matches(value, actual.unwrap()),\n\n \"$$unsetOrMatches\" => {\n\n if actual.is_some() {\n\n results_match_inner(actual, value, false, false, entities)\n\n } else {\n\n true\n\n }\n\n }\n\n \"$$matchesEntity\" => {\n\n let id = value.as_str().unwrap();\n\n entity_matches(id, actual, entities.unwrap())\n\n }\n\n \"$$matchesHexBytes\" => panic!(\"GridFS not implemented\"),\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 27, "score": 303143.95280064456 }, { "content": "/// Parses a string slice of the form \"<expected_key>=<body>\" into \"<body>\", if possible.\n\nfn parse_kvp(str: &str, expected_key: char) -> Result<String> {\n\n if !str.starts_with(expected_key) || str.chars().nth(1) != Some('=') {\n\n Err(Error::invalid_authentication_response(\"SCRAM\"))\n\n } else {\n\n Ok(str.chars().skip(2).collect())\n\n }\n\n}\n\n\n\n/// Model of the first message sent by the client.\n\n#[derive(Debug)]\n\npub(crate) struct ClientFirst {\n\n source: String,\n\n\n\n message: String,\n\n\n\n gs2_header: 
Range<usize>,\n\n\n\n bare: Range<usize>,\n\n\n\n nonce: String,\n", "file_path": "src/client/auth/scram.rs", "rank": 28, "score": 299435.13086473104 }, { "content": "pub fn get_default_name(description: &str) -> String {\n\n let mut db_name = description\n\n .replace('$', \"%\")\n\n .replace(' ', \"_\")\n\n .replace('.', \"_\");\n\n // database names must have fewer than 64 characters\n\n db_name.truncate(63);\n\n db_name\n\n}\n", "file_path": "src/test/util/mod.rs", "rank": 29, "score": 289962.7027574483 }, { "content": "/// Choose a server from several suitable choices within the latency window according to\n\n/// the algorithm laid out in the server selection specification.\n\nfn select_server_in_latency_window(in_window: Vec<&Arc<Server>>) -> Option<Arc<Server>> {\n\n if in_window.is_empty() {\n\n return None;\n\n } else if in_window.len() == 1 {\n\n return Some(in_window[0].clone());\n\n }\n\n\n\n let mut rng = SmallRng::from_entropy();\n\n in_window\n\n .choose_multiple(&mut rng, 2)\n\n .min_by_key(|s| s.operation_count())\n\n .map(|server| (*server).clone())\n\n}\n\n\n\nimpl TopologyDescription {\n\n pub(crate) fn server_selection_timeout_error_message(\n\n &self,\n\n criteria: &SelectionCriteria,\n\n ) -> String {\n\n if self.has_available_servers() {\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 30, "score": 286272.05659868254 }, { "content": "fn write_concern_to_document(write_concern: &WriteConcern) -> Result<Document> {\n\n match bson::to_bson(&write_concern)? 
{\n\n Bson::Document(doc) => Ok(doc),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/test/spec/read_write_concern/mod.rs", "rank": 31, "score": 282498.92169344064 }, { "content": "fn verify_max_staleness_inner(max_staleness: Option<Duration>) -> std::result::Result<(), String> {\n\n if max_staleness\n\n .map(|staleness| staleness > Duration::from_secs(0) && staleness < Duration::from_secs(90))\n\n .unwrap_or(false)\n\n {\n\n return Err(\"max staleness cannot be both positive and below 90 seconds\".into());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 32, "score": 278812.47671202 }, { "content": "fn build_test(\n\n db_name: &str,\n\n mut list_collections: ListCollections<Document>,\n\n mut expected_body: Document,\n\n) {\n\n let mut cmd = list_collections\n\n .build(&StreamDescription::new_testing())\n\n .expect(\"build should succeed\");\n\n assert_eq!(cmd.name, \"listCollections\");\n\n assert_eq!(cmd.target_db, db_name);\n\n\n\n bson_util::sort_document(&mut cmd.body);\n\n bson_util::sort_document(&mut expected_body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build() {\n", "file_path": "src/operation/list_collections/test.rs", "rank": 33, "score": 278550.35175049736 }, { "content": "pub fn deserialize_server_api_test_format<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<ServerApi>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n #[derive(Debug, Deserialize)]\n\n #[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\n struct ApiHelper {\n\n version: ServerApiVersion,\n\n strict: Option<bool>,\n\n deprecation_errors: Option<bool>,\n\n }\n\n\n\n let h = ApiHelper::deserialize(deserializer)?;\n\n Ok(Some(ServerApi {\n\n version: h.version,\n\n strict: h.strict,\n\n deprecation_errors: h.deprecation_errors,\n\n }))\n\n}\n\n\n", 
"file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 34, "score": 270240.0075215334 }, { "content": "/// A `GenericCursor` that optionally owns its own sessions.\n\n/// This is to be used by cursors associated with implicit sessions.\n\ntype ImplicitSessionCursor<T> = GenericCursor<ImplicitSessionGetMoreProvider<T>, T>;\n\n\n", "file_path": "src/cursor/mod.rs", "rank": 35, "score": 265974.0368641804 }, { "content": "fn deserialize_pool_created<'de, D>(deserializer: D) -> Result<PoolCreatedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = PoolCreatedEventHelper::deserialize(deserializer)?;\n\n\n\n // The CMAP spec tests use \"42\" as a placeholder in the expected events to indicate that the\n\n // driver should assert that a value is present without any constraints on the value itself.\n\n // This idiom is used for the connection pool creation options even when no options are\n\n // specified, meaning that there isn't any useful assertion we can do based on this value.\n\n // Because of this, we deserialize the value `42` into `None` for the options, which prevents\n\n // deserialization failure due to an unexpected type. For other integer values, we raise an\n\n // error indicating that we expect `42` instead.\n\n let options = match helper.options {\n\n Some(PoolOptionsHelper::Options(opts)) => Some(opts),\n\n Some(PoolOptionsHelper::Number(42)) | None => None,\n\n Some(PoolOptionsHelper::Number(other)) => {\n\n return Err(serde::de::Error::invalid_value(\n\n Unexpected::Unsigned(other),\n\n &\"42\",\n", "file_path": "src/cmap/test/event.rs", "rank": 36, "score": 265779.7435372267 }, { "content": "fn is_auth_error(error: Error) -> bool {\n\n matches!(*error.kind, ErrorKind::Authentication { .. 
})\n\n}\n\n\n\n/// Performs an operation that requires authentication and verifies that it either succeeded or\n\n/// failed with an authentication error according to the `should_succeed` parameter.\n\nasync fn auth_test(client: Client, should_succeed: bool) {\n\n let result = client.list_database_names(None, None).await;\n\n if should_succeed {\n\n result.expect(\"operation should have succeeded\");\n\n } else {\n\n assert!(is_auth_error(result.unwrap_err()));\n\n }\n\n}\n\n\n\n/// Attempts to authenticate using the given username/password, optionally specifying a mechanism\n\n/// via the `ClientOptions` api.\n\n///\n\n/// Asserts that the authentication's success matches the provided parameter.\n\nasync fn auth_test_options(\n", "file_path": "src/test/client.rs", "rank": 37, "score": 262535.1168613922 }, { "content": "fn default_hosts() -> Vec<ServerAddress> {\n\n vec![ServerAddress::default()]\n\n}\n\n\n\nimpl Default for ClientOptions {\n\n fn default() -> Self {\n\n Self::builder().build()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Serialize for ClientOptions {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n #[derive(Serialize)]\n\n struct ClientOptionsHelper<'a> {\n\n appname: &'a Option<String>,\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 38, "score": 254091.44293305412 }, { "content": "fn filter_servers_by_tag_sets(servers: &mut Vec<&ServerDescription>, tag_sets: &[TagSet]) {\n\n if tag_sets.is_empty() {\n\n return;\n\n }\n\n\n\n for tag_set in tag_sets {\n\n let matches_tag_set = |server: &&ServerDescription| server.matches_tag_set(tag_set);\n\n\n\n if servers.iter().any(matches_tag_set) {\n\n servers.retain(matches_tag_set);\n\n\n\n return;\n\n }\n\n }\n\n\n\n servers.clear();\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 39, "score": 252211.84989622014 }, { "content": "fn get_int(value: &Bson) -> Option<i64> {\n\n 
bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json))\n\n}\n", "file_path": "src/test/util/matchable.rs", "rank": 40, "score": 250946.94710038512 }, { "content": "fn convert_read_preference(test_read_pref: TestReadPreference) -> Option<ReadPreference> {\n\n let max_staleness = test_read_pref\n\n .max_staleness_seconds\n\n .map(Duration::from_secs);\n\n let options = ReadPreferenceOptions::builder()\n\n .tag_sets(test_read_pref.tag_sets)\n\n .max_staleness(max_staleness)\n\n .build();\n\n\n\n let read_pref = match &test_read_pref.mode.as_ref()?[..] {\n\n \"Primary\" => ReadPreference::Primary,\n\n \"Secondary\" => ReadPreference::Secondary { options },\n\n \"PrimaryPreferred\" => ReadPreference::PrimaryPreferred { options },\n\n \"SecondaryPreferred\" => ReadPreference::SecondaryPreferred { options },\n\n \"Nearest\" => ReadPreference::Nearest { options },\n\n m => panic!(\"invalid read preference mode: {}\", m),\n\n };\n\n\n\n Some(read_pref)\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/test/logic.rs", "rank": 41, "score": 249932.87804179374 }, { "content": "fn normalize_write_concern_doc(mut write_concern_doc: Document) -> Document {\n\n if let Some(w_timeout) = write_concern_doc.remove(\"wtimeout\") {\n\n write_concern_doc.insert(\"wtimeoutMS\", w_timeout);\n\n }\n\n\n\n if let Some(j) = write_concern_doc.remove(\"j\") {\n\n write_concern_doc.insert(\"journal\", j);\n\n }\n\n\n\n write_concern_doc\n\n}\n\n\n\nasync fn run_connection_string_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n match ClientOptions::parse(&test_case.uri).await {\n\n Ok(options) => {\n\n assert!(test_case.valid);\n\n\n\n if let Some(ref expected_read_concern) = test_case.read_concern {\n\n let mut actual_read_concern = Document::new();\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 42, "score": 247860.92137991614 }, { "content": "fn 
write_concern_from_document(write_concern_doc: Document) -> Option<WriteConcern> {\n\n let mut write_concern = WriteConcern::default();\n\n\n\n for (key, value) in write_concern_doc {\n\n match (&key[..], value) {\n\n (\"w\", Bson::Int32(i)) => {\n\n write_concern.w = Some(Acknowledgment::from(i as u32));\n\n }\n\n (\"w\", Bson::String(s)) => {\n\n write_concern.w = Some(Acknowledgment::from(s));\n\n }\n\n (\"journal\", Bson::Boolean(b)) => {\n\n write_concern.journal = Some(b);\n\n }\n\n (\"wtimeoutMS\", Bson::Int32(i)) if i > 0 => {\n\n write_concern.w_timeout = Some(Duration::from_millis(i as u64));\n\n }\n\n (\"wtimeoutMS\", Bson::Int32(_)) => {\n\n // WriteConcern has an unsigned integer for the wtimeout field, so this is\n\n // impossible to test.\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 43, "score": 247430.51792253018 }, { "content": "fn parse_i64_ext_json(doc: &Document) -> Option<i64> {\n\n let number_string = doc.get(\"$numberLong\").and_then(Bson::as_str)?;\n\n number_string.parse::<i64>().ok()\n\n}\n\n\n", "file_path": "src/test/util/matchable.rs", "rank": 44, "score": 243411.76368410868 }, { "content": "fn kill_cursors_sent(client: &EventClient) -> bool {\n\n !client\n\n .get_command_started_events(&[\"killCursors\"])\n\n .is_empty()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\n#[function_name::named]\n\nasync fn kill_cursors_on_drop() {\n\n let _guard: RwLockReadGuard<()> = LOCK.run_concurrently().await;\n\n\n\n let client = TestClient::new().await;\n\n let db = client.database(function_name!());\n\n let coll = db.collection(function_name!());\n\n\n\n drop_collection(&coll).await;\n\n\n\n coll.insert_many(vec![doc! { \"x\": 1 }, doc! 
{ \"x\": 2 }], None)\n\n .await\n", "file_path": "src/test/coll.rs", "rank": 45, "score": 240330.6499468784 }, { "content": "#[derive(Clone, Copy, Debug, Deserialize)]\n\nenum TestServerType {\n\n Standalone,\n\n Mongos,\n\n #[serde(rename = \"RSPrimary\")]\n\n RsPrimary,\n\n #[serde(rename = \"RSSecondary\")]\n\n RsSecondary,\n\n #[serde(rename = \"RSArbiter\")]\n\n RsArbiter,\n\n #[serde(rename = \"RSOther\")]\n\n RsOther,\n\n #[serde(rename = \"RSGhost\")]\n\n RsGhost,\n\n LoadBalancer,\n\n Unknown,\n\n PossiblePrimary,\n\n}\n\n\n\nimpl TestServerType {\n\n fn into_server_type(self) -> Option<ServerType> {\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 46, "score": 240095.55947482245 }, { "content": "/// A `GenericCursor` that borrows its session.\n\n/// This is to be used with cursors associated with explicit sessions borrowed from the user.\n\ntype ExplicitSessionCursor<'session, T> =\n\n GenericCursor<ExplicitSessionGetMoreProvider<'session, T>, T>;\n\n\n\n/// A type that implements [`Stream`](https://docs.rs/futures/latest/futures/stream/index.html) which can be used to\n\n/// stream the results of a [`SessionCursor`]. Returned from [`SessionCursor::stream`].\n\n///\n\n/// This updates the buffer of the parent [`SessionCursor`] when dropped. 
[`SessionCursor::next`] or\n\n/// any further streams created from [`SessionCursor::stream`] will pick up where this one left off.\n\npub struct SessionCursorStream<'cursor, 'session, T = Document>\n\nwhere\n\n T: DeserializeOwned + Unpin + Send + Sync,\n\n{\n\n session_cursor: &'cursor mut SessionCursor<T>,\n\n generic_cursor: ExplicitSessionCursor<'session, T>,\n\n}\n\n\n\nimpl<'cursor, 'session, T> Stream for SessionCursorStream<'cursor, 'session, T>\n\nwhere\n\n T: DeserializeOwned + Unpin + Send + Sync,\n\n{\n", "file_path": "src/cursor/session.rs", "rank": 47, "score": 235125.57083806815 }, { "content": "struct ImplicitSessionGetMoreResult<T> {\n\n get_more_result: Result<GetMoreResult<T>>,\n\n session: Option<Box<ClientSession>>,\n\n}\n\n\n\nimpl<T> GetMoreProviderResult for ImplicitSessionGetMoreResult<T> {\n\n type Session = Option<Box<ClientSession>>;\n\n type DocumentType = T;\n\n\n\n fn as_ref(&self) -> std::result::Result<&GetMoreResult<T>, &Error> {\n\n self.get_more_result.as_ref()\n\n }\n\n\n\n fn into_parts(self) -> (Result<GetMoreResult<T>>, Self::Session) {\n\n (self.get_more_result, self.session)\n\n }\n\n}\n\n\n", "file_path": "src/cursor/mod.rs", "rank": 48, "score": 234431.21157068707 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n filter: Option<Document>,\n\n options: Option<FindOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let mut find = Find::<Document>::new(ns.clone(), filter, options);\n\n\n\n let mut cmd = find.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"find\");\n\n assert_eq!(cmd.target_db.as_str(), ns.db.as_str());\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n", "file_path": "src/operation/find/test.rs", "rank": 49, "score": 234279.07089266842 }, { "content": "fn build_test(\n\n target: impl 
Into<AggregateTarget>,\n\n pipeline: Vec<Document>,\n\n options: Option<AggregateOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let target = target.into();\n\n\n\n let mut aggregate = Aggregate::new(target.clone(), pipeline, options);\n\n\n\n let mut cmd = aggregate.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"aggregate\");\n\n assert_eq!(cmd.target_db.as_str(), target.db_name());\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n", "file_path": "src/operation/aggregate/test.rs", "rank": 50, "score": 234279.07089266842 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n cursor_id: i64,\n\n address: ServerAddress,\n\n batch_size: Option<u32>,\n\n max_time: Option<Duration>,\n\n mut expected_body: Document,\n\n) {\n\n let info = CursorInformation {\n\n ns: ns.clone(),\n\n id: cursor_id,\n\n address,\n\n batch_size,\n\n max_time,\n\n };\n\n let mut get_more = GetMore::<Document>::new(info, None);\n\n\n\n let build_result = get_more.build(&StreamDescription::new_testing());\n\n assert!(build_result.is_ok());\n\n\n", "file_path": "src/operation/get_more/test.rs", "rank": 51, "score": 234279.07089266842 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn typed_collection() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let coll = init_db_and_typed_coll(&client, function_name!(), function_name!());\n\n\n\n #[derive(Serialize, Deserialize, Debug)]\n\n struct MyType {\n\n x: i32,\n\n str: String,\n\n }\n\n let my_type = MyType {\n\n x: 1,\n\n str: \"hello\".into(),\n\n };\n\n\n\n assert!(coll.insert_one(my_type, None).is_ok());\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 52, "score": 229626.1076862439 }, { 
"content": "pub trait TestOperation: Debug {\n\n fn execute_on_collection<'a>(\n\n &'a self,\n\n _collection: &'a Collection<Document>,\n\n _session: Option<&'a mut ClientSession>,\n\n ) -> BoxFuture<'a, Result<Option<Bson>>> {\n\n todo!()\n\n }\n\n\n\n fn execute_on_database<'a>(\n\n &'a self,\n\n _database: &'a Database,\n\n _session: Option<&'a mut ClientSession>,\n\n ) -> BoxFuture<'a, Result<Option<Bson>>> {\n\n todo!()\n\n }\n\n\n\n fn execute_on_client<'a>(\n\n &'a self,\n\n _client: &'a TestClient,\n", "file_path": "src/test/spec/v2_runner/operation.rs", "rank": 53, "score": 228478.58809445353 }, { "content": "pub trait TestOperation: Debug {\n\n fn execute_test_runner_operation<'a>(\n\n &'a self,\n\n _test_runner: &'a mut TestRunner,\n\n ) -> BoxFuture<'a, ()> {\n\n todo!()\n\n }\n\n\n\n fn execute_entity_operation<'a>(\n\n &'a self,\n\n _id: &'a str,\n\n _test_runner: &'a mut TestRunner,\n\n ) -> BoxFuture<'a, Result<Option<Entity>>> {\n\n todo!()\n\n }\n\n\n\n /// Whether or not this operation returns an array of root documents. 
This information is\n\n /// necessary to determine how the return value of an operation should be compared to the\n\n /// expected value.\n\n fn returns_root_documents(&self) -> bool {\n", "file_path": "src/test/spec/unified_runner/operation.rs", "rank": 54, "score": 228478.58809445353 }, { "content": "#[test]\n\nfn client_options() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let mut options = ClientOptions::parse(\"mongodb://localhost:27017/\").unwrap();\n\n\n\n options.original_uri.take();\n\n\n\n assert_eq!(\n\n options,\n\n ClientOptions::builder()\n\n .hosts(vec![ServerAddress::Tcp {\n\n host: \"localhost\".into(),\n\n port: Some(27017)\n\n }])\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 55, "score": 228031.369062021 }, { "content": "/// The number of digits in `n` in base 10.\n\n/// Useful for calculating the size of an array entry in BSON.\n\nfn num_decimal_digits(mut n: usize) -> u64 {\n\n let mut digits = 0;\n\n\n\n loop {\n\n n /= 10;\n\n digits += 1;\n\n\n\n if n == 0 {\n\n return digits;\n\n }\n\n }\n\n}\n\n\n\n/// Read a document's raw BSON bytes from the provided reader.\n\npub(crate) fn read_document_bytes<R: Read>(mut reader: R) -> Result<Vec<u8>> {\n\n let length = reader.read_i32()?;\n\n\n\n let mut bytes = Vec::with_capacity(length as usize);\n\n bytes.write_i32(length)?;\n\n\n", "file_path": "src/bson_util/mod.rs", "rank": 56, "score": 227748.84857047375 }, { "content": "struct DroppedClientSession {\n\n cluster_time: Option<ClusterTime>,\n\n server_session: ServerSession,\n\n client: Client,\n\n is_implicit: bool,\n\n options: Option<SessionOptions>,\n\n transaction: Transaction,\n\n snapshot_time: Option<Timestamp>,\n\n}\n\n\n\nimpl From<DroppedClientSession> for ClientSession {\n\n fn from(dropped_session: DroppedClientSession) -> Self {\n\n Self {\n\n cluster_time: dropped_session.cluster_time,\n\n server_session: dropped_session.server_session,\n\n 
client: dropped_session.client,\n\n is_implicit: dropped_session.is_implicit,\n\n options: dropped_session.options,\n\n transaction: dropped_session.transaction,\n\n snapshot_time: dropped_session.snapshot_time,\n", "file_path": "src/client/session/mod.rs", "rank": 57, "score": 225096.78556242224 }, { "content": "pub fn eq_matches<T: PartialEq + Debug>(\n\n name: &str,\n\n actual: &T,\n\n expected: &T,\n\n) -> Result<(), String> {\n\n if actual != expected {\n\n return Err(format!(\n\n \"expected {} {:?}, got {:?}\",\n\n name, expected, actual\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Matchable for Bson {\n\n fn is_placeholder(&self) -> bool {\n\n if let Bson::String(string) = self {\n\n string.as_str() == \"42\" || string.as_str() == \"\"\n\n } else {\n\n get_int(self) == Some(42)\n", "file_path": "src/test/util/matchable.rs", "rank": 58, "score": 222204.0593616229 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn client() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n\n\n client\n\n .database(function_name!())\n\n .collection(function_name!())\n\n .insert_one(Document::new(), None)\n\n .expect(\"insert should succeed\");\n\n\n\n let db_names = client\n\n .list_database_names(None, None)\n\n .expect(\"list_database_names should succeed\");\n\n assert!(db_names.contains(&function_name!().to_string()));\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 59, "score": 215615.55225592648 }, { "content": "fn verify_description_outcome(\n\n outcome: DescriptionOutcome,\n\n topology_description: TopologyDescription,\n\n test_description: &str,\n\n phase_description: String,\n\n) {\n\n assert_eq!(\n\n topology_description.topology_type, outcome.topology_type,\n\n \"{}: {}\",\n\n test_description, phase_description\n\n );\n\n\n\n assert_eq!(\n\n 
topology_description.set_name, outcome.set_name,\n\n \"{}: {}\",\n\n test_description, phase_description,\n\n );\n\n\n\n let expected_timeout = outcome\n\n .logical_session_timeout_minutes\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 60, "score": 212007.03715108067 }, { "content": "fn deserialize_uri_options_to_uri_string_option<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let uri_options = Document::deserialize(deserializer)?;\n\n Ok(Some(merge_uri_options(&DEFAULT_URI, Some(&uri_options))))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Outcome {\n\n pub collection: CollectionOutcome,\n\n}\n\n\n\nimpl Outcome {\n\n pub async fn matches_actual(\n\n self,\n\n db_name: String,\n\n coll_name: String,\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 61, "score": 210670.0606849346 }, { "content": "#[allow(dead_code)]\n\ntype Result<T> = std::result::Result<T, Err>;\n\n\n\n#[cfg(not(feature = \"sync\"))]\n\nasync fn _connecting() -> Result<()> {\n\n use mongodb::{options::ClientOptions, Client};\n\n\n\n // Parse a connection string into an options struct.\n\n let mut client_options = ClientOptions::parse(\"mongodb://localhost:27017\").await?;\n\n\n\n // Manually set an option.\n\n client_options.app_name = Some(\"My App\".to_string());\n\n\n\n // Get a handle to the deployment.\n\n let client = Client::with_options(client_options)?;\n\n\n\n // List the names of the databases in that deployment.\n\n for db_name in client.list_database_names(None, None).await? 
{\n\n println!(\"{}\", db_name);\n\n }\n\n\n", "file_path": "tests/readme_examples.rs", "rank": 62, "score": 209863.59493974975 }, { "content": "fn deserialize_checkout_failed<'de, D>(\n\n deserializer: D,\n\n) -> Result<ConnectionCheckoutFailedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = ConnectionCheckoutFailedHelper::deserialize(deserializer)?;\n\n\n\n // The driver doesn't have a concept of a \"closed pool\", instead having the pool closed when the\n\n // pool is dropped. Because of this, the driver doesn't implement the \"poolClosed\" reason for a\n\n // connection checkout failure. While we skip over the corresponding tests in our spec test\n\n // runner, we still need to be able to deserialize the \"poolClosed\" reason to avoid the test\n\n // harness panicking, so we arbitrarily map the \"poolClosed\" to \"connectionError\".\n\n let reason = match helper.reason {\n\n CheckoutFailedReasonHelper::PoolClosed | CheckoutFailedReasonHelper::ConnectionError => {\n\n ConnectionCheckoutFailedReason::ConnectionError\n\n }\n\n CheckoutFailedReasonHelper::Timeout => ConnectionCheckoutFailedReason::Timeout,\n\n };\n\n\n\n Ok(ConnectionCheckoutFailedEvent {\n\n address: ServerAddress::Tcp {\n\n host: Default::default(),\n\n port: None,\n\n },\n\n reason,\n\n })\n\n}\n", "file_path": "src/cmap/test/event.rs", "rank": 63, "score": 207615.16110561707 }, { "content": "fn deserialize_op<'de, 'a, T: 'a + Deserialize<'de> + TestOperation>(\n\n value: Bson,\n\n) -> std::result::Result<Box<dyn TestOperation + 'a>, bson::de::Error> {\n\n T::deserialize(BsonDeserializer::new(value)).map(|op| Box::new(op) as Box<dyn TestOperation>)\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Operation {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> std::result::Result<Self, D::Error> {\n\n #[derive(Debug, Deserialize)]\n\n #[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\n struct OperationDefinition {\n\n pub name: String,\n\n pub object: 
OperationObject,\n\n #[serde(default = \"default_arguments\")]\n\n pub arguments: Bson,\n\n pub expect_error: Option<ExpectError>,\n\n pub expect_result: Option<Bson>,\n\n pub save_result_as_entity: Option<String>,\n\n pub ignore_result_and_error: Option<bool>,\n\n }\n", "file_path": "src/test/spec/unified_runner/operation.rs", "rank": 64, "score": 205825.41871324176 }, { "content": "fn make_lookup_hosts(hosts: Vec<ServerAddress>) -> Result<LookupHosts> {\n\n Ok(LookupHosts {\n\n hosts: hosts.into_iter().map(Result::Ok).collect(),\n\n min_ttl: Duration::from_secs(60),\n\n })\n\n}\n\n\n\n// If a new DNS record is returned, it should be reflected in the topology.\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn add_new_dns_record() {\n\n let hosts = vec![\n\n localhost_test_build_10gen(27017),\n\n localhost_test_build_10gen(27018),\n\n localhost_test_build_10gen(27019),\n\n ];\n\n\n\n run_test(Ok(hosts.clone()), hosts.into_iter().collect()).await;\n\n}\n\n\n", "file_path": "src/sdam/srv_polling/test.rs", "rank": 65, "score": 205743.22237066732 }, { "content": " ///\n\n /// ```rust\n\n /// # use mongodb::{bson::{doc, Document}, error::Result, Client, ClientSession};\n\n /// #\n\n /// # async fn do_stuff() -> Result<()> {\n\n /// # let client = Client::with_uri_str(\"mongodb://example.com\").await?;\n\n /// # let coll = client.database(\"foo\").collection::<Document>(\"bar\");\n\n /// # let mut session = client.start_session(None).await?;\n\n /// session.start_transaction(None).await?;\n\n /// let result = coll.insert_one_with_session(doc! { \"x\": 1 }, None, &mut session).await?;\n\n /// session.commit_transaction().await?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n ///\n\n /// This operation will retry once upon failure if the connection and encountered error support\n\n /// retryability. 
See the documentation\n\n /// [here](https://docs.mongodb.com/manual/core/retryable-writes/) for more information on\n\n /// retryable writes.\n\n pub async fn commit_transaction(&mut self) -> Result<()> {\n", "file_path": "src/client/session/mod.rs", "rank": 66, "score": 204404.15752563634 }, { "content": " /// ```rust\n\n /// # use mongodb::{bson::{doc, Document}, error::Result, Client, ClientSession};\n\n /// #\n\n /// # async fn do_stuff() -> Result<()> {\n\n /// # let client = Client::with_uri_str(\"mongodb://example.com\").await?;\n\n /// # let coll = client.database(\"foo\").collection::<Document>(\"bar\");\n\n /// # let mut session = client.start_session(None).await?;\n\n /// session.start_transaction(None).await?;\n\n /// let result = coll.insert_one_with_session(doc! { \"x\": 1 }, None, &mut session).await?;\n\n /// session.commit_transaction().await?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub async fn start_transaction(\n\n &mut self,\n\n options: impl Into<Option<TransactionOptions>>,\n\n ) -> Result<()> {\n\n if self\n\n .options\n\n .as_ref()\n", "file_path": "src/client/session/mod.rs", "rank": 67, "score": 204403.84029220615 }, { "content": "/// # Client,\n\n/// ClientSession,\n\n/// Collection,\n\n/// };\n\n///\n\n/// # async fn do_stuff() -> Result<()> {\n\n/// # let client = Client::with_uri_str(\"mongodb://example.com\").await?;\n\n/// # let coll: Collection<Document> = client.database(\"foo\").collection(\"bar\");\n\n/// let mut session = client.start_session(None).await?;\n\n/// let options = TransactionOptions::builder()\n\n/// .read_concern(ReadConcern::majority())\n\n/// .write_concern(WriteConcern::builder().w(Acknowledgment::Majority).build())\n\n/// .build();\n\n/// session.start_transaction(options).await?;\n\n/// // A \"TransientTransactionError\" label indicates that the entire transaction can be retried\n\n/// // with a reasonable expectation that it will succeed.\n\n/// while let Err(error) = execute_transaction(&coll, &mut 
session).await {\n\n/// if !error.contains_label(TRANSIENT_TRANSACTION_ERROR) {\n\n/// break;\n\n/// }\n", "file_path": "src/client/session/mod.rs", "rank": 68, "score": 204400.3539588376 }, { "content": " ///\n\n /// ```rust\n\n /// # use mongodb::{bson::{doc, Document}, error::Result, Client, ClientSession, Collection};\n\n /// #\n\n /// # async fn do_stuff() -> Result<()> {\n\n /// # let client = Client::with_uri_str(\"mongodb://example.com\").await?;\n\n /// # let coll = client.database(\"foo\").collection::<Document>(\"bar\");\n\n /// # let mut session = client.start_session(None).await?;\n\n /// session.start_transaction(None).await?;\n\n /// match execute_transaction(&coll, &mut session).await {\n\n /// Ok(_) => session.commit_transaction().await?,\n\n /// Err(_) => session.abort_transaction().await?,\n\n /// }\n\n /// # Ok(())\n\n /// # }\n\n ///\n\n /// async fn execute_transaction(coll: &Collection<Document>, session: &mut ClientSession) -> Result<()> {\n\n /// coll.insert_one_with_session(doc! { \"x\": 1 }, None, session).await?;\n\n /// coll.delete_one_with_session(doc! 
{ \"y\": 2 }, None, session).await?;\n\n /// Ok(())\n", "file_path": "src/client/session/mod.rs", "rank": 69, "score": 204398.21583642391 }, { "content": "mod cluster_time;\n\nmod pool;\n\n#[cfg(test)]\n\nmod test;\n\n\n\nuse std::{\n\n collections::HashSet,\n\n sync::Arc,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse lazy_static::lazy_static;\n\nuse uuid::Uuid;\n\n\n\nuse crate::{\n\n bson::{doc, spec::BinarySubtype, Binary, Bson, Document, Timestamp},\n\n cmap::conn::PinnedConnectionHandle,\n\n error::{ErrorKind, Result},\n\n operation::{AbortTransaction, CommitTransaction, Operation},\n\n options::{SessionOptions, TransactionOptions},\n", "file_path": "src/client/session/mod.rs", "rank": 70, "score": 204395.75308719042 }, { "content": " } else {\n\n let client = self.client.clone();\n\n let server_session = self.server_session.clone();\n\n RUNTIME.execute(async move {\n\n client.check_in_server_session(server_session).await;\n\n });\n\n }\n\n }\n\n}\n\n\n\n/// Client side abstraction of a server session. These are pooled and may be associated with\n\n/// multiple `ClientSession`s over the course of their lifetime.\n\n#[derive(Clone, Debug)]\n\npub(crate) struct ServerSession {\n\n /// The id of the server session to which this corresponds.\n\n id: Document,\n\n\n\n /// The last time an operation was executed with this session.\n\n last_use: std::time::Instant,\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 71, "score": 204391.69299058433 }, { "content": " sdam::{ServerInfo, TransactionSupportStatus},\n\n selection_criteria::SelectionCriteria,\n\n Client,\n\n RUNTIME,\n\n};\n\npub use cluster_time::ClusterTime;\n\npub(super) use pool::ServerSessionPool;\n\n\n\nuse super::options::ServerAddress;\n\n\n\nlazy_static! 
{\n\n pub(crate) static ref SESSIONS_UNSUPPORTED_COMMANDS: HashSet<&'static str> = {\n\n let mut hash_set = HashSet::new();\n\n hash_set.insert(\"killcursors\");\n\n hash_set.insert(\"parallelcollectionscan\");\n\n hash_set\n\n };\n\n}\n\n\n\n/// A MongoDB client session. This struct represents a logical session used for ordering sequential\n", "file_path": "src/client/session/mod.rs", "rank": 72, "score": 204390.31134917526 }, { "content": " }\n\n\n\n /// Whether this session is dirty.\n\n #[cfg(test)]\n\n pub(crate) fn is_dirty(&self) -> bool {\n\n self.server_session.dirty\n\n }\n\n\n\n /// Starts a new transaction on this session with the given `TransactionOptions`. If no options\n\n /// are provided, the session's `defaultTransactionOptions` will be used. This session must\n\n /// be passed into each operation within the transaction; otherwise, the operation will be\n\n /// executed outside of the transaction.\n\n ///\n\n /// Errors returned from operations executed within a transaction may include a\n\n /// [`crate::error::TRANSIENT_TRANSACTION_ERROR`] label. This label indicates that the entire\n\n /// transaction can be retried with a reasonable expectation that it will succeed.\n\n ///\n\n /// Transactions are supported on MongoDB 4.0+. The Rust driver currently only supports\n\n /// transactions on replica sets.\n\n ///\n", "file_path": "src/client/session/mod.rs", "rank": 73, "score": 204388.8334171932 }, { "content": "/// operations. To create a `ClientSession`, call `start_session` on a `Client`.\n\n///\n\n/// `ClientSession` instances are not thread safe or fork safe. They can only be used by one thread\n\n/// or process at a time.\n\n///\n\n/// ## Transactions\n\n/// Transactions are used to execute a series of operations across multiple documents and\n\n/// collections atomically. 
For more information about when and how to use transactions in MongoDB,\n\n/// see the [manual](https://docs.mongodb.com/manual/core/transactions/).\n\n///\n\n/// Replica set transactions are supported on MongoDB 4.0+. Sharded transactions are supported on\n\n/// MongoDDB 4.2+. Transactions are associated with a `ClientSession`. To begin a transaction, call\n\n/// [`ClientSession::start_transaction`] on a `ClientSession`. The `ClientSession` must be passed to\n\n/// operations to be executed within the transaction.\n\n///\n\n/// ```rust\n\n/// use mongodb::{\n\n/// bson::{doc, Document},\n\n/// error::{Result, TRANSIENT_TRANSACTION_ERROR, UNKNOWN_TRANSACTION_COMMIT_RESULT},\n\n/// options::{Acknowledgment, ReadConcern, TransactionOptions, WriteConcern},\n", "file_path": "src/client/session/mod.rs", "rank": 74, "score": 204385.32189611052 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Drop for ClientSession {\n\n fn drop(&mut self) {\n\n if self.transaction.state == TransactionState::InProgress {\n\n let dropped_session = DroppedClientSession {\n\n cluster_time: self.cluster_time.clone(),\n\n server_session: self.server_session.clone(),\n\n client: self.client.clone(),\n\n is_implicit: self.is_implicit,\n\n options: self.options.clone(),\n\n transaction: self.transaction.take(),\n\n snapshot_time: self.snapshot_time,\n\n };\n\n RUNTIME.execute(async move {\n\n let mut session: ClientSession = dropped_session.into();\n\n let _result = session.abort_transaction().await;\n\n });\n", "file_path": "src/client/session/mod.rs", "rank": 75, "score": 204383.52251215195 }, { "content": "impl ClientSession {\n\n /// Creates a new `ClientSession` wrapping the provided server session.\n\n pub(crate) fn new(\n\n server_session: ServerSession,\n\n client: Client,\n\n options: Option<SessionOptions>,\n\n is_implicit: bool,\n\n ) -> Self {\n\n Self {\n\n client,\n\n server_session,\n\n cluster_time: None,\n\n is_implicit,\n\n options,\n\n transaction: Default::default(),\n\n 
snapshot_time: None,\n\n }\n\n }\n\n\n\n /// The client used to create this session.\n", "file_path": "src/client/session/mod.rs", "rank": 76, "score": 204382.5026337076 }, { "content": "/// }\n\n/// # Ok(())\n\n/// # }\n\n///\n\n/// async fn execute_transaction(coll: &Collection<Document>, session: &mut ClientSession) -> Result<()> {\n\n/// coll.insert_one_with_session(doc! { \"x\": 1 }, None, session).await?;\n\n/// coll.delete_one_with_session(doc! { \"y\": 2 }, None, session).await?;\n\n/// // An \"UnknownTransactionCommitResult\" label indicates that it is unknown whether the\n\n/// // commit has satisfied the write concern associated with the transaction. If an error\n\n/// // with this label is returned, it is safe to retry the commit until the write concern is\n\n/// // satisfied or an error without the label is returned.\n\n/// loop {\n\n/// let result = session.commit_transaction().await;\n\n/// if let Err(ref error) = result {\n\n/// if error.contains_label(UNKNOWN_TRANSACTION_COMMIT_RESULT) {\n\n/// continue;\n\n/// }\n\n/// }\n\n/// result?\n\n/// }\n", "file_path": "src/client/session/mod.rs", "rank": 77, "score": 204380.0417530508 }, { "content": " /// The highest seen cluster time this session has seen so far.\n\n /// This will be `None` if this session has not been used in an operation yet.\n\n pub fn cluster_time(&self) -> Option<&ClusterTime> {\n\n self.cluster_time.as_ref()\n\n }\n\n\n\n /// The options used to create this session.\n\n pub fn options(&self) -> Option<&SessionOptions> {\n\n self.options.as_ref()\n\n }\n\n\n\n /// Set the cluster time to the provided one if it is greater than this session's highest seen\n\n /// cluster time or if this session's cluster time is `None`.\n\n pub fn advance_cluster_time(&mut self, to: &ClusterTime) {\n\n if self.cluster_time().map(|ct| ct < to).unwrap_or(true) {\n\n self.cluster_time = Some(to.clone());\n\n }\n\n }\n\n\n\n /// Mark this session (and the underlying server session) as dirty.\n", 
"file_path": "src/client/session/mod.rs", "rank": 78, "score": 204379.49677274146 }, { "content": " pub fn client(&self) -> Client {\n\n self.client.clone()\n\n }\n\n\n\n /// The id of this session.\n\n pub fn id(&self) -> &Document {\n\n &self.server_session.id\n\n }\n\n\n\n /// Whether this session was created implicitly by the driver or explcitly by the user.\n\n pub(crate) fn is_implicit(&self) -> bool {\n\n self.is_implicit\n\n }\n\n\n\n /// Whether this session is currently in a transaction.\n\n pub(crate) fn in_transaction(&self) -> bool {\n\n self.transaction.state == TransactionState::Starting\n\n || self.transaction.state == TransactionState::InProgress\n\n }\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 79, "score": 204377.2174346093 }, { "content": " pub(crate) fn mark_dirty(&mut self) {\n\n self.server_session.dirty = true;\n\n }\n\n\n\n /// Updates the date that the underlying server session was last used as part of an operation\n\n /// sent to the server.\n\n pub(crate) fn update_last_use(&mut self) {\n\n self.server_session.last_use = Instant::now();\n\n }\n\n\n\n /// Gets the current txn_number.\n\n pub(crate) fn txn_number(&self) -> i64 {\n\n self.server_session.txn_number\n\n }\n\n\n\n /// Increments the txn_number.\n\n pub(crate) fn increment_txn_number(&mut self) {\n\n self.server_session.txn_number += 1;\n\n }\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 80, "score": 204377.20365383694 }, { "content": "/// }\n\n/// ```\n\n#[derive(Debug)]\n\npub struct ClientSession {\n\n cluster_time: Option<ClusterTime>,\n\n server_session: ServerSession,\n\n client: Client,\n\n is_implicit: bool,\n\n options: Option<SessionOptions>,\n\n pub(crate) transaction: Transaction,\n\n pub(crate) snapshot_time: Option<Timestamp>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Transaction {\n\n pub(crate) state: TransactionState,\n\n pub(crate) options: Option<TransactionOptions>,\n\n pub(crate) pinned: Option<TransactionPin>,\n\n 
pub(crate) recovery_token: Option<Document>,\n\n}\n", "file_path": "src/client/session/mod.rs", "rank": 81, "score": 204376.82430324852 }, { "content": " /// Whether a network error was encountered while using this session.\n\n dirty: bool,\n\n\n\n /// A monotonically increasing transaction number for this session.\n\n txn_number: i64,\n\n}\n\n\n\nimpl ServerSession {\n\n /// Creates a new session, generating the id client side.\n\n fn new() -> Self {\n\n let binary = Bson::Binary(Binary {\n\n subtype: BinarySubtype::Uuid,\n\n bytes: Uuid::new_v4().as_bytes().to_vec(),\n\n });\n\n\n\n Self {\n\n id: doc! { \"id\": binary },\n\n last_use: Instant::now(),\n\n dirty: false,\n\n txn_number: 0,\n", "file_path": "src/client/session/mod.rs", "rank": 82, "score": 204371.5235893396 }, { "content": " }\n\n TransactionState::Committed {\n\n data_committed: true,\n\n } => {\n\n let mut commit_transaction =\n\n CommitTransaction::new(self.transaction.options.clone());\n\n commit_transaction.update_for_retry();\n\n self.client\n\n .clone()\n\n .execute_operation(commit_transaction, self)\n\n .await\n\n }\n\n TransactionState::Committed {\n\n data_committed: false,\n\n } => Ok(()),\n\n }\n\n }\n\n\n\n /// Aborts the transaction that is currently active on this session. Any open transaction will\n\n /// be aborted automatically in the `Drop` implementation of `ClientSession`.\n", "file_path": "src/client/session/mod.rs", "rank": 83, "score": 204368.9533130391 }, { "content": " .and_then(|o| o.snapshot)\n\n .unwrap_or(false)\n\n {\n\n return Err(ErrorKind::Transaction {\n\n message: \"Transactions are not supported in snapshot sessions\".into(),\n\n }\n\n .into());\n\n }\n\n match self.transaction.state {\n\n TransactionState::Starting | TransactionState::InProgress => {\n\n return Err(ErrorKind::Transaction {\n\n message: \"transaction already in progress\".into(),\n\n }\n\n .into());\n\n }\n\n TransactionState::Committed { .. 
} => {\n\n self.unpin(); // Unpin session if previous transaction is committed.\n\n }\n\n _ => {}\n\n }\n", "file_path": "src/client/session/mod.rs", "rank": 84, "score": 204360.48071226708 }, { "content": " /// Increments the txn_number and returns the new value.\n\n pub(crate) fn get_and_increment_txn_number(&mut self) -> i64 {\n\n self.increment_txn_number();\n\n self.server_session.txn_number\n\n }\n\n\n\n /// Pin mongos to session.\n\n pub(crate) fn pin_mongos(&mut self, address: ServerAddress) {\n\n self.transaction.pinned = Some(TransactionPin::Mongos(SelectionCriteria::Predicate(\n\n Arc::new(move |server_info: &ServerInfo| *server_info.address() == address),\n\n )));\n\n }\n\n\n\n /// Pin the connection to the session.\n\n pub(crate) fn pin_connection(&mut self, handle: PinnedConnectionHandle) {\n\n self.transaction.pinned = Some(TransactionPin::Connection(handle));\n\n }\n\n\n\n pub(crate) fn unpin(&mut self) {\n\n self.transaction.pinned = None;\n", "file_path": "src/client/session/mod.rs", "rank": 85, "score": 204360.28378508828 }, { "content": " resolve_options!(\n\n self.client,\n\n options,\n\n [read_concern, write_concern, selection_criteria]\n\n );\n\n\n\n if let Some(ref options) = options {\n\n if !options\n\n .write_concern\n\n .as_ref()\n\n .map(|wc| wc.is_acknowledged())\n\n .unwrap_or(true)\n\n {\n\n return Err(ErrorKind::Transaction {\n\n message: \"transactions do not support unacknowledged write concerns\"\n\n .into(),\n\n }\n\n .into());\n\n }\n\n }\n", "file_path": "src/client/session/mod.rs", "rank": 86, "score": 204359.4116400474 }, { "content": " match &mut self.transaction.state {\n\n TransactionState::None => Err(ErrorKind::Transaction {\n\n message: \"no transaction started\".into(),\n\n }\n\n .into()),\n\n TransactionState::Aborted => Err(ErrorKind::Transaction {\n\n message: \"Cannot call commitTransaction after calling abortTransaction\".into(),\n\n }\n\n .into()),\n\n TransactionState::Starting => {\n\n 
self.transaction.commit(false);\n\n Ok(())\n\n }\n\n TransactionState::InProgress => {\n\n let commit_transaction = CommitTransaction::new(self.transaction.options.clone());\n\n self.transaction.commit(true);\n\n self.client\n\n .clone()\n\n .execute_operation(commit_transaction, self)\n\n .await\n", "file_path": "src/client/session/mod.rs", "rank": 87, "score": 204359.20286078253 }, { "content": " /// }\n\n /// ```\n\n ///\n\n /// This operation will retry once upon failure if the connection and encountered error support\n\n /// retryability. See the documentation\n\n /// [here](https://docs.mongodb.com/manual/core/retryable-writes/) for more information on\n\n /// retryable writes.\n\n pub async fn abort_transaction(&mut self) -> Result<()> {\n\n match self.transaction.state {\n\n TransactionState::None => Err(ErrorKind::Transaction {\n\n message: \"no transaction started\".into(),\n\n }\n\n .into()),\n\n TransactionState::Committed { .. } => Err(ErrorKind::Transaction {\n\n message: \"Cannot call abortTransaction after calling commitTransaction\".into(),\n\n }\n\n .into()),\n\n TransactionState::Aborted => Err(ErrorKind::Transaction {\n\n message: \"cannot call abortTransaction twice\".into(),\n\n }\n", "file_path": "src/client/session/mod.rs", "rank": 88, "score": 204358.57152605517 }, { "content": " match self.client.transaction_support_status().await? 
{\n\n TransactionSupportStatus::Supported => {\n\n let mut options = match options.into() {\n\n Some(mut options) => {\n\n if let Some(defaults) = self.default_transaction_options() {\n\n merge_options!(\n\n defaults,\n\n &mut options,\n\n [\n\n read_concern,\n\n write_concern,\n\n selection_criteria,\n\n max_commit_time\n\n ]\n\n );\n\n }\n\n Some(options)\n\n }\n\n None => self.default_transaction_options().cloned(),\n\n };\n", "file_path": "src/client/session/mod.rs", "rank": 89, "score": 204357.92850366785 }, { "content": " self.options = None;\n\n self.pinned = None;\n\n self.recovery_token = None;\n\n }\n\n\n\n pub(crate) fn pinned_mongos(&self) -> Option<&SelectionCriteria> {\n\n match &self.pinned {\n\n Some(TransactionPin::Mongos(s)) => Some(s),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub(crate) fn pinned_connection(&self) -> Option<&PinnedConnectionHandle> {\n\n match &self.pinned {\n\n Some(TransactionPin::Connection(c)) => Some(c),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn take(&mut self) -> Self {\n", "file_path": "src/client/session/mod.rs", "rank": 90, "score": 204356.56211747119 }, { "content": "\n\n self.increment_txn_number();\n\n self.transaction.start(options);\n\n Ok(())\n\n }\n\n _ => Err(ErrorKind::Transaction {\n\n message: \"Transactions are not supported by this deployment\".into(),\n\n }\n\n .into()),\n\n }\n\n }\n\n\n\n /// Commits the transaction that is currently active on this session.\n\n ///\n\n ///\n\n /// This method may return an error with a [`crate::error::UNKNOWN_TRANSACTION_COMMIT_RESULT`]\n\n /// label. This label indicates that it is unknown whether the commit has satisfied the write\n\n /// concern associated with the transaction. 
If an error with this label is returned, it is\n\n /// safe to retry the commit until the write concern is satisfied or an error without the label\n\n /// is returned.\n", "file_path": "src/client/session/mod.rs", "rank": 91, "score": 204356.5174232354 }, { "content": " }\n\n }\n\n\n\n /// Determines if this server session is about to expire in a short amount of time (1 minute).\n\n fn is_about_to_expire(&self, logical_session_timeout: Option<Duration>) -> bool {\n\n let timeout = match logical_session_timeout {\n\n Some(t) => t,\n\n None => return false,\n\n };\n\n let expiration_date = self.last_use + timeout;\n\n expiration_date < Instant::now() + Duration::from_secs(60)\n\n }\n\n}\n", "file_path": "src/client/session/mod.rs", "rank": 92, "score": 204355.6609242132 }, { "content": "\n\nimpl Transaction {\n\n pub(crate) fn start(&mut self, options: Option<TransactionOptions>) {\n\n self.state = TransactionState::Starting;\n\n self.options = options;\n\n self.recovery_token = None;\n\n }\n\n\n\n pub(crate) fn commit(&mut self, data_committed: bool) {\n\n self.state = TransactionState::Committed { data_committed };\n\n }\n\n\n\n pub(crate) fn abort(&mut self) {\n\n self.state = TransactionState::Aborted;\n\n self.options = None;\n\n self.pinned = None;\n\n }\n\n\n\n pub(crate) fn reset(&mut self) {\n\n self.state = TransactionState::None;\n", "file_path": "src/client/session/mod.rs", "rank": 93, "score": 204354.8621068988 }, { "content": " .into()),\n\n TransactionState::Starting => {\n\n self.transaction.abort();\n\n Ok(())\n\n }\n\n TransactionState::InProgress => {\n\n let write_concern = self\n\n .transaction\n\n .options\n\n .as_ref()\n\n .and_then(|options| options.write_concern.as_ref())\n\n .cloned();\n\n let abort_transaction =\n\n AbortTransaction::new(write_concern, self.transaction.pinned.take());\n\n self.transaction.abort();\n\n // Errors returned from running an abortTransaction command should be ignored.\n\n let _result = self\n\n .client\n\n 
.clone()\n\n .execute_operation(abort_transaction, &mut *self)\n", "file_path": "src/client/session/mod.rs", "rank": 94, "score": 204351.86161898327 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub(crate) enum TransactionState {\n\n None,\n\n Starting,\n\n InProgress,\n\n Committed {\n\n /// Whether any data was committed when commit_transaction was initially called. This is\n\n /// required to determine whether a commitTransaction command should be run if the user\n\n /// calls commit_transaction again.\n\n data_committed: bool,\n\n },\n\n Aborted,\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum TransactionPin {\n\n Mongos(SelectionCriteria),\n\n Connection(PinnedConnectionHandle),\n\n}\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 95, "score": 204345.20087562938 }, { "content": " Transaction {\n\n state: self.state.clone(),\n\n options: self.options.take(),\n\n pinned: self.pinned.take(),\n\n recovery_token: self.recovery_token.take(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Transaction {\n\n fn default() -> Self {\n\n Self {\n\n state: TransactionState::None,\n\n options: None,\n\n pinned: None,\n\n recovery_token: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 96, "score": 204335.53881385186 }, { "content": " .await;\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n fn default_transaction_options(&self) -> Option<&TransactionOptions> {\n\n self.options\n\n .as_ref()\n\n .and_then(|options| options.default_transaction_options.as_ref())\n\n }\n\n}\n\n\n", "file_path": "src/client/session/mod.rs", "rank": 97, "score": 204335.40272879248 }, { "content": "fn deserialize_command_started_events<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<Vec<CommandStartedEvent>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let docs = Vec::<Document>::deserialize(deserializer)?;\n\n Ok(Some(\n\n docs.iter()\n\n .map(|doc| {\n\n let event = doc.get_document(\"command_started_event\").unwrap();\n\n 
from_document(event.clone()).unwrap()\n\n })\n\n .collect(),\n\n ))\n\n}\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 98, "score": 204237.2895941221 }, { "content": "fn localhost_test_build_10gen(port: u16) -> ServerAddress {\n\n ServerAddress::Tcp {\n\n host: \"localhost.test.build.10gen.cc\".into(),\n\n port: Some(port),\n\n }\n\n}\n\n\n\nlazy_static::lazy_static! {\n\n static ref DEFAULT_HOSTS: Vec<ServerAddress> = vec![\n\n localhost_test_build_10gen(27017),\n\n localhost_test_build_10gen(27108),\n\n ];\n\n}\n\n\n\nasync fn run_test(new_hosts: Result<Vec<ServerAddress>>, expected_hosts: HashSet<ServerAddress>) {\n\n let mut options = ClientOptions::new_srv();\n\n options.hosts = DEFAULT_HOSTS.clone();\n\n options.test_options_mut().disable_monitoring_threads = true;\n\n let topology = Topology::new(options).unwrap();\n\n let mut monitor = SrvPollingMonitor::new(topology.downgrade()).unwrap();\n\n monitor\n\n .update_hosts(new_hosts.and_then(make_lookup_hosts), topology.clone())\n\n .await;\n\n\n\n assert_eq!(expected_hosts, topology.servers().await);\n\n}\n\n\n", "file_path": "src/sdam/srv_polling/test.rs", "rank": 99, "score": 203111.57401187054 } ]
Rust
src/cache/file_manager.rs
cloudfuse-io/cloud-readers-rs
498ae2e6bbcdc170659bb76d428e5135f8041724
use std::collections::BTreeMap; use std::fmt; use std::sync::{Arc, Condvar, Mutex}; use anyhow::{anyhow, bail, ensure, Result}; use itertools::Itertools; use tokio::sync::mpsc::UnboundedSender; use super::Range; #[derive(Clone)] pub(crate) enum Download { Pending(usize), Done(Arc<Vec<u8>>), Error(String), } impl fmt::Debug for Download { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Download::Pending(len) => write!(f, "Pending({} bytes)", len), Download::Done(data) => write!(f, "Done({} bytes)", data.len()), Download::Error(error) => write!(f, "Error({:?})", error), } } } pub(crate) struct RangeCursor { data: Arc<Vec<u8>>, offset: u64, } impl fmt::Debug for RangeCursor { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RangeCursor") .field("data", &format!("vec![x;{}]", self.data.len())) .field("offset", &self.offset) .finish() } } impl RangeCursor { fn try_new(data: Arc<Vec<u8>>, offset: u64) -> Result<Self> { ensure!( data.len() > offset as usize, "Out of bound in RangeCursor: (offset={}) >= (length={})", offset, data.len(), ); Ok(Self { data, offset }) } pub(crate) fn read(self, buf: &mut [u8]) -> usize { let len = std::cmp::min(buf.len(), self.data.len() - self.offset as usize); buf[0..len] .clone_from_slice(&self.data[self.offset as usize..(self.offset as usize + len)]); len } } #[derive(Clone)] pub(crate) struct FileCache { ranges: Arc<Mutex<BTreeMap<u64, Download>>>, cv: Arc<Condvar>, file_size: u64, } fn fmt_debug(map: &BTreeMap<u64, Download>) -> String { map.iter() .map(|(pos, dl)| format!("-- Start={:0>10} Status={:?}", pos, dl)) .join("\n") } impl fmt::Debug for FileCache { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let range_guard = self.ranges.lock().unwrap(); write!(f, "{}", fmt_debug(&*range_guard)) } } impl FileCache { pub(crate) fn new(file_size: u64) -> Self { Self { ranges: Arc::new(Mutex::new(BTreeMap::new())), cv: Arc::new(Condvar::new()), file_size, } } pub(crate) fn 
insert(&self, start: u64, download: Download) { let mut range_guard = self.ranges.lock().unwrap(); range_guard.insert(start, download); self.cv.notify_all() } fn get_range(&self, start: u64) -> Result<RangeCursor> { use std::ops::Bound::{Included, Unbounded}; let mut ranges_guard = self.ranges.lock().unwrap(); ensure!(ranges_guard.len() > 0, "No download scheduled"); let mut before = ranges_guard .range((Unbounded, Included(start))) .next_back() .map(|(start, dl)| (*start, dl.clone())); while let Some((_, Download::Pending(_))) = before { ranges_guard = self.cv.wait(ranges_guard).unwrap(); before = ranges_guard .range((Unbounded, Included(start))) .next_back() .map(|(start, dl)| (*start, dl.clone())); } let before = before.ok_or_else(|| { anyhow!( "Download not scheduled at position {}, scheduled ranges are:\n{}", start, fmt_debug(&*ranges_guard), ) })?; match before.1 { Download::Done(bytes) => { ensure!( before.0 + bytes.len() as u64 > start, "Download not scheduled at position {}, scheduled ranges are:\n{}", start, fmt_debug(&*ranges_guard), ); RangeCursor::try_new(bytes, start - before.0) } Download::Error(err) => bail!(err), Download::Pending(_) => unreachable!(), } } } #[derive(Clone)] pub struct FileManager { cache: FileCache, tx: UnboundedSender<Range>, } impl FileManager { pub(crate) fn new(cache: FileCache, tx: UnboundedSender<Range>) -> Self { Self { cache, tx } } pub fn queue_download(&self, ranges: Vec<Range>) -> Result<()> { for range in ranges { self.cache .insert(range.start, Download::Pending(range.length)); self.tx.send(range).map_err(|e| anyhow!(e.to_string()))?; } Ok(()) } pub(crate) fn get_range(&self, start: u64) -> Result<RangeCursor> { self.cache.get_range(start) } pub fn get_file_size(&self) -> u64 { self.cache.file_size } } #[cfg(test)] mod tests { use super::super::mock::*; use super::super::DownloadCache; use super::*; #[tokio::test] async fn test_read_at_0() { let file_manager = init_mock(1000).await; file_manager 
.queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); assert_cursor(file_manager, 0, pattern(0, 100)) .await .expect("Could not get range at 0 with download of [0:100["); } #[tokio::test] async fn test_read_with_offset() { let file_manager = init_mock(1000).await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); assert_cursor(file_manager, 50, pattern(50, 100)) .await .expect("Could not get range at 50 with download of [0:100["); } #[tokio::test] async fn test_read_uninited() { let file_manager = init_mock(1000).await; let err_msg = assert_cursor(file_manager, 0, pattern(0, 100)) .await .expect_err("Read file without queued downloads should fail") .to_string(); assert_eq!(err_msg, "No download scheduled"); } #[tokio::test] async fn test_read_outside_download() { let file_manager = init_mock(1000).await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); let err_msg = assert_cursor(file_manager, 120, pattern(120, 200)) .await .expect_err("Read file without scheduled downloads should fail") .to_string(); assert_eq!( err_msg, "\ Download not scheduled at position 120, scheduled ranges are: -- Start=0000000000 Status=Done(100 bytes)" ); } #[tokio::test] async fn test_read_error_downloader() { let mut download_cache = DownloadCache::new(1); let file_manager = download_cache .register(Box::new(ErrorFileDescription {})) .await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); let error = tokio::task::spawn_blocking(move || -> Result<RangeCursor> { file_manager.get_range(0) }) .await .unwrap() .unwrap_err(); assert_eq!( format!("{:?}", error), "Error in ErrorDownloader\n\nCaused by:\n Download Failed" ); } async fn init_mock(len: u64) -> FileManager { let mut download_cache = DownloadCache::new(1); let pattern_file_description = 
PatternFileDescription::new(len); download_cache .register(Box::new(pattern_file_description)) .await } async fn assert_cursor(file_manager: FileManager, start: u64, target: Vec<u8>) -> Result<()> { let target_length = target.len(); let cursor = tokio::task::spawn_blocking(move || -> Result<RangeCursor> { file_manager.get_range(start) }) .await .unwrap()?; let mut content = vec![0u8; target_length]; cursor.read(&mut content); assert_eq!(content, target); Ok(()) } }
use std::collections::BTreeMap; use std::fmt; use std::sync::{Arc, Condvar, Mutex}; use anyhow::{anyhow, bail, ensure, Result}; use itertools::Itertools; use tokio::sync::mpsc::UnboundedSender; use super::Range; #[derive(Clone)] pub(crate) enum Download { Pending(usize), Done(Arc<Vec<u8>>), Error(String), } impl fmt::Debug for Download { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Download::Pending(len) => write!(f, "Pending({} bytes)", len), Download::Done(data) => write!(f, "Done({} bytes)", data.len()), Download::Error(error) => write!(f, "Error({:?})", error), } } } pub(crate) struct RangeCursor { data: Arc<Vec<u8>>, offset: u64, } impl fmt::Debug for RangeCursor { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RangeCursor") .field("data", &format!("vec![x;{}]", self.data.len())) .field("offset", &self.offset) .finish() } } impl RangeCursor { fn try_new(data: Arc<Vec<u8>>, offse
pub(crate) fn read(self, buf: &mut [u8]) -> usize { let len = std::cmp::min(buf.len(), self.data.len() - self.offset as usize); buf[0..len] .clone_from_slice(&self.data[self.offset as usize..(self.offset as usize + len)]); len } } #[derive(Clone)] pub(crate) struct FileCache { ranges: Arc<Mutex<BTreeMap<u64, Download>>>, cv: Arc<Condvar>, file_size: u64, } fn fmt_debug(map: &BTreeMap<u64, Download>) -> String { map.iter() .map(|(pos, dl)| format!("-- Start={:0>10} Status={:?}", pos, dl)) .join("\n") } impl fmt::Debug for FileCache { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let range_guard = self.ranges.lock().unwrap(); write!(f, "{}", fmt_debug(&*range_guard)) } } impl FileCache { pub(crate) fn new(file_size: u64) -> Self { Self { ranges: Arc::new(Mutex::new(BTreeMap::new())), cv: Arc::new(Condvar::new()), file_size, } } pub(crate) fn insert(&self, start: u64, download: Download) { let mut range_guard = self.ranges.lock().unwrap(); range_guard.insert(start, download); self.cv.notify_all() } fn get_range(&self, start: u64) -> Result<RangeCursor> { use std::ops::Bound::{Included, Unbounded}; let mut ranges_guard = self.ranges.lock().unwrap(); ensure!(ranges_guard.len() > 0, "No download scheduled"); let mut before = ranges_guard .range((Unbounded, Included(start))) .next_back() .map(|(start, dl)| (*start, dl.clone())); while let Some((_, Download::Pending(_))) = before { ranges_guard = self.cv.wait(ranges_guard).unwrap(); before = ranges_guard .range((Unbounded, Included(start))) .next_back() .map(|(start, dl)| (*start, dl.clone())); } let before = before.ok_or_else(|| { anyhow!( "Download not scheduled at position {}, scheduled ranges are:\n{}", start, fmt_debug(&*ranges_guard), ) })?; match before.1 { Download::Done(bytes) => { ensure!( before.0 + bytes.len() as u64 > start, "Download not scheduled at position {}, scheduled ranges are:\n{}", start, fmt_debug(&*ranges_guard), ); RangeCursor::try_new(bytes, start - before.0) } Download::Error(err) 
=> bail!(err), Download::Pending(_) => unreachable!(), } } } #[derive(Clone)] pub struct FileManager { cache: FileCache, tx: UnboundedSender<Range>, } impl FileManager { pub(crate) fn new(cache: FileCache, tx: UnboundedSender<Range>) -> Self { Self { cache, tx } } pub fn queue_download(&self, ranges: Vec<Range>) -> Result<()> { for range in ranges { self.cache .insert(range.start, Download::Pending(range.length)); self.tx.send(range).map_err(|e| anyhow!(e.to_string()))?; } Ok(()) } pub(crate) fn get_range(&self, start: u64) -> Result<RangeCursor> { self.cache.get_range(start) } pub fn get_file_size(&self) -> u64 { self.cache.file_size } } #[cfg(test)] mod tests { use super::super::mock::*; use super::super::DownloadCache; use super::*; #[tokio::test] async fn test_read_at_0() { let file_manager = init_mock(1000).await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); assert_cursor(file_manager, 0, pattern(0, 100)) .await .expect("Could not get range at 0 with download of [0:100["); } #[tokio::test] async fn test_read_with_offset() { let file_manager = init_mock(1000).await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); assert_cursor(file_manager, 50, pattern(50, 100)) .await .expect("Could not get range at 50 with download of [0:100["); } #[tokio::test] async fn test_read_uninited() { let file_manager = init_mock(1000).await; let err_msg = assert_cursor(file_manager, 0, pattern(0, 100)) .await .expect_err("Read file without queued downloads should fail") .to_string(); assert_eq!(err_msg, "No download scheduled"); } #[tokio::test] async fn test_read_outside_download() { let file_manager = init_mock(1000).await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); let err_msg = assert_cursor(file_manager, 120, pattern(120, 200)) .await .expect_err("Read file without scheduled 
downloads should fail") .to_string(); assert_eq!( err_msg, "\ Download not scheduled at position 120, scheduled ranges are: -- Start=0000000000 Status=Done(100 bytes)" ); } #[tokio::test] async fn test_read_error_downloader() { let mut download_cache = DownloadCache::new(1); let file_manager = download_cache .register(Box::new(ErrorFileDescription {})) .await; file_manager .queue_download(vec![Range { start: 0, length: 100, }]) .expect("Could not queue Range on handle"); let error = tokio::task::spawn_blocking(move || -> Result<RangeCursor> { file_manager.get_range(0) }) .await .unwrap() .unwrap_err(); assert_eq!( format!("{:?}", error), "Error in ErrorDownloader\n\nCaused by:\n Download Failed" ); } async fn init_mock(len: u64) -> FileManager { let mut download_cache = DownloadCache::new(1); let pattern_file_description = PatternFileDescription::new(len); download_cache .register(Box::new(pattern_file_description)) .await } async fn assert_cursor(file_manager: FileManager, start: u64, target: Vec<u8>) -> Result<()> { let target_length = target.len(); let cursor = tokio::task::spawn_blocking(move || -> Result<RangeCursor> { file_manager.get_range(start) }) .await .unwrap()?; let mut content = vec![0u8; target_length]; cursor.read(&mut content); assert_eq!(content, target); Ok(()) } }
t: u64) -> Result<Self> { ensure!( data.len() > offset as usize, "Out of bound in RangeCursor: (offset={}) >= (length={})", offset, data.len(), ); Ok(Self { data, offset }) }
function_block-function_prefixed
[ { "content": "/// A downloader that always returns an error\n\nstruct ErrorDownloader;\n\n\n\n#[async_trait]\n\nimpl Downloader for ErrorDownloader {\n\n async fn download(&self, _file: String, _start: u64, _length: usize) -> Result<Vec<u8>> {\n\n Err(anyhow!(\"Download Failed\").context(\"Error in ErrorDownloader\"))\n\n }\n\n}\n\n\n\n/// A FileDescription implementation that uses [`ErrorDownloader`] to always fail.\n\npub struct ErrorFileDescription {}\n\n\n\nimpl FileDescription for ErrorFileDescription {\n\n fn get_downloader(&self) -> Arc<dyn Downloader> {\n\n Arc::new(ErrorDownloader)\n\n }\n\n\n\n fn get_downloader_id(&self) -> String {\n\n \"error_downloader\".to_owned()\n\n }\n\n\n\n fn get_uri(&self) -> String {\n\n \"error_uri\".to_owned()\n\n }\n\n\n\n fn get_file_size(&self) -> u64 {\n\n 1000\n\n }\n\n}\n", "file_path": "src/cache/mock.rs", "rank": 0, "score": 75372.59698021025 }, { "content": "/// A downloader that returns a simple pattern (1,2,3...254,255,1,2...)\n\n/// Waits for 10ms before returning its result to trigger cache misses\n\nstruct PatternDownloader;\n\n\n", "file_path": "src/cache/mock.rs", "rank": 2, "score": 54485.044586960255 }, { "content": "#[derive(Hash, PartialEq, Eq)]\n\nstruct CacheKey {\n\n pub downloader_id: String,\n\n pub uri: String,\n\n}\n\n\n", "file_path": "src/cache/download_cache.rs", "rank": 3, "score": 52488.99767586982 }, { "content": "type DownloaderMap = Arc<Mutex<HashMap<String, Arc<dyn Downloader>>>>;\n\n\n", "file_path": "src/cache/download_cache.rs", "rank": 4, "score": 46255.51126341609 }, { "content": "type DownloaderFactory = Box<dyn Fn(&str) -> Arc<dyn Downloader> + Send>;\n\n\n\npub struct S3FileDescription {\n\n region: String,\n\n bucket: String,\n\n key: String,\n\n size: u64,\n\n downloader_factory: DownloaderFactory,\n\n}\n\n\n\nimpl S3FileDescription {\n\n pub fn new(region: String, bucket: String, key: String, size: u64) -> Self {\n\n S3FileDescription {\n\n region,\n\n bucket,\n\n key,\n\n 
size,\n\n downloader_factory: Box::new(move |reg| Arc::new(S3Downloader::new(new_client(reg)))),\n\n }\n\n }\n", "file_path": "src/s3_rusoto/file_description.rs", "rank": 5, "score": 40103.426348261106 }, { "content": "type FileCacheMap = Arc<Mutex<HashMap<CacheKey, FileCache>>>;\n\n\n\n/// [Start Here] Structure for caching download clients and downloaded data chunks.\n\n///\n\n/// The Download cache converts [`FileDescription`] trait objects into instances of [`FileManager`],\n\n/// registering the downloader and the file URI while doing so.\n\n/// The actual download strategy is specified on the [`FileManager`] object.\n\npub struct DownloadCache {\n\n data: FileCacheMap,\n\n downloaders: DownloaderMap,\n\n semaphore: Arc<Semaphore>,\n\n stats: CacheStats,\n\n release_rate: usize,\n\n current_max_parallel: Arc<AtomicUsize>,\n\n absolute_max_parallel: usize,\n\n}\n\n\n\nimpl DownloadCache {\n\n /// Create a cache capable of storing data chunks for multiple files.\n\n /// * `max_parallel` - The maximum number of parallel downloads\n", "file_path": "src/cache/download_cache.rs", "rank": 6, "score": 35935.1701377106 }, { "content": "fn new_client(region: &str) -> S3Client {\n\n let region = Region::from_str(region).unwrap();\n\n S3Client::new(region)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use rusoto_core::signature::SignedRequest;\n\n use rusoto_mock::{MockCredentialsProvider, MockRequestDispatcher};\n\n use rusoto_s3::S3Client;\n\n\n\n #[tokio::test]\n\n async fn test_file_description() {\n\n let mut file_description = S3FileDescription::new(\n\n \"test-region-1\".to_owned(),\n\n \"test_bucket\".to_owned(),\n\n \"test_key\".to_owned(),\n\n 1000,\n\n );\n", "file_path": "src/s3_rusoto/file_description.rs", "rank": 7, "score": 26044.529046775857 }, { "content": "/// The pattern (1,2,3...254,255,1,2...) 
in the range [start,end[\n\npub fn pattern(start: usize, end: usize) -> Vec<u8> {\n\n (start..end).map(|i| (i % 256) as u8).collect::<Vec<_>>()\n\n}\n\n\n\n#[async_trait]\n\nimpl Downloader for PatternDownloader {\n\n async fn download(&self, _file: String, start: u64, length: usize) -> Result<Vec<u8>> {\n\n tokio::time::sleep(Duration::from_millis(10)).await;\n\n Ok(pattern(start as usize, start as usize + length))\n\n }\n\n}\n\n\n\n/// A FileDescription implementation that uses [`PatternDownloader`] to generate mock bytes\n\npub struct PatternFileDescription {\n\n length: u64,\n\n}\n\n\n\nimpl PatternFileDescription {\n\n #[allow(dead_code)]\n\n pub fn new(length: u64) -> Self {\n", "file_path": "src/cache/mock.rs", "rank": 8, "score": 23448.402518127063 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::{ensure, Context, Result};\n\nuse async_trait::async_trait;\n\nuse rusoto_s3::{GetObjectOutput, GetObjectRequest, S3Client, S3};\n\nuse tokio::io::AsyncReadExt;\n\n\n\nuse crate::Downloader;\n\n\n\n//// Implementation of the `download` function used by the range cache to fetch data\n\n\n\n#[derive(Clone)]\n\npub struct S3Downloader {\n\n client: Arc<S3Client>,\n\n}\n\n\n\n#[async_trait]\n\nimpl Downloader for S3Downloader {\n\n async fn download(&self, uri: String, start: u64, length: usize) -> Result<Vec<u8>> {\n\n let mut file_id_split = uri.split(\"/\");\n", "file_path": "src/s3_rusoto/downloader.rs", "rank": 9, "score": 20452.74304578991 }, { "content": " let range = format!(\"bytes={}-{}\", start, start + length as u64 - 1);\n\n let get_obj_req = GetObjectRequest {\n\n bucket: file_id_split.next().unwrap().to_owned(),\n\n key: file_id_split.collect::<Vec<&str>>().join(\"/\"),\n\n range: Some(range),\n\n ..Default::default()\n\n };\n\n let obj: GetObjectOutput = self\n\n .client\n\n .get_object(get_obj_req)\n\n .await\n\n .context(\"Rusoto GetObject error\")?;\n\n let mut reader = obj.body.unwrap().into_async_read();\n\n let mut res = vec![];\n\n 
res.reserve(length);\n\n let bytes_read = reader\n\n .read_to_end(&mut res)\n\n .await\n\n .context(\"Rusoto buffer read error\")?;\n\n ensure!(bytes_read == length, \"Not the expected number of bytes\");\n", "file_path": "src/s3_rusoto/downloader.rs", "rank": 10, "score": 20448.79090435124 }, { "content": " Some(x + release_rate)\n\n }\n\n })\n\n .unwrap();\n\n std::cmp::min(\n\n release_rate,\n\n std::cmp::max(absolute_max_parallel - old_current, 1),\n\n )\n\n }\n\n}\n\n\n\nimpl fmt::Debug for DownloadCache {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let data_guard = self.data.lock().unwrap();\n\n for (key, file_cache) in &*data_guard {\n\n write!(\n\n f,\n\n \"file = {} / {}:\\n{:?}\",\n\n key.downloader_id, key.uri, file_cache\n\n )?;\n", "file_path": "src/cache/download_cache.rs", "rank": 11, "score": 20448.550671153098 }, { "content": " Download::Done(Arc::new(downloaded_chunk))\n\n }\n\n Err(err) => Download::Error(format!(\"{:?}\", err)),\n\n };\n\n file_cache.insert(message.start, dl_enum);\n\n });\n\n }\n\n });\n\n file_manager\n\n }\n\n\n\n /// Provides a reference that gives access to the internal statistics in a synchronized fashion.\n\n pub fn get_stats(&self) -> &CacheStats {\n\n &self.stats\n\n }\n\n\n\n fn register_downloader(&self, file_description: &dyn FileDescription) -> Arc<dyn Downloader> {\n\n let downloader_id = file_description.get_downloader_id();\n\n let mut dls_guard = self.downloaders.lock().unwrap();\n\n let current = dls_guard.get(&downloader_id);\n", "file_path": "src/cache/download_cache.rs", "rank": 12, "score": 20447.580168283923 }, { "content": " let dl_start_time = Instant::now();\n\n\n\n let dl_res = downloader_ref\n\n .download(uri.clone(), message.start, message.length)\n\n .await;\n\n // once the download is completed, release the permit at the configured rate\n\n let new_permits = DownloadCache::permit_leap(\n\n &current_max_parallel,\n\n absolute_max_parallel,\n\n release_rate,\n\n );\n\n 
semaphore_ref.add_permits(new_permits);\n\n let dl_enum = match dl_res {\n\n Ok(downloaded_chunk) => {\n\n stats_ref.record_download(DownloadStat {\n\n dl_duration: dl_start_time.elapsed().as_millis() as u64,\n\n size: downloaded_chunk.len() as u64,\n\n dl_start: dl_start_time.duration_since(register_time).as_millis()\n\n as u64,\n\n });\n", "file_path": "src/cache/download_cache.rs", "rank": 13, "score": 20447.440438348804 }, { "content": " Ok(res)\n\n }\n\n}\n\n\n\nimpl S3Downloader {\n\n pub fn new(client: S3Client) -> Self {\n\n S3Downloader {\n\n client: Arc::new(client),\n\n }\n\n }\n\n}\n", "file_path": "src/s3_rusoto/downloader.rs", "rank": 14, "score": 20446.768258835295 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::Instant;\n\n\n\nuse tokio::sync::{mpsc::unbounded_channel, Semaphore};\n\n\n\nuse super::file_manager::{Download, FileCache, FileManager};\n\nuse super::{CacheStats, DownloadStat};\n\nuse super::{Downloader, FileDescription, Range};\n\n\n", "file_path": "src/cache/download_cache.rs", "rank": 15, "score": 20446.627855339495 }, { "content": " pub fn new(max_parallel: usize) -> Self {\n\n DownloadCache::new_with(max_parallel, 1, max_parallel)\n\n }\n\n\n\n /// Same as `new(max_parallel)`, but with finer options:\n\n /// * `initial_permits` - The maximum number of parallel downloads initially\n\n /// * `release_rate` - The number of new downloads that can be started each time a download finishes.\n\n /// A `release_rate` of 1 maintains the maximum parallel downloads to the `initial_permits`.\n\n /// * `max_parallel` - The maximum number of parallel downloads\n\n pub fn new_with(initial_permits: usize, release_rate: usize, max_parallel: usize) -> Self {\n\n Self {\n\n data: Arc::new(Mutex::new(HashMap::new())),\n\n downloaders: Arc::new(Mutex::new(HashMap::new())),\n\n semaphore: 
Arc::new(tokio::sync::Semaphore::new(initial_permits)),\n\n stats: CacheStats::new(),\n\n release_rate,\n\n current_max_parallel: Arc::new(AtomicUsize::new(initial_permits)),\n\n absolute_max_parallel: max_parallel,\n\n }\n\n }\n", "file_path": "src/cache/download_cache.rs", "rank": 16, "score": 20445.330519884876 }, { "content": "\n\n /// Converts a [`FileDescription`] trait object into a [`FileManager`].\n\n /// Spawns a task that will download the file chunks queued on the [`FileManager`].\n\n /// TODO do not re-download chunks if same file was already registered\n\n pub async fn register(&mut self, file_description: Box<dyn FileDescription>) -> FileManager {\n\n let register_time = Instant::now();\n\n let (tx, mut rx) = unbounded_channel::<Range>();\n\n let file_cache;\n\n {\n\n let mut data_guard = self.data.lock().unwrap();\n\n file_cache = data_guard\n\n .entry(CacheKey {\n\n downloader_id: file_description.get_downloader_id(),\n\n uri: file_description.get_uri(),\n\n })\n\n .or_insert_with(|| FileCache::new(file_description.get_file_size()))\n\n .clone();\n\n }\n\n let file_manager = FileManager::new(file_cache.clone(), tx);\n\n let downloader_ref = self.register_downloader(&*file_description);\n", "file_path": "src/cache/download_cache.rs", "rank": 17, "score": 20445.273058009738 }, { "content": " match &current {\n\n Some(downloader) => Arc::clone(downloader),\n\n None => {\n\n let new_downloader = file_description.get_downloader();\n\n dls_guard.insert(downloader_id, Arc::clone(&new_downloader));\n\n new_downloader\n\n }\n\n }\n\n }\n\n\n\n fn permit_leap(\n\n current_max_parallel: &AtomicUsize,\n\n absolute_max_parallel: usize,\n\n release_rate: usize,\n\n ) -> usize {\n\n let old_current = current_max_parallel\n\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |x| {\n\n if x + release_rate >= absolute_max_parallel {\n\n Some(absolute_max_parallel)\n\n } else {\n", "file_path": "src/cache/download_cache.rs", "rank": 18, "score": 20443.44372585098 
}, { "content": " }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn test_permit_leap() {\n\n // follow release rate if you can\n\n assert_eq!(DownloadCache::permit_leap(&AtomicUsize::new(1), 3, 1), 1);\n\n assert_eq!(DownloadCache::permit_leap(&AtomicUsize::new(1), 3, 2), 2);\n\n // release what remains before reaching max otherwise\n\n assert_eq!(DownloadCache::permit_leap(&AtomicUsize::new(1), 3, 3), 2);\n\n assert_eq!(DownloadCache::permit_leap(&AtomicUsize::new(1), 2, 3), 1);\n\n // permit leap must be at least one\n\n assert_eq!(DownloadCache::permit_leap(&AtomicUsize::new(3), 3, 3), 1);\n\n }\n\n}\n", "file_path": "src/cache/download_cache.rs", "rank": 19, "score": 20443.207651646757 }, { "content": " let semaphore_ref = Arc::clone(&self.semaphore);\n\n let stat_ref = self.stats.clone();\n\n let release_rate = self.release_rate;\n\n let absolute_max_parallel = self.absolute_max_parallel;\n\n let current_max_parallel = Arc::clone(&self.current_max_parallel);\n\n let uri = file_description.get_uri();\n\n tokio::spawn(async move {\n\n while let Some(message) = rx.recv().await {\n\n // obtain a permit, it will be released once the download completes\n\n let permit = semaphore_ref.acquire().await.unwrap();\n\n permit.forget();\n\n // run download in a dedicated task\n\n let downloader_ref = Arc::clone(&downloader_ref);\n\n let semaphore_ref = Arc::clone(&semaphore_ref);\n\n let current_max_parallel = Arc::clone(&current_max_parallel);\n\n let stats_ref = stat_ref.clone();\n\n let file_cache = file_cache.clone();\n\n let uri = uri.clone();\n\n tokio::spawn(async move {\n\n // start the actual download\n", "file_path": "src/cache/download_cache.rs", "rank": 20, "score": 20439.929895269233 }, { "content": "#[async_trait]\n\npub trait Downloader: Send + Sync {\n\n async fn download(&self, uri: String, start: u64, length: usize) -> Result<Vec<u8>>;\n\n}\n\n\n\n/// Start and length of a file 
chunk\n\n#[derive(Deserialize, Clone)]\n\npub struct Range {\n\n pub start: u64,\n\n pub length: usize,\n\n}\n\n\n", "file_path": "src/cache/models.rs", "rank": 21, "score": 17538.913343002067 }, { "content": " }\n\n let cache_cursor = self\n\n .cache\n\n .get_range(self.position)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;\n\n\n\n let bytes_read = cache_cursor.read(buf);\n\n\n\n // update reader position\n\n self.position += bytes_read as u64;\n\n Ok(bytes_read)\n\n }\n\n}\n\n\n\nimpl Seek for CacheCursor {\n\n /// implementation inspired from std::io::Cursor\n\n fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {\n\n let (base_pos, offset) = match pos {\n\n SeekFrom::Start(n) => {\n\n self.position = n;\n", "file_path": "src/cache/cursor.rs", "rank": 26, "score": 14.186048883935563 }, { "content": "\n\n async fn init_mock(len: u64) -> (DownloadCache, FileManager) {\n\n let mut download_cache = DownloadCache::new(2);\n\n\n\n let mock_file_description = PatternFileDescription::new(len);\n\n\n\n let file_manager = download_cache\n\n .register(Box::new(mock_file_description))\n\n .await;\n\n\n\n (download_cache, file_manager)\n\n }\n\n\n\n /// Assert that the next `target.len()` bytes of `cursor` match the bytes in `target`\n\n /// SPAWNS A NEW THREAD TO PERFORM THE READ BECAUSE IT IS BLOCKING!\n\n async fn assert_cursor(cursor: &CacheCursor, target: Vec<u8>) -> io::Result<()> {\n\n let target_length = target.len();\n\n let mut cursor = cursor.clone();\n\n // perform blocking read in separate thread!\n\n let result = tokio::task::spawn_blocking(move || -> io::Result<Vec<u8>> {\n", "file_path": "src/cache/cursor.rs", "rank": 28, "score": 11.576447861305667 }, { "content": " pub(crate) fn record_download(&self, _: DownloadStat) {}\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[tokio::test]\n\n #[cfg(feature = \"stats\")]\n\n async fn test_sync() {\n\n let stats = CacheStats::new();\n\n for d in mock_data() {\n\n 
stats.record_download(d);\n\n }\n\n let result = stats.recorded_downloads();\n\n assert_eq!(result.len(), mock_data().len())\n\n }\n\n\n\n #[tokio::test]\n", "file_path": "src/cache/stats.rs", "rank": 30, "score": 10.859020455107002 }, { "content": "use std::io::{self, Read, Seek, SeekFrom};\n\n\n\nuse anyhow::anyhow;\n\n\n\nuse super::FileManager;\n\n\n\n/// Cursor that allows the to Read/Seek through a [`FileManager`].\n\n///\n\n/// Blocks if bytes are read that were not yet downloaded\n\n/// Fails if bytes are read that were not scheduled for downloading\n\n#[derive(Clone)]\n\npub struct CacheCursor {\n\n pub cache: FileManager,\n\n pub position: u64,\n\n}\n\n\n\nimpl Read for CacheCursor {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n if self.position == self.cache.get_file_size() {\n\n return Ok(0);\n", "file_path": "src/cache/cursor.rs", "rank": 32, "score": 10.638640585934107 }, { "content": " impl CacheStats {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n inner: Arc::new(InnerCacheStats {\n\n downloads: Mutex::new(vec![]),\n\n }),\n\n }\n\n }\n\n\n\n pub fn recorded_downloads(&self) -> Vec<DownloadStat> {\n\n let guard = self.inner.downloads.lock().unwrap();\n\n guard.clone()\n\n }\n\n\n\n pub(crate) fn record_download(&self, dl: DownloadStat) {\n\n let mut guard = self.inner.downloads.lock().unwrap();\n\n guard.push(dl);\n\n }\n\n }\n\n}\n", "file_path": "src/cache/stats.rs", "rank": 36, "score": 9.285300114561458 }, { "content": " cache: file_manager,\n\n position: 0,\n\n };\n\n\n\n let target = pattern(0, file_size as usize);\n\n // perform blocking read in separate thread!\n\n let result = tokio::task::spawn_blocking(move || -> io::Result<Vec<u8>> {\n\n let mut content = vec![];\n\n // we try to read the whole file\n\n cursor.read_to_end(&mut content)?;\n\n Ok(content)\n\n })\n\n .await\n\n .unwrap()\n\n .expect(&format!(\n\n \"Could not read bytes bytes [0:{len}[ in download [0:{len}[\",\n\n len = file_size,\n\n ));\n\n 
assert_eq!(result, target);\n\n\n", "file_path": "src/cache/cursor.rs", "rank": 37, "score": 9.255917967447855 }, { "content": " return Ok(n);\n\n }\n\n SeekFrom::End(n) => (self.cache.get_file_size(), n),\n\n SeekFrom::Current(n) => (self.position, n),\n\n };\n\n let new_pos = if offset >= 0 {\n\n base_pos.checked_add(offset as u64)\n\n } else {\n\n base_pos.checked_sub((offset.wrapping_neg()) as u64)\n\n };\n\n match new_pos {\n\n Some(n) => {\n\n self.position = n;\n\n Ok(self.position)\n\n }\n\n None => Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n anyhow!(\"invalid seek to a negative or overflowing position\"),\n\n )),\n\n }\n", "file_path": "src/cache/cursor.rs", "rank": 38, "score": 8.833679082587128 }, { "content": " #[cfg(not(feature = \"stats\"))]\n\n async fn test_noop() {\n\n let stats = CacheStats::new();\n\n for d in mock_data() {\n\n stats.record_download(d);\n\n }\n\n let result = stats.recorded_downloads();\n\n assert_eq!(result.len(), 0)\n\n }\n\n\n\n fn mock_data() -> Vec<DownloadStat> {\n\n vec![\n\n DownloadStat {\n\n dl_duration: 231,\n\n dl_start: 4564,\n\n size: 10000000,\n\n },\n\n DownloadStat {\n\n dl_duration: 123,\n\n dl_start: 6547,\n\n size: 10000000,\n\n },\n\n ]\n\n }\n\n}\n", "file_path": "src/cache/stats.rs", "rank": 39, "score": 8.623450234918414 }, { "content": " Self { length }\n\n }\n\n}\n\n\n\nimpl FileDescription for PatternFileDescription {\n\n fn get_downloader(&self) -> Arc<dyn Downloader> {\n\n Arc::new(PatternDownloader)\n\n }\n\n\n\n fn get_downloader_id(&self) -> String {\n\n \"pattern_downloader\".to_owned()\n\n }\n\n\n\n fn get_uri(&self) -> String {\n\n \"pattern_uri\".to_owned()\n\n }\n\n\n\n fn get_file_size(&self) -> u64 {\n\n self.length\n\n }\n\n}\n\n\n", "file_path": "src/cache/mock.rs", "rank": 41, "score": 7.873804320538786 }, { "content": "\n\n#[allow(dead_code)]\n\npub mod noop {\n\n use super::DownloadStat;\n\n\n\n /// A structure that does noop when called to collect stats.\n\n ///\n\n /// 
Calls to on this structure will mostly be optimized out by the compiler.\n\n #[derive(Clone)]\n\n pub struct CacheStats;\n\n\n\n impl CacheStats {\n\n pub(crate) fn new() -> Self {\n\n Self\n\n }\n\n\n\n pub fn recorded_downloads(&self) -> Vec<DownloadStat> {\n\n vec![]\n\n }\n\n\n", "file_path": "src/cache/stats.rs", "rank": 42, "score": 7.754221970600429 }, { "content": "-- Start=0000000000 Status=Done(100 bytes)\n\n-- Start=0000000100 Status=Done(100 bytes)\n\n-- Start=0000000200 Status=Done(100 bytes)\"\n\n );\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_read_complete_file() {\n\n let file_size = 100;\n\n let (download_cache, file_manager) = init_mock(file_size).await;\n\n\n\n // we schedule the download of the whole file\n\n file_manager\n\n .queue_download(vec![Range {\n\n start: 0,\n\n length: file_size as usize,\n\n }])\n\n .expect(\"Could not queue Range on handle\");\n\n\n\n let mut cursor = CacheCursor {\n", "file_path": "src/cache/cursor.rs", "rank": 43, "score": 7.717175464496954 }, { "content": " pub fn set_downloader_factory(&mut self, factory: DownloaderFactory) {\n\n self.downloader_factory = factory;\n\n }\n\n}\n\n\n\nimpl FileDescription for S3FileDescription {\n\n fn get_downloader(&self) -> Arc<dyn Downloader> {\n\n (self.downloader_factory)(&self.region)\n\n }\n\n\n\n fn get_downloader_id(&self) -> String {\n\n format!(\"s3_rusoto:{}\", &self.region)\n\n }\n\n\n\n fn get_uri(&self) -> String {\n\n format!(\"{}/{}\", self.bucket, self.key)\n\n }\n\n\n\n fn get_file_size(&self) -> u64 {\n\n self.size\n\n }\n\n}\n\n\n", "file_path": "src/s3_rusoto/file_description.rs", "rank": 44, "score": 7.1509586450538745 }, { "content": "-- Start=0000000200 Status=Pending(100 bytes)\"\n\n );\n\n\n\n // try reading after of downloaded range\n\n cursor\n\n .seek(SeekFrom::Start(280))\n\n .expect(\"Cursor could not be moved\");\n\n let err_msg = assert_cursor(&cursor, pattern(0, 50))\n\n .await\n\n .expect_err(\"Read of [280:320[ should fail, only [200,300[ 
was downloaded\")\n\n .to_string();\n\n assert_eq!(\n\n err_msg,\n\n \"\\\n\nDownload not scheduled at position 300, scheduled ranges are:\n\n-- Start=0000000200 Status=Done(100 bytes)\"\n\n );\n\n }\n\n\n\n #[tokio::test]\n", "file_path": "src/cache/cursor.rs", "rank": 45, "score": 7.041952365065509 }, { "content": "use super::super::{Downloader, FileDescription};\n\nuse anyhow::{anyhow, Result};\n\nuse async_trait::async_trait;\n\nuse std::sync::Arc;\n\n\n\nuse tokio::time::Duration;\n\n\n\n/// A downloader that returns a simple pattern (1,2,3...254,255,1,2...)\n\n/// Waits for 10ms before returning its result to trigger cache misses\n", "file_path": "src/cache/mock.rs", "rank": 46, "score": 6.903894322400914 }, { "content": "pub use sync::CacheStats;\n\n\n\n#[allow(dead_code)]\n\npub mod sync {\n\n use super::DownloadStat;\n\n use std::sync::{Arc, Mutex};\n\n\n\n struct InnerCacheStats {\n\n downloads: Mutex<Vec<DownloadStat>>,\n\n }\n\n\n\n /// Reference to a synchronized container with inner statistics of the caching system.\n\n ///\n\n /// You can get these stats from the [`DownloadCache`](super::super::DownloadCache) directly.\n\n /// Clonig this object only clones the reference.\n\n #[derive(Clone)]\n\n pub struct CacheStats {\n\n inner: Arc<InnerCacheStats>,\n\n }\n\n\n", "file_path": "src/cache/stats.rs", "rank": 48, "score": 6.6118153862566285 }, { "content": "//! The stats module has two implementations that you can switch with the `stats` feature:\n\n//! - the `sync` module is a shared reference to synchronized statistics collectors\n\n//! 
- the `noop` module is an empty structure that does not collect any statistics\n\n\n\nuse serde::Serialize;\n\n\n\n/// KPIs relative to one specific range download\n\n#[derive(Clone, Serialize)]\n\npub struct DownloadStat {\n\n // range size in bytes\n\n pub size: u64,\n\n /// ms spent inside download function\n\n pub dl_duration: u64,\n\n /// ms elapsed since registering\n\n pub dl_start: u64,\n\n}\n\n\n\n#[cfg(not(feature = \"stats\"))]\n\npub use noop::CacheStats;\n\n#[cfg(feature = \"stats\")]\n", "file_path": "src/cache/stats.rs", "rank": 49, "score": 6.485771552041668 }, { "content": " // check the debug output for the cache\n\n assert_eq!(\n\n format!(\"{:?}\", download_cache),\n\n \"\\\n\nfile = pattern_downloader / pattern_uri:\n\n-- Start=0000000000 Status=Done(100 bytes)\"\n\n );\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_read_uninit() {\n\n let (_, file_manager) = init_mock(1000).await;\n\n\n\n let mut cursor = CacheCursor {\n\n cache: file_manager.clone(),\n\n position: 0,\n\n };\n\n\n\n // try reading whithout starting any download\n\n let err_msg = assert_cursor(&cursor, pattern(0, 50))\n", "file_path": "src/cache/cursor.rs", "rank": 50, "score": 5.8582336259571095 }, { "content": "file = pattern_downloader / pattern_uri:\n\n-- Start=0000000000 Status=Done(100 bytes)\"\n\n );\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_read_accross_ranges() {\n\n let (download_cache, file_manager) = init_mock(1000).await;\n\n\n\n file_manager\n\n .queue_download(vec![\n\n Range {\n\n start: 200,\n\n length: 100,\n\n },\n\n Range {\n\n start: 0,\n\n length: 100,\n\n },\n\n Range {\n", "file_path": "src/cache/cursor.rs", "rank": 51, "score": 5.501665488483848 }, { "content": "use std::str::FromStr;\n\nuse std::sync::Arc;\n\n\n\nuse super::S3Downloader;\n\nuse crate::{Downloader, FileDescription};\n\n\n\nuse rusoto_core::Region;\n\nuse rusoto_s3::S3Client;\n\n\n", "file_path": "src/s3_rusoto/file_description.rs", "rank": 52, "score": 5.253599702680495 }, { 
"content": "use std::sync::Arc;\n\n\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse serde::Deserialize;\n\n\n\n/// Connection to a server that allows to download chunks of files\n\n#[async_trait]\n", "file_path": "src/cache/models.rs", "rank": 53, "score": 5.133833358947118 }, { "content": "//! Optimized and flexible helpers for reading large data files from cloud storages (or more generally, from the network).\n\n//!\n\n//! To get started, create an instance of `DownloadCache` and start registering `FileDescription` trait objects.\n\n\n\nmod cache;\n\npub use cache::*;\n\n\n\n#[cfg(feature = \"s3_rusoto\")]\n\npub mod s3_rusoto;\n", "file_path": "src/lib.rs", "rank": 54, "score": 4.917701686478408 }, { "content": " let mut content = vec![0u8; target_length];\n\n cursor.read_exact(&mut content)?;\n\n Ok(content)\n\n })\n\n .await\n\n .unwrap()?;\n\n assert_eq!(result, target);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cache/cursor.rs", "rank": 55, "score": 4.831788915809531 }, { "content": "mod cursor;\n\nmod download_cache;\n\nmod file_manager;\n\nmod mock;\n\nmod models;\n\nmod stats;\n\n\n\npub use cursor::CacheCursor;\n\npub use download_cache::DownloadCache;\n\npub use file_manager::FileManager;\n\npub use models::*;\n\npub use stats::{CacheStats, DownloadStat};\n", "file_path": "src/cache/mod.rs", "rank": 56, "score": 4.748978773332439 }, { "content": "//! 
[`FileDescription`](`crate::FileDescription`) implementation for downloading from S3 using Rusoto\n\n\n\nmod downloader;\n\nmod file_description;\n\n\n\nuse downloader::S3Downloader;\n\npub use file_description::S3FileDescription;\n", "file_path": "src/s3_rusoto/mod.rs", "rank": 57, "score": 4.371164331221088 }, { "content": " file_description.set_downloader_factory(Box::new(|_| {\n\n let client = S3Client::new_with(\n\n MockRequestDispatcher::default()\n\n .with_body(\"\")\n\n .with_request_checker(|request: &SignedRequest| {\n\n assert_eq!(request.path, \"/test_bucket/test_key\");\n\n assert_eq!(\n\n std::str::from_utf8(&request.headers.get(\"range\").unwrap()[0]).unwrap(),\n\n \"bytes=50-149\"\n\n );\n\n }),\n\n MockCredentialsProvider,\n\n Default::default(),\n\n );\n\n Arc::new(S3Downloader::new(client))\n\n }));\n\n\n\n #[allow(unused_must_use)]\n\n {\n\n file_description\n\n .get_downloader()\n\n .download(file_description.get_uri(), 50, 100)\n\n .await;\n\n }\n\n }\n\n}\n", "file_path": "src/s3_rusoto/file_description.rs", "rank": 58, "score": 4.365753906086436 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::{mock::*, DownloadCache, Range};\n\n use super::*;\n\n\n\n use std::io::{self, Read, Seek, SeekFrom};\n\n\n\n #[tokio::test]\n\n async fn test_read_within_range() {\n\n let (download_cache, file_manager) = init_mock(1000).await;\n\n\n\n file_manager\n\n .queue_download(vec![Range {\n\n start: 0,\n\n length: 100,\n\n }])\n\n .expect(\"Could not queue Range on handle\");\n", "file_path": "src/cache/cursor.rs", "rank": 59, "score": 4.2687405505020415 }, { "content": "\n\n let mut cursor = CacheCursor {\n\n cache: file_manager,\n\n position: 0,\n\n };\n\n\n\n assert_cursor(&cursor, pattern(0, 50))\n\n .await\n\n .expect(\"could not read enough bytes [0:50[ in download [0:100[\");\n\n cursor\n\n .seek(SeekFrom::Start(20))\n\n .expect(\"Cursor could not be moved\");\n\n assert_cursor(&cursor, pattern(20, 70))\n\n .await\n\n 
.expect(\"Could not read bytes bytes [20:70[ in download [0:100[\");\n\n\n\n // check the debug output for the cache\n\n assert_eq!(\n\n format!(\"{:?}\", download_cache),\n\n \"\\\n", "file_path": "src/cache/cursor.rs", "rank": 60, "score": 4.234722579987321 }, { "content": "## Proposed solution\n\n\n\nThe proposed solution is to introduce a dedicated data structure that takes care of scheduling the downloads and caching the results, while providing a blocking API that implements the `std::io::Read` trait. The scheduling strategy remains customizable to adapt to different types of reads (chunks of a Parquet file will be read in a different order thant those of a CSV file) and different infrastructures (Cloud storage will behave differently from an on-premise HDFS). But most of the caching and synchronization mechanismes will be common to all the use cases.\n\n\n\nThe proposed strategy is the following one:\n\n\n\n![Solution flow generic](https://raw.githubusercontent.com/wiki/cloudfuse-io/cloud-readers-rs/solution_flow_generic.jpg)\n\n\n\nIf we apply it to the specific usecase of [Buzz](https://github.com/cloudfuse-io/buzz-rust), it boils down to this:\n\n\n\n![Solution flow buzz](https://raw.githubusercontent.com/wiki/cloudfuse-io/cloud-readers-rs/solution_flow_buzz.jpg)\n\n\n\n## Using the library\n\n\n\nThe library has a `stats` feature flag that allows to control whether it should collect execution statistics or not.\n", "file_path": "README.md", "rank": 61, "score": 3.947601724587573 }, { "content": "/// Fully specified description of a downloadable file\n\n///\n\n/// To make it fully generic, a FileDescription provides its own downloader.\n\n/// It also provides a downloader_id to allow the [`DownloadCache`](super::DownloadCache) to cache the downloader.\n\n/// It is not the responsability of the FileDescription to cache the downloader.\n\n/// Downloaders that will most efficiently be re-usable for a given set of files should be assigned the same id.\n\n/// 
For instance for AWS S3, this will be by region as the domain will be the same, which allows full re-use of\n\n/// the SSL connection (TODO: verify if it is not actually by bucket).\n\npub trait FileDescription: Send {\n\n fn get_downloader(&self) -> Arc<dyn Downloader>;\n\n\n\n fn get_downloader_id(&self) -> String;\n\n\n\n fn get_uri(&self) -> String;\n\n\n\n fn get_file_size(&self) -> u64;\n\n}\n", "file_path": "src/cache/models.rs", "rank": 62, "score": 3.8157021417133747 }, { "content": "# :cloud: Rust Cloud Readers :cloud:\n\n\n\n[![License: MIT](https://img.shields.io/badge/License-MIT-green.svg)](LICENSE)\n\n![Master](https://github.com/cloudfuse-io/cloud-readers-rs/actions/workflows/tests.yml/badge.svg)\n\n[![codecov](https://codecov.io/gh/cloudfuse-io/cloud-readers-rs/branch/master/graph/badge.svg?token=PUJFCQUNY3)](https://codecov.io/gh/cloudfuse-io/cloud-readers-rs)\n\n\n\nOptimized and flexible helpers for reading large data files from cloud storages (or more generally, from the network).\n\n\n\n## Context\n\n\n\nMost client libraries to read from the network in Rust are async. One example is the AWS S3 client library. On the other hand, data processing systems prefer to read data using the `std::io::Read` trait that is blocking, partly because it is more performant for large files, partly beacause many data processing libraries such as protobuf use that trait in their interfaces. In that case, the data processing flow looks like this:\n\n\n\n![General flow generic](https://raw.githubusercontent.com/wiki/cloudfuse-io/cloud-readers-rs/general_flow_generic.jpg)\n\n\n\nIf we apply this to the example of [Buzz](https://github.com/cloudfuse-io/buzz-rust), we get the following flow:\n\n\n\n![General flow buzz](https://raw.githubusercontent.com/wiki/cloudfuse-io/cloud-readers-rs/general_flow_buzz.jpg)\n\n\n\nAdditionally, the strategy to fetch data from the network will be different from reading from a high bandwith hard drive. 
When reading local data, the bottleneck might be the CPU and not the disk bandwidth, and even if that is not the case, you will usually not get much benefit from reading multiple chunks in parallel as one read stream will already fully utilizes the bandwidth of the disk (this might not be completely true when multiple drives are attached). The situation is completely different when reading from the network. Each read will usually be throttled on the data provider side. When using AWS S3 for instance, you get a much better overall bandwidth if you read 8 chunks in parallel (even for a single file). This means that it is worth decoupling the read strategy from the processing by eagerly downloading and caching in memory for future processings.\n\n\n", "file_path": "README.md", "rank": 63, "score": 3.7891082396953326 }, { "content": " start: 100,\n\n length: 100,\n\n },\n\n ])\n\n .expect(\"Could not queue Range on handle\");\n\n\n\n let cursor = CacheCursor {\n\n cache: file_manager,\n\n position: 0,\n\n };\n\n\n\n assert_cursor(&cursor, pattern(0, 300))\n\n .await\n\n .expect(\"Could not read bytes bytes [0:300[ in download [0:100[+[100:200[+[200:300[\");\n\n\n\n // check the debug output for the cache\n\n assert_eq!(\n\n format!(\"{:?}\", download_cache),\n\n \"\\\n\nfile = pattern_downloader / pattern_uri:\n", "file_path": "src/cache/cursor.rs", "rank": 64, "score": 3.7327737049963927 }, { "content": "# Notebook folder\n\n\n\nThis folder contains the results of some performance tests conducted on the library when using cloud storage. 
To make them easily readeable and reproduceable, we used Jupyter notebooks and Docker.\n\n\n\n## Deploying the infrastructure\n\n\n\nTo be able to run the tests with a cloud storage, you first need to deploy the benchmarking executable otherwise you would mainly be measuring your network latency to the cloud.\n\n\n\nTo run the notebooks, install Jupyter and run `jupyter lab` at the root of the project.\n\n\n\nThe notebook [`infra.ipynb`](infra.ipynb) helps you deploy an AWS Lambda function into one of your AWS accounts. You just need your credentials to be configured in the default AWS credentials file and change the `AWS_PROFILE` environment variable before starting jupyter to match your setup.\n\n\n\nWith the same notbook, you can also tear down the infrastructure that was deployed previously. The Terraform state is stored on a local docker volume. If you delete it before running `terraform destroy`, you won't be able to tear down the infrastructure automatically any more.\n\n\n\n## Running the benchmarks\n\n\n\nYou can run the tests by calling the AWS Lambda function deployed previously. 
\n\n\n\nNote that if you want to run tests on files that are not publicly exposed, you will need to provide the appropriate policy to the Lambda function in the Terraform script.\n\n\n", "file_path": "notebooks/README.md", "rank": 66, "score": 3.1375168109905305 }, { "content": " .await\n\n .expect_err(\"Read should fail if no download was scheduled\")\n\n .to_string();\n\n assert_eq!(err_msg, \"No download scheduled\");\n\n\n\n // try reading before of downloaded range\n\n file_manager\n\n .queue_download(vec![Range {\n\n start: 200,\n\n length: 100,\n\n }])\n\n .expect(\"Could not queue Range on handle\");\n\n let err_msg = assert_cursor(&cursor, pattern(0, 50))\n\n .await\n\n .expect_err(\"Read of [0:50[ should fail, only [200,300[ was downloaded\")\n\n .to_string();\n\n assert_eq!(\n\n err_msg,\n\n \"\\\n\nDownload not scheduled at position 0, scheduled ranges are:\n", "file_path": "src/cache/cursor.rs", "rank": 68, "score": 1.6394254704938664 }, { "content": " async fn test_seek() {\n\n let mut cursor = CacheCursor {\n\n cache: init_mock(100).await.1,\n\n position: 0,\n\n };\n\n\n\n cursor.seek(SeekFrom::Start(10)).unwrap();\n\n\n\n assert_eq!(cursor.position, 10);\n\n\n\n cursor.seek(SeekFrom::Current(10)).unwrap();\n\n\n\n assert_eq!(cursor.position, 20);\n\n\n\n cursor.seek(SeekFrom::End(-10)).unwrap();\n\n\n\n assert_eq!(cursor.position, 90);\n\n }\n\n\n\n //// Test Fixtures: ////\n", "file_path": "src/cache/cursor.rs", "rank": 69, "score": 1.6275246397173198 } ]
Rust
src/asset/tileset.rs
B-Reif/bevy_asefile
51d8abfc51aa9e406dad2e5cad4b93c22e7b3fc6
use asefile::{AsepriteFile, TilesetImageError}; use bevy::{ prelude::*, reflect::TypeUuid, render::texture::{Extent3d, TextureDimension, TextureFormat}, }; use std::fmt; pub(crate) type TilesetResult<T> = std::result::Result<T, TilesetError>; #[derive(Debug)] pub enum TilesetError { MissingId(asefile::TilesetId), NoPixels(asefile::TilesetId), } impl fmt::Display for TilesetError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TilesetError::MissingId(tileset_id) => { write!(f, "No tileset found with id: {}", tileset_id) } TilesetError::NoPixels(tileset_id) => { write!(f, "No pixel data for tileset with id: {}", tileset_id) } } } } impl From<&TilesetImageError> for TilesetError { fn from(e: &TilesetImageError) -> Self { match e { TilesetImageError::MissingTilesetId(id) => Self::MissingId(*id), TilesetImageError::NoPixelsInTileset(id) => Self::NoPixels(*id), } } } impl From<TilesetImageError> for TilesetError { fn from(e: TilesetImageError) -> Self { Self::from(&e) } } fn texture_from(ase: &AsepriteFile, tileset: &asefile::Tileset) -> TilesetResult<Texture> { let tileset_id = tileset.id(); let image = ase.tileset_image(&tileset_id)?; let size = Extent3d { width: image.width(), height: image.height(), depth: 1, }; Ok(Texture::new_fill( size, TextureDimension::D2, image.as_raw(), TextureFormat::Rgba8UnormSrgb, )) } #[derive(Debug)] pub struct TileSize { pub width: u16, pub height: u16, } impl TileSize { fn from_ase(ase_size: &asefile::TileSize) -> Self { Self { width: *ase_size.width(), height: *ase_size.height(), } } } #[derive(Debug, TypeUuid)] #[uuid = "0e2dbd05-dbad-46c9-a943-395f83dfa4ba"] pub struct Tileset { pub tile_count: u32, pub tile_size: TileSize, pub name: String, pub texture: Handle<Texture>, } impl Tileset { pub fn texture_size(&self) -> Vec2 { let TileSize { width, height } = self.tile_size; let tile_count = self.tile_count as f32; Vec2::new(width as f32, height as f32 * tile_count) } } #[derive(Debug)] pub(crate) struct 
TilesetData<T> { pub(crate) tile_count: u32, pub(crate) tile_size: TileSize, pub(crate) name: String, pub(crate) texture: T, } impl<T> TilesetData<T> { fn from_ase<F>(f: F, ase: &AsepriteFile, ase_tileset: &asefile::Tileset) -> TilesetResult<Self> where F: FnOnce(&AsepriteFile, &asefile::Tileset) -> TilesetResult<T>, { let texture = f(ase, ase_tileset)?; let ase_size = ase_tileset.tile_size(); Ok(Self { tile_count: *ase_tileset.tile_count(), tile_size: TileSize::from_ase(ase_size), name: ase_tileset.name().to_string(), texture, }) } } impl TilesetData<Texture> { pub(crate) fn from_ase_with_texture( ase: &AsepriteFile, ase_tileset: &asefile::Tileset, ) -> TilesetResult<Self> { TilesetData::<Texture>::from_ase(texture_from, ase, ase_tileset) } }
use asefile::{AsepriteFile, TilesetImageError}; use bevy::{ prelude::*, reflect::T
width, height } = self.tile_size; let tile_count = self.tile_count as f32; Vec2::new(width as f32, height as f32 * tile_count) } } #[derive(Debug)] pub(crate) struct TilesetData<T> { pub(crate) tile_count: u32, pub(crate) tile_size: TileSize, pub(crate) name: String, pub(crate) texture: T, } impl<T> TilesetData<T> { fn from_ase<F>(f: F, ase: &AsepriteFile, ase_tileset: &asefile::Tileset) -> TilesetResult<Self> where F: FnOnce(&AsepriteFile, &asefile::Tileset) -> TilesetResult<T>, { let texture = f(ase, ase_tileset)?; let ase_size = ase_tileset.tile_size(); Ok(Self { tile_count: *ase_tileset.tile_count(), tile_size: TileSize::from_ase(ase_size), name: ase_tileset.name().to_string(), texture, }) } } impl TilesetData<Texture> { pub(crate) fn from_ase_with_texture( ase: &AsepriteFile, ase_tileset: &asefile::Tileset, ) -> TilesetResult<Self> { TilesetData::<Texture>::from_ase(texture_from, ase, ase_tileset) } }
ypeUuid, render::texture::{Extent3d, TextureDimension, TextureFormat}, }; use std::fmt; pub(crate) type TilesetResult<T> = std::result::Result<T, TilesetError>; #[derive(Debug)] pub enum TilesetError { MissingId(asefile::TilesetId), NoPixels(asefile::TilesetId), } impl fmt::Display for TilesetError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TilesetError::MissingId(tileset_id) => { write!(f, "No tileset found with id: {}", tileset_id) } TilesetError::NoPixels(tileset_id) => { write!(f, "No pixel data for tileset with id: {}", tileset_id) } } } } impl From<&TilesetImageError> for TilesetError { fn from(e: &TilesetImageError) -> Self { match e { TilesetImageError::MissingTilesetId(id) => Self::MissingId(*id), TilesetImageError::NoPixelsInTileset(id) => Self::NoPixels(*id), } } } impl From<TilesetImageError> for TilesetError { fn from(e: TilesetImageError) -> Self { Self::from(&e) } } fn texture_from(ase: &AsepriteFile, tileset: &asefile::Tileset) -> TilesetResult<Texture> { let tileset_id = tileset.id(); let image = ase.tileset_image(&tileset_id)?; let size = Extent3d { width: image.width(), height: image.height(), depth: 1, }; Ok(Texture::new_fill( size, TextureDimension::D2, image.as_raw(), TextureFormat::Rgba8UnormSrgb, )) } #[derive(Debug)] pub struct TileSize { pub width: u16, pub height: u16, } impl TileSize { fn from_ase(ase_size: &asefile::TileSize) -> Self { Self { width: *ase_size.width(), height: *ase_size.height(), } } } #[derive(Debug, TypeUuid)] #[uuid = "0e2dbd05-dbad-46c9-a943-395f83dfa4ba"] pub struct Tileset { pub tile_count: u32, pub tile_size: TileSize, pub name: String, pub texture: Handle<Texture>, } impl Tileset { pub fn texture_size(&self) -> Vec2 { let TileSize {
random
[ { "content": "use crate::asset::{TileSize, Tileset};\n\nuse bevy::math::{UVec2, Vec2};\n\nuse bevy_ecs_tilemap::prelude::*;\n\n\n\nimpl From<TileSize> for Vec2 {\n\n fn from(tile_size: TileSize) -> Self {\n\n Vec2::new(tile_size.width as f32, tile_size.height as f32)\n\n }\n\n}\n\n\n\nimpl Tileset {\n\n /// Creates new [LayerSettings] using the [Tileset's](Tileset) own tile size and texture size.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// #[cfg(feature = \"bevy_ecs_tilemap\")]\n\n /// use bevy_ase::asset::Tileset;\n\n /// use bevy_ecs_tilemap::LayerSettings;\n\n /// use bevy::math::UVec2;\n", "file_path": "src/bevy_ecs_tilemap.rs", "rank": 0, "score": 22379.489292726226 }, { "content": " ///\n\n /// // Create new layer settings from a tileset, with specified map size and chunk size.\n\n /// fn my_layer_settings(tileset: &Tileset) -> LayerSettings {\n\n /// let map_size = UVec2::new(30, 30);\n\n /// let chunk_size = UVec2::new(15, 15);\n\n /// tileset.layer_settings(map_size, chunk_size) \n\n /// }\n\n /// ```\n\n pub fn layer_settings(&self, map_size: UVec2, chunk_size: UVec2) -> LayerSettings {\n\n LayerSettings::new(\n\n map_size,\n\n chunk_size,\n\n Vec2::new(self.tile_size.width as f32, self.tile_size.height as f32),\n\n self.texture_size(),\n\n )\n\n }\n\n}\n", "file_path": "src/bevy_ecs_tilemap.rs", "rank": 1, "score": 22372.745362086793 }, { "content": "Utilities for loading Aseprite files into a Bevy application.\n\n\n\nProvides an AssetLoader struct which directly\n\nreads .aseprite files without an intermediate import step.\n\nThe loader adds Resources generated by the files' data.\n\nSystems can invoke the loader's methods to start loading files,\n\nor query the loading state.\n\n\n\n# Resources\n\n\n\nThis library creates several types of resources:\n\n\n\n- Texture data, which contains the file's images.\n\n- TextureAtlas data, which contains mapping information for each sprite in a spritesheet.\n\n- Animation data.\n\n- Slice data.\n\n- 
Tileset data (from files created in Aseprite v1.3 beta).\n\n\n\n# Configuration\n\n\n\nThis library exposes AseLoaderDefaultPlugin with default settings.\n\nThis plugin initializes all of the above resources as Asset types, adds Loader and AseAssetLoader resources,\n\nand adds an importer system function to process loaded ase data.\n\n\n\nFor a custom configuration, import the constituent parts and add them to AppBuilder directly.\n\nThe Texture resource is required to be initialized. Other asset types are optional.\n\n\n\n# Examples\n\n\n\n```rs\n\nuse bevy::prelude::*;\n\nuse bevy_ase::asset::AseAsset;\n\nuse bevy_ase::loader::{AseLoaderDefaultPlugin, Loader};\n\nuse std::path::Path;\n\n\n\n// Initialize and run a bevy app with the default bevy_ase configuration.\n\nfn main() {\n\n App::build()\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(AseLoaderDefaultPlugin)\n\n .add_system(load_sprites.system());\n\n}\n\n\n\n// Get an aseprite asset and send it to the loader.\n\npub fn load_sprites(asset_server: Res<AssetServer>, mut loader: ResMut<Loader>) {\n\n let h: Handle<AseAsset> = asset_server.load(Path::new(\"sprites/hello.aseprite\"));\n\n loader.add(h.clone());\n\n}\n\n```\n\nSee the documentation for lib.rs for more information.\n\n\n\nForked from https://github.com/alpine-alpaca/bevy_proto_aseprite.\n\n\n\n# Example\n\n\n\n```\n\ncargo run --example animated --features=\"benimator\"\n\n```\n\n\n\n# TODOs\n\n\n\n- Improve error handling.\n\n\n\n- Atlas creation fails if there are too many / too big sprites.\n\n\n\n- Hot reloading. 
This requires dynamic atlas reconstruction.\n", "file_path": "README.md", "rank": 2, "score": 14308.387235882661 }, { "content": "use std::path::Path;\n\n\n\nuse bevy::{input::system::exit_on_esc_system, prelude::*};\n\nuse bevy_ase::{\n\n self,\n\n asset::{AseAsset, Tileset},\n\n loader,\n\n loader::Loader,\n\n};\n\nuse bevy_ecs_tilemap::prelude::*;\n\n\n", "file_path": "examples/tilemap/main.rs", "rank": 3, "score": 8.295252508764376 }, { "content": "use std::path::Path;\n\n\n\nuse bevy::{input::system::exit_on_esc_system, prelude::*, sprite::entity::SpriteSheetBundle};\n\nuse bevy_ase::{\n\n self,\n\n asset::{Animation, AseAsset},\n\n loader::{self, Loader},\n\n};\n\n\n", "file_path": "examples/animated/main.rs", "rank": 4, "score": 7.205513123270655 }, { "content": "/// The default AseAssetLoader loads files with extensions \"aseprite\" and \"ase\".\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::AseAssetLoader;\n\n///\n\n/// fn build(app: &mut AppBuilder) {\n\n/// app.init_asset_loader::<AseAssetLoader>();\n\n/// }\n\n/// ```\n\n/// ## Custom extensions\n\n/// The AseAssetLoader can be instantiated with a custom set of targeted file extensions.\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::AseAssetLoader;\n\n///\n\n/// fn build(app: &mut AppBuilder) {\n\n/// let my_loader = AseAssetLoader {\n\n/// extensions: &[\"aseprite\", \"my_custom_extension\"]\n\n/// };\n\n/// app.add_asset_loader(my_loader);\n", "file_path": "src/loader.rs", "rank": 5, "score": 6.620532200948027 }, { "content": "impl Default for Loader {\n\n fn default() -> Self {\n\n Self {\n\n todo_handles: Vec::new(),\n\n in_progress: Arc::new(AtomicU32::new(0)),\n\n done: Arc::new(Mutex::new(Vec::new())),\n\n }\n\n }\n\n}\n\n\n\nimpl Loader {\n\n /// Adds an [AseAsset] to the [Loader] for loading.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use bevy::prelude::*;\n\n /// use bevy_ase::asset::AseAsset;\n\n /// use bevy_ase::loader::Loader;\n\n /// 
use std::path::Path;\n", "file_path": "src/loader.rs", "rank": 6, "score": 6.535257345469066 }, { "content": "use crate::asset::{\n\n animation::{Animation, AnimationData, Frame, Sprite, SpriteData},\n\n slice::Slice,\n\n tileset::{TilesetData, TilesetResult},\n\n AseAssetMap, Tileset,\n\n};\n\nuse crate::loader::AseAssetResources;\n\nuse asefile::AsepriteFile;\n\nuse bevy::sprite::TextureAtlasBuilder;\n\nuse bevy::{prelude::*, utils::HashMap};\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "src/processing.rs", "rank": 7, "score": 6.41817585559359 }, { "content": "/// System function for moving loaded Aseprite assets into Resoures.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::ase_importer;\n\n///\n\n/// // Creates a Bevy app and adds the ase_importer system.\n\n/// // This system is already added by default in AseLoaderPlugin.\n\n/// fn app() {\n\n/// App::build().add_system(ase_importer.system());\n\n/// }\n\n/// ```\n\npub fn ase_importer(\n\n mut loader: ResMut<Loader>,\n\n task_pool: ResMut<AsyncComputeTaskPool>,\n\n mut aseassets: ResMut<Assets<AseAsset>>,\n\n asset_server: Res<AssetServer>,\n\n resources: AseAssetResources,\n\n) {\n\n let pending = loader.pending_count();\n\n if pending > 0 {\n\n debug!(\"Processing asefiles (batches: {})\", pending);\n\n }\n\n if loader.all_todo_handles_ready(&asset_server) {\n\n loader.spawn_tasks(&task_pool, &mut aseassets);\n\n }\n\n loader.move_finished_into_resources(resources);\n\n}\n", "file_path": "src/loader.rs", "rank": 8, "score": 6.37311418217673 }, { "content": "use asefile::AsepriteFile;\n\nuse bevy::reflect::TypeUuid;\n\nuse std::path::PathBuf;\n\n\n\n/// Handle type for ase assets.\n\n///\n\n/// [crate::loader::Loader] processes [AseAsset] instances and stores their data\n\n/// as various other data types in bevy's Assets resources.\n\n///\n\n/// Once an AseAsset has been processed into other resource types, its data is dropped.\n\n///\n\n/// # 
Examples\n\n///\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::asset::AseAsset;\n\n///\n\n/// // Convert an untyped handle into an AseAsset handle.\n\n/// pub fn to_typed(handle: HandleUntyped) -> Handle<AseAsset> {\n\n/// handle.clone().typed::<AseAsset>()\n", "file_path": "src/asset/ase.rs", "rank": 9, "score": 6.35858227759487 }, { "content": "//! Index for assets created by this library.\n\nuse super::{animation::Animation, slice::Slice, tileset::Tileset};\n\nuse bevy::utils::HashMap;\n\nuse bevy::{asset::Asset, prelude::*};\n\nuse std::path::{Path, PathBuf};\n\n\n\n/// Provides a map to [Handles](Handle) for an Ase file's assets.\n\n///\n\n/// Instances of this type are owned by [AseFileMap]. To access them during runtime,\n\n/// use the AseFileMap as a system parameter, and index each AseAssetMap by\n\n/// using the file's path as a key.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use bevy_ase::asset::{AseAssetMap, Animation, Tileset};\n\n/// use bevy::asset::Handle;\n\n/// // Get all animations in this file with the name \"foo\".\n\n/// fn get_foo_animations(ase_asset_map: &AseAssetMap) -> Option<&Vec<Handle<Animation>>> {\n\n/// ase_asset_map.animations(\"foo\")\n", "file_path": "src/asset/asset_index.rs", "rank": 10, "score": 5.950249292863557 }, { "content": "use crate::asset::asset_index::AseFileMap;\n\nuse crate::asset::{ase::AseData, slice::Slice, Animation, AseAsset, Tileset};\n\nuse crate::processing::{self, ResourceDataByFile};\n\nuse asefile::AsepriteFile;\n\nuse bevy::{\n\n asset::{AssetLoader, BoxedFuture, LoadState, LoadedAsset},\n\n ecs::system::Res,\n\n prelude::*,\n\n tasks::AsyncComputeTaskPool,\n\n};\n\nuse std::{\n\n path::PathBuf,\n\n sync::{\n\n atomic::{AtomicU32, Ordering},\n\n Arc, Mutex,\n\n },\n\n};\n\n\n\n/// Provides a default Bevy app configuration for loading Aseprite files.\n\n///\n", "file_path": "src/loader.rs", "rank": 11, "score": 5.71277533712607 }, { "content": "//!\n\n//! 
This library exposes [a plugin](loader::AseLoaderDefaultPlugin) with default settings.\n\n//! This plugin initializes all of the above resources as Asset types,\n\n//! adds [Loader](loader::Loader) and [AseAssetLoader](loader::AseAssetLoader) resources,\n\n//! and adds an [importer system function](loader::ase_importer) to process loaded ase data.\n\n//! For a custom configuration, import the constituent parts and add them to AppBuilder directly.\n\n//! The Texture resource is required to be initialized. Other asset types are optional.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```\n\n//! use bevy::prelude::*;\n\n//! use bevy_ase::asset::AseAsset;\n\n//! use bevy_ase::loader::{AseLoaderDefaultPlugin, Loader};\n\n//! use std::path::Path;\n\n//!\n\n//! // Initialize and run a bevy app with the default bevy_ase configuration.\n\n//! fn main() {\n\n//! App::build()\n\n//! .add_plugins(DefaultPlugins)\n", "file_path": "src/lib.rs", "rank": 12, "score": 5.648164154409089 }, { "content": "//! ```\n\n//! use bevy::prelude::*;\n\n//! use bevy_ase;\n\n//! #[cfg(feature = \"benimator\")]\n\n//! use benimator;\n\n//!\n\n//! // Creates a benimator animation asset whenever a bevy_ase animation asset is created.\n\n//! #[cfg(feature = \"benimator\")]\n\n//! pub fn convert_animation(\n\n//! mut event_reader: EventReader<AssetEvent<bevy_ase::asset::Animation>>,\n\n//! animations: Res<Assets<bevy_ase::asset::Animation>>,\n\n//! mut sprite_sheet_animations: ResMut<Assets<benimator::SpriteSheetAnimation>>,\n\n//! ) {\n\n//! for evt in event_reader.iter() {\n\n//! if let AssetEvent::Created { handle } = evt {\n\n//! // Unwrap: Responding to Asset Created event, so asset exists\n\n//! let anim = animations.get(handle).unwrap();\n\n//! let converted_animation = anim.into();\n\n//! sprite_sheet_animations.add(converted_animation);\n\n//! 
}\n", "file_path": "src/lib.rs", "rank": 13, "score": 5.486129962113589 }, { "content": "/// This initializes all of bevy_ase's asset types, a [Loader] resource,\n\n/// an [AseAssetLoader] asset loader, and the [ase_importer] system function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::AseLoaderDefaultPlugin;\n\n/// fn app() {\n\n/// App::build()\n\n/// .add_plugins(DefaultPlugins)\n\n/// // Add the default plugin to the bevy app build.\n\n/// .add_plugin(AseLoaderDefaultPlugin);\n\n/// }\n\n/// ```\n\npub struct AseLoaderDefaultPlugin;\n\n\n\nimpl Plugin for AseLoaderDefaultPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_asset::<AseAsset>()\n", "file_path": "src/loader.rs", "rank": 14, "score": 5.483701394563448 }, { "content": "// Data used to move animations into Bevy.\n\nstruct AnimationImportData<'a> {\n\n animation_data: Vec<AnimationData>,\n\n sprite_data: Vec<SpriteData<Handle<Texture>>>,\n\n atlas: &'a TextureAtlas,\n\n atlas_handle: Handle<TextureAtlas>,\n\n}\n\n\n", "file_path": "src/processing.rs", "rank": 15, "score": 5.188862181002882 }, { "content": "/// To load Aseprite files, or check their loading status, a system can accept the [Loader] as a parameter.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::Loader;\n\n/// // Adds a Loader instance to the app's resources.\n\n/// // The AseLoaderDefaultPlugin already does this by default.\n\n/// fn build(app: &mut AppBuilder) {\n\n/// app.init_resource::<Loader>();\n\n/// }\n\n/// ```\n\n\n\npub struct Loader {\n\n todo_handles: Vec<Handle<AseAsset>>,\n\n in_progress: Arc<AtomicU32>,\n\n done: Arc<Mutex<Vec<processing::ResourceDataByFile>>>,\n\n}\n\n\n", "file_path": "src/loader.rs", "rank": 16, "score": 5.076758163772416 }, { "content": " ///\n\n /// // System function which sends ase assets in the \"sprites\" folder to the loader.\n\n /// pub fn load_sprites(asset_server: 
Res<AssetServer>, mut aseloader: ResMut<Loader>) {\n\n /// let handles = asset_server.load_folder(std::path::Path::new(\"sprites\")).unwrap();\n\n /// for h in &handles {\n\n /// aseloader.add(h.clone().typed::<AseAsset>());\n\n /// }\n\n /// }\n\n /// ```\n\n pub fn add(&mut self, handle: Handle<AseAsset>) {\n\n self.todo_handles.push(handle);\n\n }\n\n\n\n /// Returns the number of [AseAsset] handles currently being processed.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use bevy::prelude::*;\n\n /// use bevy_ase::loader::Loader;\n", "file_path": "src/loader.rs", "rank": 17, "score": 4.929419845592352 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n// Tuple of all resource types to move data into.\n\npub(crate) type AseAssetResources<'a> = (\n\n ResMut<'a, Assets<Texture>>,\n\n Option<ResMut<'a, Assets<Animation>>>,\n\n Option<ResMut<'a, Assets<TextureAtlas>>>,\n\n Option<ResMut<'a, Assets<Tileset>>>,\n\n Option<ResMut<'a, Assets<Slice>>>,\n\n Option<ResMut<'a, AseFileMap>>,\n\n);\n\n\n\n/// System function for moving loaded Aseprite assets into Resoures.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use bevy::prelude::*;\n\n/// use bevy_ase::loader::ase_importer;\n\n///\n\n/// // Creates a Bevy app and adds the ase_importer system.\n\n/// // This system is already added by default in AseLoaderPlugin.\n\n/// fn app() {\n\n/// App::build().add_system(ase_importer.system());\n\n/// }\n\n/// ```\n", "file_path": "src/loader.rs", "rank": 18, "score": 4.850926104836984 }, { "content": "/// # Examples\n\n///\n\n/// ```\n\n/// #[cfg(feature = \"benimator\")]\n\n/// use bevy_ase::asset::{Animation, Frame};\n\n/// use benimator::SpriteSheetAnimation;\n\n///\n\n/// // Create a benimator SpriteSheetAnimation from a reference to a bevy_ase Animation.\n\n/// fn to_benimator_anim(animation: &Animation) -> SpriteSheetAnimation {\n\n/// animation.into() \n\n/// }\n\n///\n\n/// // Create a benimator Frame from a a reference to a bevy_ase Frame.\n\n/// fn 
to_benimator_frame(frame: &Frame) -> benimator::Frame {\n\n/// frame.into() \n\n/// }\n\n/// ```\n\npub mod benimator;\n\n\n\n#[cfg(feature = \"bevy_ecs_tilemap\")]\n", "file_path": "src/lib.rs", "rank": 19, "score": 4.557082994330216 }, { "content": "use asefile::{AsepriteFile, Tag};\n\nuse bevy::{\n\n prelude::*,\n\n reflect::TypeUuid,\n\n render::texture::{Extent3d, TextureDimension, TextureFormat},\n\n sprite::TextureAtlas,\n\n};\n\nuse std::path::{Path, PathBuf};\n\n\n\n/// A sprite-based animation.\n\n#[derive(Debug, TypeUuid)]\n\n#[uuid = \"49c1ff21-7abe-4167-b25b-f3730763e348\"]\n\npub struct Animation {\n\n frames: Vec<Frame>,\n\n atlas: Handle<TextureAtlas>,\n\n}\n\nimpl Animation {\n\n /// Creates a new Animation with a [Frame] vec and a [TextureAtlas] handle.\n\n pub fn new(frames: Vec<Frame>, atlas: Handle<TextureAtlas>) -> Self {\n\n Animation { frames, atlas }\n", "file_path": "src/asset/animation.rs", "rank": 20, "score": 4.494890589125802 }, { "content": "use std::path::PathBuf;\n\n\n\nuse asefile::AsepriteFile;\n\n\n\nuse crate::processing::{self, ResourceData};\n\n\n", "file_path": "src/tests.rs", "rank": 22, "score": 3.04630260626598 }, { "content": "pub(crate) mod animation;\n\npub(crate) mod ase;\n\npub(crate) mod asset_index;\n\npub mod slice;\n\npub(crate) mod tileset;\n\n\n\npub use animation::{Animation, Frame, Sprite};\n\npub use ase::AseAsset;\n\npub use asefile::UserData;\n\npub use asset_index::{AseAssetMap, AseFileMap};\n\npub use tileset::{TileSize, Tileset};\n", "file_path": "src/asset.rs", "rank": 23, "score": 2.7828871002647695 }, { "content": "//! Types for slice data.\n\npub use asefile::{Slice9, SliceKey, SliceOrigin, SlicePivot, SliceSize};\n\nuse bevy::reflect::TypeUuid;\n\n\n\n/// A slice is a region of an Ase sprite with a name and optional user data.\n\n#[derive(Debug, TypeUuid)]\n\n#[uuid = \"d12e0ddb-b47b-4d50-ae12-73eb970feae2\"]\n\npub struct Slice {\n\n /// The name of the slice. 
Not guaranteed to be unique.\n\n pub name: String,\n\n /// A set of [asefile::SliceKey] structs. Together, these describe the shape and position of a slice during animation.\n\n pub keys: Vec<asefile::SliceKey>,\n\n /// Optional [asefile::UserData] associated with this slice.\n\n pub user_data: Option<asefile::UserData>,\n\n}\n\nimpl Slice {\n\n pub(crate) fn from_ase(ase_slice: &asefile::Slice) -> Self {\n\n let asefile::Slice {\n\n name,\n\n keys,\n", "file_path": "src/asset/slice.rs", "rank": 24, "score": 2.5636228845186624 }, { "content": "/// Implements conversions from bevy_ase assets into bevy_ecs_tilemap assets.\n\n///\n\n/// Adds the [layer_settings](asset::Tileset::layer_settings) method to [Tileset](asset::Tileset).\n\npub mod bevy_ecs_tilemap;\n\n\n\n/// Provides systems and resources for loading Aseprite files.\n\n///\n\n/// The default loader configuration provided by [loader::AseLoaderDefaultPlugin] contains\n\n/// asset types and processing for all Aseprite data types provided by this library.\n\npub mod loader;\n\nmod processing;\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/lib.rs", "rank": 25, "score": 2.3091351182902136 }, { "content": "#![warn(missing_docs)]\n\n//! Utilities for loading [`Aseprite`] files into a [`Bevy`] application.\n\n//!\n\n//! Provides [Loader](loader::Loader), an [AssetLoader](bevy::asset::AssetLoader) struct which directly\n\n//! reads .aseprite files without an intermediate import step.\n\n//! The loader adds [`Resources`] generated by the files' data.\n\n//! Systems can invoke the loader's methods to start loading files,\n\n//! or query the loading state.\n\n//!\n\n//! # Resources\n\n//!\n\n//! This library creates several types of resources:\n\n//!\n\n//! - [Texture](bevy::render::texture::Texture) data, which contains the file's images.\n\n//! - [TextureAtlas](bevy::sprite::TextureAtlas) data, which contains mapping information for each sprite in a spritesheet.\n\n//! - [Animation](asset::Animation) data.\n\n//! 
- [Slice](asset::slice::Slice) data.\n\n//! - [Tileset](asset::Tileset) data (from files created in Aseprite v1.3 beta).\n\n//!\n\n//! # Configuration\n", "file_path": "src/lib.rs", "rank": 26, "score": 2.2094136875323804 }, { "content": "use crate::asset::{Animation, Frame};\n\nuse std::time::Duration;\n\n\n\nimpl From<&Frame> for benimator::Frame {\n\n fn from(f: &Frame) -> Self {\n\n benimator::Frame {\n\n duration: Duration::from_millis(f.duration_ms as u64),\n\n index: f.sprite.atlas_index,\n\n }\n\n }\n\n}\n\nimpl From<&Animation> for benimator::SpriteSheetAnimation {\n\n fn from(a: &Animation) -> Self {\n\n let frames = a.frames().iter().map(|f| f.into()).collect();\n\n benimator::SpriteSheetAnimation::from_frames(frames)\n\n }\n\n}\n", "file_path": "src/benimator.rs", "rank": 27, "score": 2.104295682863823 }, { "content": "//! }\n\n//! }\n\n//! ```\n\n//! [`Bevy`]: https://bevyengine.org/\n\n//! [`Aseprite`]: https://www.aseprite.org/\n\n//! [`Resources`]: https://bevyengine.org/learn/book/getting-started/resources/\n\n\n\n/// Provides asset types for working with Aseprite data.\n\n///\n\n/// Data from Aseprite files moved into Bevy includes Textures, [Animations](asset::Animation),\n\n/// [Slices](asset::slice::Slice), and [Tilesets](asset::Tileset). This module also provides\n\n/// map resources to access assets by keying with a file path and an asset name.\n\npub mod asset;\n\n#[cfg(feature = \"benimator\")]\n\n/// Implements conversions from bevy_ase assets into benimator assets.\n\n///\n\n/// Enabled by the \"benimator\" feature. 
Provides a [From] &[Animation](asset::Animation)\n\n/// implementation for benimator's SpriteSheetAnimation type,\n\n/// and [From] &[Frame](asset::Frame) implementation for benimator's Frame type.\n\n///\n", "file_path": "src/lib.rs", "rank": 28, "score": 2.013622822076464 }, { "content": " .add_asset::<Texture>()\n\n .add_asset::<TextureAtlas>()\n\n .add_asset::<Animation>()\n\n .add_asset::<Tileset>()\n\n .add_asset::<Slice>()\n\n .init_resource::<Loader>()\n\n .init_resource::<AseFileMap>()\n\n .init_asset_loader::<AseAssetLoader>()\n\n .add_system(ase_importer.system());\n\n }\n\n}\n\n\n\nconst DEFAULT_EXTENSIONS: &[&str; 2] = &[\"aseprite\", \"ase\"];\n\n\n\n/// Asset loader resource for bevy files.\n\n///\n\n/// A default AseAssetLoader instance is already initialized in the AseLoaderDefaultPlugin.\n\n/// # Examples\n\n///\n\n/// ## Default\n", "file_path": "src/loader.rs", "rank": 29, "score": 1.289734324021496 }, { "content": " load_context: &'a mut bevy::asset::LoadContext,\n\n ) -> BoxedFuture<'a, Result<(), anyhow::Error>> {\n\n Box::pin(async move {\n\n debug!(\"Loading/parsing asefile: {}\", load_context.path().display());\n\n let ase = AseAsset {\n\n data: AseData::Loaded(AsepriteFile::read(bytes)?),\n\n name: load_context.path().to_owned(),\n\n };\n\n load_context.set_default_asset(LoadedAsset::new(ase));\n\n Ok(())\n\n })\n\n }\n\n\n\n fn extensions(&self) -> &[&str] {\n\n self.extensions\n\n }\n\n}\n\n/// Provides methods for loading [AseAsset].\n\n///\n\n/// The [AseLoaderDefaultPlugin] adds this as a resource by default.\n", "file_path": "src/loader.rs", "rank": 30, "score": 1.2096488748005134 }, { "content": "/// }\n\n///\n\n/// // Get the first tileset in this file with the name \"bar\".\n\n/// fn get_bar_tileset(ase_asset_map: &AseAssetMap) -> Option<Handle<Tileset>> {\n\n/// ase_asset_map.tilesets(\"foo\")?.first().map(Handle::clone)\n\n/// }\n\n/// ```\n\n///\n\n/// # Notes\n\n///\n\n/// The owning AseFileMap instance provides convenience 
methods to index a file\n\n/// and an asset simultaneously. These methods also clone the Handle value before returning.\n\n///\n\n/// [Texture] assets are mapped to their frame index. This map does not include Textures\n\n/// rendered from [Tileset] assets. To access a Tileset's Texture, use the texture field\n\n/// on a tileset asset.\n\n///\n\n/// [Animation], [Slice], and Tileset assets are mapped to their string name. There may be\n\n/// more than one asset with the same name. If just one asset is expected,\n\n/// compose the result with `first()`.\n", "file_path": "src/asset/asset_index.rs", "rank": 31, "score": 1.0418957777152622 } ]
Rust
src/lib.rs
justanotherdot/cargo-watch
ac0a403e0c6b64a60dd7f5f78c049301ce092b4f
#![forbid(unsafe_code, clippy::pedantic)] #![allow( clippy::non_ascii_literal, clippy::cast_sign_loss, clippy::cast_possible_truncation )] #[macro_use] extern crate clap; extern crate watchexec; use clap::{ArgMatches, Error, ErrorKind}; use std::{env::set_current_dir, path::MAIN_SEPARATOR}; use watchexec::{Args, ArgsBuilder}; pub mod args; pub mod cargo; pub mod watch; pub fn change_dir() { cargo::root() .and_then(|p| set_current_dir(p).ok()) .unwrap_or_else(|| { Error::with_description("Not a Cargo project, aborting.", ErrorKind::Io).exit(); }); } pub fn set_commands(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let mut commands: Vec<String> = Vec::new(); if matches.is_present("cmd:cargo") { for cargo in values_t!(matches, "cmd:cargo", String).unwrap_or_else(|e| e.exit()) { let mut cmd: String = "cargo ".into(); cmd.push_str(&cargo); commands.push(cmd); } } if matches.is_present("cmd:shell") { for shell in values_t!(matches, "cmd:shell", String).unwrap_or_else(|e| e.exit()) { commands.push(shell); } } if commands.is_empty() { commands.push("cargo check".into()); } if debug { println!(">>> Commands: {:?}", commands); } builder.cmd(commands); } pub fn set_ignores(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { if matches.is_present("ignore-nothing") { if debug { println!(">>> Ignoring nothing"); } builder.no_vcs_ignore(true); return; } let novcs = matches.is_present("no-gitignore"); builder.no_vcs_ignore(novcs); if debug { println!(">>> Load Git/VCS ignores: {:?}", !novcs); } let mut list = vec![ format!("*{}.DS_Store", MAIN_SEPARATOR), "*.sw?".into(), "*.sw?x".into(), "#*#".into(), ".#*".into(), ".*.kate-swp".into(), format!("*{s}.hg{s}**", s = MAIN_SEPARATOR), format!("*{s}.git{s}**", s = MAIN_SEPARATOR), format!("*{s}.svn{s}**", s = MAIN_SEPARATOR), "*.db".into(), "*.db-*".into(), format!("*{s}*.db-journal{s}**", s = MAIN_SEPARATOR), format!("*{s}target{s}**", s = MAIN_SEPARATOR), ]; if debug { println!(">>> Default ignores: 
{:?}", list); } if matches.is_present("ignore") { for ignore in values_t!(matches, "ignore", String).unwrap_or_else(|e| e.exit()) { #[cfg(windows)] let ignore = ignore.replace("/", &MAIN_SEPARATOR.to_string()); list.push(ignore); } } if debug { println!(">>> All ignores: {:?}", list); } builder.ignores(list); } pub fn set_debounce(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let d = if matches.is_present("delay") { let debounce = value_t!(matches, "delay", f32).unwrap_or_else(|e| e.exit()); if debug { println!(">>> File updates debounce: {} seconds", debounce); } (debounce * 1000.0) as u32 } else { 500 }; builder.poll_interval(d).debounce(d); } pub fn set_watches(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let mut opts = Vec::new(); if matches.is_present("watch") { for watch in values_t!(matches, "watch", String).unwrap_or_else(|e| e.exit()) { opts.push(watch.into()); } } if opts.is_empty() { opts.push(".".into()); } if debug { println!(">>> Watches: {:?}", opts); } builder.paths(opts); } pub fn get_options(debug: bool, matches: &ArgMatches) -> Args { let mut builder = ArgsBuilder::default(); builder .restart(!matches.is_present("no-restart")) .poll(matches.is_present("poll")) .clear_screen(matches.is_present("clear")) .debug(debug) .run_initially(!matches.is_present("postpone")); set_ignores(debug, &mut builder, &matches); set_debounce(debug, &mut builder, &matches); set_watches(debug, &mut builder, &matches); set_commands(debug, &mut builder, &matches); let mut args = builder.build().unwrap(); args.once = matches.is_present("once"); if debug { println!(">>> Watchexec arguments: {:?}", args); } args }
#![forbid(unsafe_code, clippy::pedantic)] #![allow( clippy::non_ascii_literal, clippy::cast_sign_loss, clippy::cast_possible_truncation )] #[macro_use] extern crate clap; extern crate watchexec; use clap::{ArgMatches, Error, ErrorKind}; use std::{env::set_current_dir, path::MAIN_SEPARATOR}; use watchexec::{Args, ArgsBuilder}; pub mod args; pub mod cargo; pub mod watch;
pub fn set_commands(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let mut commands: Vec<String> = Vec::new(); if matches.is_present("cmd:cargo") { for cargo in values_t!(matches, "cmd:cargo", String).unwrap_or_else(|e| e.exit()) { let mut cmd: String = "cargo ".into(); cmd.push_str(&cargo); commands.push(cmd); } } if matches.is_present("cmd:shell") { for shell in values_t!(matches, "cmd:shell", String).unwrap_or_else(|e| e.exit()) { commands.push(shell); } } if commands.is_empty() { commands.push("cargo check".into()); } if debug { println!(">>> Commands: {:?}", commands); } builder.cmd(commands); } pub fn set_ignores(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { if matches.is_present("ignore-nothing") { if debug { println!(">>> Ignoring nothing"); } builder.no_vcs_ignore(true); return; } let novcs = matches.is_present("no-gitignore"); builder.no_vcs_ignore(novcs); if debug { println!(">>> Load Git/VCS ignores: {:?}", !novcs); } let mut list = vec![ format!("*{}.DS_Store", MAIN_SEPARATOR), "*.sw?".into(), "*.sw?x".into(), "#*#".into(), ".#*".into(), ".*.kate-swp".into(), format!("*{s}.hg{s}**", s = MAIN_SEPARATOR), format!("*{s}.git{s}**", s = MAIN_SEPARATOR), format!("*{s}.svn{s}**", s = MAIN_SEPARATOR), "*.db".into(), "*.db-*".into(), format!("*{s}*.db-journal{s}**", s = MAIN_SEPARATOR), format!("*{s}target{s}**", s = MAIN_SEPARATOR), ]; if debug { println!(">>> Default ignores: {:?}", list); } if matches.is_present("ignore") { for ignore in values_t!(matches, "ignore", String).unwrap_or_else(|e| e.exit()) { #[cfg(windows)] let ignore = ignore.replace("/", &MAIN_SEPARATOR.to_string()); list.push(ignore); } } if debug { println!(">>> All ignores: {:?}", list); } builder.ignores(list); } pub fn set_debounce(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let d = if matches.is_present("delay") { let debounce = value_t!(matches, "delay", f32).unwrap_or_else(|e| e.exit()); if debug { println!(">>> File updates 
debounce: {} seconds", debounce); } (debounce * 1000.0) as u32 } else { 500 }; builder.poll_interval(d).debounce(d); } pub fn set_watches(debug: bool, builder: &mut ArgsBuilder, matches: &ArgMatches) { let mut opts = Vec::new(); if matches.is_present("watch") { for watch in values_t!(matches, "watch", String).unwrap_or_else(|e| e.exit()) { opts.push(watch.into()); } } if opts.is_empty() { opts.push(".".into()); } if debug { println!(">>> Watches: {:?}", opts); } builder.paths(opts); } pub fn get_options(debug: bool, matches: &ArgMatches) -> Args { let mut builder = ArgsBuilder::default(); builder .restart(!matches.is_present("no-restart")) .poll(matches.is_present("poll")) .clear_screen(matches.is_present("clear")) .debug(debug) .run_initially(!matches.is_present("postpone")); set_ignores(debug, &mut builder, &matches); set_debounce(debug, &mut builder, &matches); set_watches(debug, &mut builder, &matches); set_commands(debug, &mut builder, &matches); let mut args = builder.build().unwrap(); args.once = matches.is_present("once"); if debug { println!(">>> Watchexec arguments: {:?}", args); } args }
pub fn change_dir() { cargo::root() .and_then(|p| set_current_dir(p).ok()) .unwrap_or_else(|| { Error::with_description("Not a Cargo project, aborting.", ErrorKind::Io).exit(); }); }
function_block-full_function
[ { "content": "fn main() -> watchexec::error::Result<()> {\n\n let matches = cargo_watch::args::parse();\n\n\n\n cargo_watch::change_dir();\n\n\n\n let quiet = matches.is_present(\"quiet\");\n\n let debug = matches.is_present(\"debug\");\n\n let opts = cargo_watch::get_options(debug, &matches);\n\n let handler = cargo_watch::watch::CwHandler::new(opts, quiet)?;\n\n watchexec::run::watch(&handler)\n\n}\n", "file_path": "src/main.rs", "rank": 0, "score": 75551.95326953352 }, { "content": "pub fn parse() -> ArgMatches<'static> {\n\n let footnote = \"Cargo commands (-x) are always executed before shell commands (-s).\\n\\nBy default, your entire project is watched, except for the target/ and .git/ folders, and your .gitignore files are used to filter paths.\".to_owned();\n\n\n\n #[cfg(windows)] let footnote = format!(\"{}\\n\\nOn Windows, patterns given to -i have forward slashes (/) automatically converted to backward ones (\\\\) to ease command portability.\", footnote);\n\n\n\n let mut app = App::new(env!(\"CARGO_PKG_NAME\"))\n\n .bin_name(\"cargo\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .help_message(\"\")\n\n .version_message(\"\")\n\n .setting(AppSettings::ArgsNegateSubcommands)\n\n .setting(AppSettings::DisableHelpSubcommand)\n\n .setting(AppSettings::DontCollapseArgsInUsage)\n\n .setting(AppSettings::GlobalVersion)\n\n .setting(AppSettings::StrictUtf8)\n\n .setting(AppSettings::SubcommandRequired)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"watch\")\n\n .author(env!(\"CARGO_PKG_HOMEPAGE\"))\n", "file_path": "src/args.rs", "rank": 2, "score": 62194.80342345011 }, { "content": "/// Returns the closest ancestor path containing a `Cargo.toml`.\n\n///\n\n/// Returns `None` if no ancestor path contains a `Cargo.toml`, or if\n\n/// the limit of `MAX_ANCESTORS` ancestors has been reached.\n\n///\n\n/// TODO: #52 Parse toml to get to workspace root\n\npub fn root() -> Option<PathBuf> {\n\n /// Checks if the 
directory contains `Cargo.toml`\n\n fn contains_manifest(path: &PathBuf) -> bool {\n\n fs::read_dir(path)\n\n .map(|entries| {\n\n entries\n\n .filter_map(Result::ok)\n\n .any(|ent| &ent.file_name() == \"Cargo.toml\")\n\n })\n\n .unwrap_or(false)\n\n }\n\n\n\n // From the current directory we work our way up, looking for `Cargo.toml`\n\n env::current_dir().ok().and_then(|mut wd| {\n\n for _ in 0..MAX_ANCESTORS {\n\n if contains_manifest(&wd) {\n\n return Some(wd);\n\n }\n\n if !wd.pop() {\n\n break;\n\n }\n\n }\n\n\n\n None\n\n })\n\n}\n", "file_path": "src/cargo.rs", "rank": 4, "score": 54124.23447199364 }, { "content": "### RLS is slow while using cargo watch, or vice versa, or it's waiting for the project lock a lot\n\n\n\nCargo builds (and checks, and clippy, and tests because the tests have to be\n\nbuilt) take out a lock on the project so two cargo instances don't run at the\n\nsame time. That may include language servers like RLS (and possibly RA)! So\n\nyour editor might be fighting with cargo watch for the lock when you save.\n\n\n\nThere's not really a way around this. Either stop using cargo watch, or stop\n\nusing RLS/RA, or accept that whenever you save there may be some slowdown for a\n\nlittle while. While I'm investigating ways to make this less of an issue,\n\nthere's not going to be a quick fix anytime soon.\n\n\n\n### On Windows 7 (or lower): \"failed to add to job object: Access denied (OS Error 5)\"\n\n\n\nCargo Watch versions 5.0.0 and up (and watchexec versions 1.3.0 and up) [do not\n\nsupport Windows 7 or lower][i-69]. 
There are no plans at the moment to add such\n\nsupport.\n\n\n\nYou can downgrade to the last version which did support Windows 7 (and lower),\n\nbut do keep in mind that many bug fixes and features are missing there:\n\n\n\n```\n\n$ cargo install --force --vers 4.0.3 cargo-watch\n\n```\n\n\n\n[i-69]: https://github.com/passcod/cargo-watch/issues/69\n\n\n\n### If running cargo watch errors with \"Found argument 'build' which wasn't expected\" (or similar)\n\n\n\nYou're probably using **version 4** (or higher) but using the **version 3** (or\n\nlower) style of arguments. The interface changed! Refer to the sections above\n\nfor new usage guidelines, or to the help message:\n\n\n\n```\n\n$ cargo watch --help\n\n```\n\n\n\n### I want to run cargo-watch directly, without going through cargo\n\n\n\nYou can! But you'll have to specify the `watch` subcommand as the first\n\nargument, like so:\n\n\n\n```\n\n$ /path/to/cargo-watch watch -x build\n\n```\n\n\n\n### I want to run cargo-watch outside of a Cargo project\n\n\n\nThat's not supported. If you have a good reason to use a Cargo-specific tool\n\noutside a Cargo project, please open an issue! 
Otherwise, you'll probably be\n\nbest served with using [Watchexec].\n\n\n", "file_path": "README.md", "rank": 9, "score": 32793.71443789474 }, { "content": "# $ cargo watch\n\n\n\n[![Crate release version](https://flat.badgen.net/crates/v/cargo-watch)](https://crates.io/crates/cargo-watch)\n\n[![Crate license: CC0 1.0](https://flat.badgen.net/github/license/passcod/cargo-watch)](https://creativecommons.org/publicdomain/zero/1.0/)\n\n[![Crate download count](https://flat.badgen.net/crates/d/cargo-watch)](https://crates.io/crates/cargo-watch)\n\n[![Build status](https://flat.badgen.net/travis/passcod/cargo-watch/master)](https://travis-ci.org/passcod/cargo-watch)\n\n\n\nCargo Watch watches over your project's source for changes, and runs Cargo\n\ncommands when they occur.\n\n\n\nIf you've used [nodemon], [gulp], [guard], [watchman], or similar others,\n\nit will probably feel familiar.\n\n\n\n[nodemon]: http://nodemon.io/\n\n[gulp]: http://gulpjs.com/\n\n[guard]: http://guardgem.org/\n\n[watchman]: https://facebook.github.io/watchman/\n\n\n\n## Install\n\n\n\nPre-built binaries are available [on the Github Releases tab](https://github.com/passcod/cargo-watch/releases).\n\n\n\n```\n\n$ cargo install cargo-watch\n\n```\n\n\n\nTo upgrade:\n\n\n\n```\n\n$ cargo install --force cargo-watch\n\n```\n\n\n\nOr clone and build with `$ cargo build` then place in your $PATH.\n\n\n\n## Usage\n\n\n\nBy default, it runs `check` (which is available [since Rust 1.16][st-check]).\n\nYou can easily override this, though:\n\n\n\n```\n\n$ cargo watch [-x command]...\n\n```\n\n\n\n[st-check]: https://blog.rust-lang.org/2017/03/16/Rust-1.16.html\n\n\n\nA few examples:\n\n\n\n```\n\n# Run tests only\n\n$ cargo watch -x test\n\n\n\n# Run check then tests\n\n$ cargo watch -x check -x test\n\n\n\n# Run run with arguments\n\n$ cargo watch -x 'run -- --some-arg'\n\n\n", "file_path": "README.md", "rank": 10, "score": 32793.288016506194 }, { "content": "### Something not covered above / I have a 
feature request\n\n\n\nPlease [open an issue][watch-issues], or look through the existing ones. You\n\nmay also want to look through [issues for the Notify library][notify-issues]\n\nthis tool depends on, or the [issues for the Watchexec tool][watchexec-issues]\n\nthat we use under the covers.\n\n\n\nIf you want more verbose output, try running with the `--debug` flag. Note that\n\nthis will also enable debug mode for watchexec. When filing an issue, **make\n\nsure to include a log with `--debug` enabled so problems can be diagnosed.**\n\n\n\n[notify-issues]: https://github.com/passcod/notify/issues\n\n[watch-issues]: https://github.com/passcod/cargo-watch/issues\n\n[watchexec-issues]: https://github.com/mattgreen/watchexec/issues\n\n\n\n### I want to embed Cargo Watch in my own (Rust) tool\n\n\n\nYou cannot do that directly. You may of course call `cargo-watch` as any other\n\nprogram, but if you want to directly / statically embed it, that's not\n\npossible. But! Cargo Watch is built on top of [Watchexec], Watchexec is itself\n\nbuilt on [Notify], and both of these can be used as Rust libraries.\n\n\n\n- If you want to build a tool that runs, restarts, and otherwise manages\n\n commands in response to file changes, you'll most probably want to use\n\n **Watchexec**.\n\n\n\n- If you want to build a tool that responds to file changes, but does not need\n\n to run commands, or does so in a way that is not well-supported by Watchexec,\n\n then **Notify** is your ticket.\n\n\n\n[Notify]: https://github.com/passcod/notify\n\n[Watchexec]: https://github.com/mattgreen/watchexec\n\n\n\n### Wait, is this just a wrapper on top of watchexec?\n\n\n\nIt is! [Watchexec] does a really good job of watching files and running commands\n\nand all the details that go with this. 
Cargo watch simply embeds watchexec and\n\ncalls it with its own custom options and defaults, so you can just run\n\n`cargo-watch` in your project and be in business.\n\n\n\n## About\n\n\n\nCreated by [Félix Saparelli][passcod] and [awesome contributors][contributors].\n\n\n\n[contributors]: https://github.com/passcod/cargo-watch/network/members\n\n[passcod]: https://passcod.name\n", "file_path": "README.md", "rank": 11, "score": 32792.33271444406 }, { "content": "### Ignore syntax\n\n\n\nSee the [`glob::Pattern` docs][glob::Pattern] for a more detailed\n\nspecification of the glob matching syntax used for `--ignore`.\n\n\n\nOn Windows, patterns should be specified with Windows-style (`\\\\`) separators.\n\nUnix-style separators (`/`) would not match Windows paths, which could be\n\nconfusing and give the appearance of commandline ignores not working.\n\n\n\nFrom Cargo Watch 7.0.0, `/` in commandline ignores are automatically translated\n\nto `\\\\` when running on Windows, but one should still try to write the correct\n\npatterns for the platform, as there may be more subtle differences.\n\n\n\n[glob::Pattern]: https://doc.rust-lang.org/glob/glob/struct.Pattern.html\n\n\n\n### Reloading servers seamlessly\n\n\n\nCargo Watch pairs very well with [Catflap], a tool for Unixy platforms that\n\nlets one spawn a socket before the watcher runs that Rust servers can then bind\n\nto, avoiding request-dropping and the infamous ADDRINUSE error. 
For example:\n\n\n\n```\n\n$ catflap -- cargo watch -x run\n\n```\n\n\n\n[Catflap]: https://github.com/passcod/catflap\n\n\n\nOf course, if you don't need to guard against these issues or don't want to\n\nmodify your program to grab sockets instead of ports, you can use Cargo Watch\n\nas-is: it will happily just restart your server normally.\n\n\n", "file_path": "README.md", "rank": 12, "score": 32789.983120353536 }, { "content": "# Run an arbitrary command\n\n$ cargo watch -s 'echo Hello world'\n\n```\n\n\n\nThere's a lot more you can do! Here's a copy of the help:\n\n\n\n```\n\nUSAGE:\n\n cargo watch [FLAGS] [OPTIONS]\n\n\n\nFLAGS:\n\n -c, --clear Clear the screen before each run\n\n --debug Display debug output\n\n -h, --help Display this message\n\n --ignore-nothing Ignore nothing, not even target/ and .git/\n\n --no-gitignore Don’t use .gitignore files\n\n --no-restart Don’t restart command while it’s still running\n\n --poll Force use of polling for file changes\n\n --postpone Postpone first run until a file changes\n\n -q, --quiet Suppress output from cargo-watch itself\n\n -V, --version Display version information\n\n\n\nOPTIONS:\n\n -x, --exec <cmd>...\n\n Cargo command(s) to execute on changes [default: check]\n\n -s, --shell <cmd>...\n\n Shell command(s) to execute on changes\n\n -d, --delay <delay>\n\n File updates debounce delay in seconds [default: 0.5]\n\n -i, --ignore <pattern>...\n\n Ignore a glob/gitignore-style pattern\n\n -w, --watch <watch>...\n\n Watch specific file(s) or folder(s) [default: .]\n\n\n\nCargo commands (-x) are always executed before shell commands (-s).\n\n\n\nBy default, your entire project is watched, except for the target/\n\nand .git/ folders, and your .gitignore files are used to filter paths.\n\n```\n\n\n", "file_path": "README.md", "rank": 13, "score": 32789.524519034916 }, { "content": "### If file updates seems to never trigger\n\n\n\nTry using `--poll` to force the polling fallback.\n\n\n\nIf that still doesn't work, 
and you're using an editor that does \"safe saving\",\n\nlike IntelliJ / PyCharm, you may have to disable \"safe saving\" as that may\n\nprevent file notifications from being generated properly.\n\n\n\n### Linux: If it fails to watch some deep directories but not others / \"No space left on device\"\n\n\n\nYou may have hit the inotify watch limit. [Here's a summary of what this means\n\nand how to increase it.][inotify limit]\n\n\n\n[inotify limit]: https://blog.passcod.name/2017/jun/25/inotify-watch-limit\n\n\n\n### If you want to only recompile one Cargo workspace\n\n\n\nCargo workspaces [are not natively supported yet][i-52].\n\n\n\nHowever, as you can run \"arbitrary commands\" with the `-s` option, you can\n\nwrite workspace-aware commands manually.\n\n\n\n[i-52]: https://github.com/passcod/cargo-watch/issues/52\n\n\n\n### If it runs repeatedly without touching anything\n\n\n\nThat can happen when watching files that are modified by the command you're\n\nrunning.\n\n\n\nIf you're only running compiles or checks (i.e. any command that only affects\n\nthe target/ folder) and you're using `-w`, you might be confusing the\n\ntarget-folder-ignorer. Check your options and paths.\n\n\n\n### If it runs repeatedly only touching ignored files\n\n\n\nMake sure the files you ignored are the only ones being touched. Use the\n\n`--debug` option to see exactly which files were modified and triggered the\n\nrestart (or were ignored). Some programs and libraries create temporary files\n\nthat may not match a simple ignore pattern.\n\n\n\n### I don't have colour in my cargo output / for cargo test\n\n\n\nThis sometimes happens on some terminal configurations. A quick workaround\n\n(instead of going down the rabbit hole of debugging your console settings) is\n\nto pass `--color=always` to the command. 
E.g.\n\n\n\n```\n\n$ cargo watch -x 'check --color=always'\n\n```\n\n\n\nFor test (and bench) commands, you'll need to pass the flag to the underlying\n\nprogram instead of cargo:\n\n\n\n```\n\n$ cargo watch -x 'test -- --color=always'\n\n```\n\n\n", "file_path": "README.md", "rank": 14, "score": 32789.1966614176 }, { "content": "### Restarting an application only if the build/check succeeds\n\n\n\n[Brought up by @LeDominik](https://github.com/passcod/cargo-watch/issues/75),\n\nhere's a pattern that may be very useful: you're working on a server or app,\n\nbut want it to keep running while you're writing a new feature or fixing a bug,\n\npotentially causing the code not to compile anymore in the meantime.\n\n\n\nIn this case, you can use this strategy: run a first `cargo watch` with check,\n\nbuild, test, or whatever you want, and append `-s 'touch .trigger` (or equivalent\n\nfor your platform). Then, run a second `cargo watch` simultaneously that _only_\n\nwatches that `.trigger` file. For example:\n\n\n\n```\n\n$ cargo watch -x check -s 'touch .trigger'\n\n```\n\n\n\nand\n\n\n\n```\n\n$ cargo watch --no-gitignore -w .trigger -x run\n\n```\n\n\n\nThe `--no-gitignore` flag ensures that you can safely add `.trigger` to your\n\n`.gitignore` file to avoid mistakenly committing it.\n\n\n\n## Contributing\n\n\n\nThe Cargo Watch team enthusiastically welcomes contributions and project\n\nparticipation! There's a bunch of things you can do if you want to contribute!\n\nThe [Contributor Guide](./CONTRIBUTING.md) has all the information you need for\n\neverything from reporting bugs to contributing entire new features. Please\n\ndon't hesitate to jump in if you'd like to, or even ask us questions if\n\nsomething isn't clear. 
<sub>{[attribution](https://github.com/zkat/pacote#contributing)}</sub>\n\n\n\nYou can also contribute financially with [Patreon] or [GitHub Sponsorship].\n\n\n\n[Patreon]: https://www.patreon.com/passcod\n\n[GitHub Sponsors]: https://github.com/users/passcod/sponsorship\n\n\n\n## Troubleshooting\n\n\n\nIn all cases, start by checking your version with `cargo watch --version` and,\n\nif necessary, upgrading to [the latest one][releases].\n\n\n\n[releases]: https://github.com/passcod/cargo-watch/releases\n\n\n", "file_path": "README.md", "rank": 15, "score": 32788.99377123207 }, { "content": "## Request Support\n\n\n\nIf you have a question about this project, how to use it, or just need clarification about something:\n\n\n\n* Open an Issue at https://github.com/passcod/cargo-watch/issues\n\n* Provide as much context as you can about what you're running into.\n\n* Provide project and platform versions (rustc, cargo, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it.\n\n\n\nOnce it's filed:\n\n\n\n* The project team will [label the issue](#label-issues).\n\n* Someone will try to have a response soon.\n\n* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.\n\n\n\n## Report an Error or Bug\n\n\n\nIf you run into an error or bug with the project:\n\n\n\n* Open an Issue at https://github.com/passcod/cargo-watch/issues\n\n* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own.\n\n* Include a log by running the same command with `--debug` enabled.\n\n* Provide project and platform versions (rustc, cargo, etc), depending on what seems relevant. 
If not, please be ready to provide that information if maintainers ask for it.\n\n\n\nOnce it's filed:\n\n\n\n* The project team will [label the issue](#label-issues).\n\n* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.\n\n* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code).\n\n* If you don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.\n\n* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 16, "score": 32788.05276807255 }, { "content": "## Request a Feature\n\n\n\nIf the project doesn't do something you need or want it to do:\n\n\n\n* Open an Issue at https://github.com/passcod/cargo-watch/issues\n\n* Provide as much context as you can about what you're running into.\n\n* Please try and be clear about why existing features and alternatives would not work for you.\n\n\n\nOnce it's filed:\n\n\n\n* The project team will [label the issue](#label-issues).\n\n* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. 
If the issue is closed, the team will convey their reasoning and suggest an alternative path forward.\n\n* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code).\n\n\n\nNote: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no.\n\n\n\n## Project Setup\n\n\n\nSo you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before.\n\n\n\nIf this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code).\n\n\n\nIf you want to go the usual route and run the project locally, though:\n\n\n\n* [Install Rust](https://www.rustup.rs/)\n\n* [Fork the project](https://guides.github.com/activities/forking/#fork)\n\n\n\nThen in your terminal:\n\n* `cd path/to/your/clone`\n\n* `cargo build`\n\n* `cargo run -- watch`\n\n\n\nAnd you should be ready to go!\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 17, "score": 32786.325732312 }, { "content": "## Contribute Code\n\n\n\nWe like code commits a lot! 
They're super handy, and they keep the project going and doing the work it needs to do to be useful to others.\n\n\n\nCode contributions of just about any size are acceptable!\n\n\n\nTo contribute code:\n\n\n\n* [Set up the project](#project-setup).\n\n* Make any necessary changes to the source code.\n\n* Include any [additional documentation](#contribute-documentation) the changes might need.\n\n* Write clear, concise commit message(s).\n\n* Dependency updates, additions, or removals must be in individual commits.\n\n* Go to https://github.com/passcod/cargo-watch/pulls and open a new pull request with your changes.\n\n* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.\n\n\n\nOnce you've filed the PR:\n\n\n\n* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc).\n\n* One or more maintainers will use GitHub's review feature to review your PR.\n\n* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-docs`) will be added depending on the review.\n\n* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚\n\n* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release)\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 18, "score": 32785.69153589085 }, { "content": "## Tag A Release\n\n\n\n[Needs Collaborator](#join-the-project-team): Maintainer\n\n\n\nTo tag a release:\n\n\n\n1. Make sure everything works and there are no compile warnings.\n\n2. 
Read over the README.md to make sure it is updated for this new version.\n\n3. Checkout the `master` branch, and `git pull` all changes.\n\n4. If you have some changes pending to be committed, do this now.\n\n5. Change the version number in the Cargo.toml.\n\n6. Run `cargo update` to update the Cargo.lock.\n\n7. Commit both of these with the message `1.2.3` (where that is the new version number).\n\n8. Tag the commit with `v1.2.3`.\n\n9. Push the commit and the tag: `git push --follow-tags`.\n\n10. Publish the crate: `cargo publish`.\n\n\n\nYou may chime into issues or PRs that were resolved by this release to let\n\npeople involved that the fix or feature is out, now.\n\n\n\nYou may also want to tweet about the release, and/or announce it on\n\n[Reddit](https://reddit.com/r/rust).\n\n\n\n## Join the Project Team\n\n\n\n### Ways to Join\n\n\n\nThere are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do.\n\n\n\nAll of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 19, "score": 32785.474101386644 }, { "content": "## Contribute Documentation\n\n\n\nDocumentation contributions of any size are welcome! 
Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake!\n\n\n\nTo contribute documentation:\n\n\n\n* [Set up the project](#project-setup).\n\n* Edit or add any relevant documentation.\n\n* Make sure your changes are formatted correctly and consistently with the rest of the documentation.\n\n* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything.\n\n* Write clear, concise commit message(s).\n\n* Go to https://github.com/passcod/cargo-watch/pulls and open a new pull request with your changes.\n\n* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.\n\n\n\nOnce you've filed the PR:\n\n\n\n* One or more maintainers will use GitHub's review feature to review your PR.\n\n* If the maintainer asks for any changes, edit your changes, push, and ask for another review.\n\n* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚\n\n* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release)\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 20, "score": 32785.419467195 }, { "content": "# Contributing\n\n\n\n## How do I... <a name=\"toc\"></a>\n\n\n\n* [Use This Guide](#introduction)?\n\n* Ask or Say Something? 🤔🐛😱\n\n * [Request Support](#request-support)\n\n * [Report an Error or Bug](#report-an-error-or-bug)\n\n * [Request a Feature](#request-a-feature)\n\n* Make Something? 
🤓👩🏽‍💻📜🍳\n\n * [Project Setup](#project-setup)\n\n * [Contribute Documentation](#contribute-documentation)\n\n * [Contribute Code](#contribute-code)\n\n* Manage Something ✅🙆🏼💃👔\n\n * [Provide Support on Issues](#provide-support-on-issues)\n\n * [Label Issues](#label-issues)\n\n * [Clean Up Issues and PRs](#clean-up-issues-and-prs)\n\n * [Review Pull Requests](#review-pull-requests)\n\n * [Merge Pull Requests](#merge-pull-requests)\n\n * [Tag a Release](#tag-a-release)\n\n * [Join the Project Team](#join-the-project-team)\n\n* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻\n\n\n\n## Introduction\n\n\n\nThank you so much for your interest in contributing!. All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝\n\n\n\nPlease make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚\n\n\n\nThe [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 21, "score": 32785.16504660375 }, { "content": "## Label Issues\n\n\n\n[Needs Collaborator](#join-the-project-team): Issue Tracker\n\n\n\nOne of the most important tasks in handling issues is labeling them usefully and accurately. All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily.\n\n\n\nIn order to label issues, [open up the list of unlabeled issues](https://github.com/passcod/cargo-watch/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. 
If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself!\n\n\n\nLabel | Apply When | Notes\n\n--- | --- | ---\n\n`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label.\n\n`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. |\n\n`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`.\n\n`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`)\n\n`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. |\n\n`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling.\n\n`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete.\n\n`performance` | This issue or PR is directly related to improving performance. |\n\n`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. 
|\n\n`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily \"easy\", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority.\n\n`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes.\n\n`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 22, "score": 32784.77634356301 }, { "content": "## Clean Up Issues and PRs\n\n\n\n[Needs Collaborator](#join-the-project-team): Issue Tracker\n\n\n\nIssues and PRs can go stale after a while. Maybe they're abandoned. 
Maybe the team will just plain not have time to address them any time soon.\n\n\n\nIn these cases, they should be closed until they're brought up again or the interaction starts over.\n\n\n\nTo clean up issues and PRs:\n\n\n\n* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today.\n\n* Go through each issue *from oldest to newest*, and close them if **all of the following are true**:\n\n * not opened by a maintainer\n\n * not marked as `critical`\n\n * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available)\n\n * no explicit messages in the comments asking for it to be left open\n\n * does not belong to a milestone\n\n* Leave a message when closing saying \"Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/passcod/cargo-watch/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details.\"\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 23, "score": 32784.610448783686 }, { "content": "## Attribution\n\n\n\nThis guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)!\n", "file_path": "CONTRIBUTING.md", "rank": 24, "score": 32784.38824153427 }, { "content": "## Provide Support on Issues\n\n\n\n[Needs Collaborator](#join-the-project-team): none\n\n\n\nHelping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source projects being support-related questions by users trying to understand something they ran into, or find their way around a known bug.\n\n\n\nSometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. 
In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it.\n\n\n\nIn order to help other folks out with their questions:\n\n\n\n* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/passcod/cargo-watch/issues?q=is%3Aopen+is%3Aissue+label%3Asupport).\n\n* Read through the list until you find something that you're familiar enough with to give an answer to.\n\n* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on.\n\n* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you.\n\n\n\nSome notes on picking up support issues:\n\n\n\n* Avoid responding to issues you don't know you can answer accurately.\n\n* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format.\n\n* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict).\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 25, "score": 32783.95201803785 }, { "content": "You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names.\n\n\n\nPermission | Description\n\n--- | ---\n\nIssue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. 
These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker.\n\nCommitter | Granted to contributors who want to handle the actual pull request merges, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team.\n\nMaintainer | Very similar to Committer but with the added privilege of being able to tag new releases and publish to https://crates.io/.\n\nAdmin/Owner | Granted to people ultimately responsible for the project, its community, etc.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 26, "score": 32783.4126981656 }, { "content": "## Review Pull Requests\n\n\n\n[Needs Collaborator](#join-the-project-team): Issue Tracker\n\n\n\nWhile anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions.\n\n\n\nPR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration.\n\n\n\nSome notes:\n\n\n\n* You may ask for minor changes (\"nitpicks\"), but consider whether they are really blockers to merging: try to err on the side of \"approve, with comments\".\n\n* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). 
You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own.\n\n* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it?\n\n\n\n## Merge Pull Requests\n\n\n\n[Needs Collaborator](#join-the-project-team): Committer\n\n\n\nTBD - need to hash out a bit more of this process.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 27, "score": 32783.15027578887 }, { "content": "### Further Enforcement\n\n\n\nIf you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed:\n\n\n\n 1. Repeat the request to stop.\n\n 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked.\n\n 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours.\n\n 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used.\n\n\n\nOn top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary.\n\n\n\nMaintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. 
These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk.\n\n\n\nMembers expelled from events or venues with any sort of paid attendance will not be refunded.\n\n\n\n### Who Watches the Watchers?\n\n\n\nMaintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community.\n\n\n\nAdditionally, as a project hosted on GitHub and part of the Rust community, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures.\n\n\n\n### Enforcement Examples\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 28, "score": 31883.534685339037 }, { "content": "#### The Best Case\n\n\n\nThe vast majority of situations work out like this. This interaction is common, and generally positive.\n\n\n\n> Alex: \"Yeah I used X and it was retarded!\"\n\n\n\n> Patt (not a maintainer): \"Hey, could you not use that word? What about 'ridiculous' instead?\"\n\n\n\n> Alex: \"oh sorry, sure.\" -> edits old comment to say \"it was really confusing!\"\n\n\n\n#### The Maintainer Case\n\n\n\nSometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**.\n\n\n\n> Patt: \"Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job.\"\n\n\n\n> Alex: \"Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that.\"\n\n\n\n> Patt: \"I'm not attacking anyone, what's your problem?\"\n\n\n\n> Alex: \"@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. 
I tried to nudge them about it, but nope.\"\n\n\n\n> KeeperOfCommitBits: (on issue) \"Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space.\"\n\n\n\n> Patt: \"Leave me alone I haven't said anything bad wtf is wrong with you.\"\n\n\n\n> KeeperOfCommitBits: (deletes user's comment), \"@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?\"\n\n\n\n> Patt: \"@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble.\"\n\n\n\n> KeeperOfCommitBits: \"@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!\"\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 29, "score": 31882.389222305297 }, { "content": "## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment include:\n\n\n\n * Using welcoming and inclusive language.\n\n * Being respectful of differing viewpoints and experiences.\n\n * Gracefully accepting constructive feedback.\n\n * Focusing on what is best for the community.\n\n * Showing empathy and kindness towards other community members.\n\n * Encouraging and raising up your peers in the project so you can all bask in hacks and glory.\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. 
The only exception to sexual topics is channels/spaces specifically for topics of sexual identity.\n\n * Trolling, insulting/derogatory comments, and personal or political attacks.\n\n * Public or private harassment, deliberate intimidation, or threats.\n\n * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of \"outing\" of any aspect of someone's identity without their consent.\n\n * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent.\n\n * Publishing of private communication that doesn't have to do with reporting harrassment.\n\n * Any of the above even when [presented as \"ironic\" or \"joking\"](https://en.wikipedia.org/wiki/Hipster_racism).\n\n * Any attempt to present \"reverse-ism\" versions of the above as violations. Examples of reverse-isms are \"reverse racism\", \"reverse sexism\", \"heterophobia\", and \"cisphobia\".\n\n * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are.\n\n * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something.\n\n * \"[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)\"\n\n * Other conduct which could reasonably be considered inappropriate in a professional or community setting.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 30, "score": 31881.622338999852 }, { "content": "#### The Nope Case\n\n\n\n> PepeTheFrog🐸: \"Hi, I am a literal actual nazi and I think white supremacists are quite fashionable.\"\n\n\n\n> Patt: \"NOOOOPE. OH NOPE NOPE.\"\n\n\n\n> Alex: \"JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE\"\n\n\n\n> KeeperOfCommitBits: \"👀 Nope. NOPE NOPE NOPE. 
🔥\"\n\n\n\n> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct was generated (and then adapted) using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of\n\nConduct](https://wealljs.org/code-of-conduct), which is itself based on\n\n[Contributor Covenant](http://contributor-covenant.org), version 1.4, available\n\nat\n\n[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4),\n\nand the LGBTQ in Technology Slack [Code of\n\nConduct](http://lgbtq.technology/coc.html).\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 31, "score": 31881.506536373923 }, { "content": "# Code of Conduct\n\n\n\n## When Something Happens\n\n\n\nIf you see a Code of Conduct violation, follow these steps:\n\n\n\n1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits.\n\n2. That person should immediately stop the behavior and correct the issue.\n\n3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers).\n\n4. 
As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban.\n\n\n\nWhen reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation.\n\n\n\n**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples).\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 32, "score": 31881.34408844467 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within spaces involving this project and in other spaces involving community members. 
This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members.\n\n\n\nDepending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community may deem an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members.\n\n\n\n### Other Community Standards\n\n\n\nAs a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/).\n\n\n\nWhile not directly under the umbrella of the Rust Project, it is a part of the Rust community and [its Code of Conduct](https://www.rust-lang.org/conduct.html) may be applicable in some instances.\n\n\n\nEnforcement of those guidelines after violations overlapping with the above are the responsibility of the entities, and enforcement may happen in any or all of the services/communities.\n\n\n\n## Maintainer Enforcement Process\n\n\n\nOnce the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. 
This section covers actual concrete steps.\n\n\n\n### Contacting Maintainers\n\n\n\nRefer to the GitHub repository for collaborator/maintainer information and email someone there.\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 33, "score": 31880.129776642083 }, { "content": "#[cfg(unix)]\n\n#[test]\n\nfn with_error() {\n\n let mut main = Command::cargo_bin(\"cargo-watch\")\n\n .unwrap()\n\n .stderr(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .args(&[\n\n \"--testing-only--once\",\n\n \"--no-gitignore\",\n\n \"--poll\",\n\n \"-w\",\n\n \"./tests/touchdata/\",\n\n \"-s\",\n\n \"echo with error\",\n\n \"-s\",\n\n \"false\",\n\n ])\n\n .spawn()\n\n .unwrap();\n\n\n\n sleep(Duration::from_millis(50));\n", "file_path": "tests/echo.rs", "rank": 34, "score": 26073.414122569295 }, { "content": " .multiple(true)\n\n .empty_values(false)\n\n .min_values(1)\n\n .number_of_values(1)\n\n .default_value(\".\")\n\n .help(\"Watch specific file(s) or folder(s)\"),\n\n )\n\n .after_help(footnote.as_str()),\n\n );\n\n\n\n // Allow invocation of cargo-watch with both `cargo-watch watch ARGS`\n\n // (as invoked by cargo) and `cargo-watch ARGS`.\n\n let mut args: Vec<String> = env::args().collect();\n\n args.insert(1, \"watch\".into());\n\n\n\n let matches = match app.get_matches_from_safe_borrow(args) {\n\n Ok(matches) => matches,\n\n Err(err) => {\n\n match err.kind {\n\n ErrorKind::HelpDisplayed => {\n", "file_path": "src/args.rs", "rank": 35, "score": 25778.05766462906 }, { "content": "use clap::{App, AppSettings, Arg, ArgMatches, ErrorKind, SubCommand};\n\nuse std::{env, process};\n\n\n", "file_path": "src/args.rs", "rank": 36, "score": 25776.31383421662 }, { "content": " .help(\"Reserved for workspace support\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"poll\")\n\n .long(\"poll\")\n\n .help(\"Force use of polling for file changes\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"postpone\")\n\n .long(\"postpone\")\n\n .help(\"Postpone first run until a file changes\"),\n\n )\n\n .arg(\n\n 
Arg::with_name(\"quiet\")\n\n .short(\"q\")\n\n .long(\"quiet\")\n\n .help(\"Suppress output from cargo-watch itself\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"cmd:cargo\")\n", "file_path": "src/args.rs", "rank": 37, "score": 25774.87072285329 }, { "content": " .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .usage(\"cargo watch [FLAGS] [OPTIONS]\")\n\n .help_message(\"Display this message\")\n\n .version_message(\"Display version information\")\n\n .arg(\n\n Arg::with_name(\"once\")\n\n .long(\"testing-only--once\")\n\n .hidden(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"clear\")\n\n .short(\"c\")\n\n .long(\"clear\")\n\n .help(\"Clear the screen before each run\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"debug\")\n\n .long(\"debug\")\n\n .help(\"Show debug output\"),\n\n )\n", "file_path": "src/args.rs", "rank": 38, "score": 25772.888259560805 }, { "content": " .number_of_values(1)\n\n .help(\"Ignore a glob/gitignore-style pattern\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"packages:one\")\n\n .short(\"p\")\n\n .long(\"package\")\n\n .takes_value(true)\n\n .value_name(\"spec\")\n\n .multiple(true)\n\n .empty_values(false)\n\n .min_values(1)\n\n .hidden(true)\n\n .help(\"Reserved for workspace support\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"watch\")\n\n .short(\"w\")\n\n .long(\"watch\")\n\n .takes_value(true)\n", "file_path": "src/args.rs", "rank": 39, "score": 25770.896501443553 }, { "content": " .arg(\n\n Arg::with_name(\"ignore-nothing\")\n\n .long(\"ignore-nothing\")\n\n .help(\"Ignore nothing, not even target/ and .git/\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"no-gitignore\")\n\n .long(\"no-gitignore\")\n\n .help(\"Don’t use .gitignore files\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"no-restart\")\n\n .long(\"no-restart\")\n\n .help(\"Don’t restart command while it’s still running\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"packages:all\")\n\n .long(\"all\")\n\n .conflicts_with(\"packages:one\")\n\n .hidden(true)\n", "file_path": "src/args.rs", "rank": 40, "score": 
25770.505979650774 }, { "content": " println!(\"{}\", err);\n\n process::exit(0);\n\n }\n\n\n\n ErrorKind::VersionDisplayed => {\n\n // Unlike HelpDisplayed, VersionDisplayed emits the output\n\n // by itself (clap-rs/clap#1390). It also does so without a\n\n // trailing newline, so we print one ourselves.\n\n println!();\n\n process::exit(0);\n\n }\n\n\n\n _ => app.get_matches(),\n\n }\n\n }\n\n };\n\n\n\n matches.subcommand.unwrap().matches\n\n}\n", "file_path": "src/args.rs", "rank": 41, "score": 25770.3671676793 }, { "content": " .short(\"x\")\n\n .long(\"exec\")\n\n .takes_value(true)\n\n .value_name(\"cmd\")\n\n .multiple(true)\n\n .empty_values(false)\n\n .min_values(1)\n\n .number_of_values(1)\n\n .help(\"Cargo command(s) to execute on changes [default: check]\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"cmd:shell\")\n\n .short(\"s\")\n\n .long(\"shell\")\n\n .takes_value(true)\n\n .value_name(\"cmd\")\n\n .multiple(true)\n\n .empty_values(false)\n\n .min_values(1)\n\n .number_of_values(1)\n", "file_path": "src/args.rs", "rank": 42, "score": 25769.66813481263 }, { "content": " .help(\"Shell command(s) to execute on changes\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"delay\")\n\n .short(\"d\")\n\n .long(\"delay\")\n\n .takes_value(true)\n\n .empty_values(false)\n\n .default_value(\"0.5\")\n\n .help(\"File updates debounce delay in seconds\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"ignore\")\n\n .short(\"i\")\n\n .long(\"ignore\")\n\n .takes_value(true)\n\n .value_name(\"pattern\")\n\n .multiple(true)\n\n .empty_values(false)\n\n .min_values(1)\n", "file_path": "src/args.rs", "rank": 43, "score": 25768.34318953137 }, { "content": "//! 
Utilities for working with cargo and rust files\n\n\n\nuse std::env;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\n/// How many parent folders are searched for a `Cargo.toml`\n\nconst MAX_ANCESTORS: u32 = 10;\n\n\n\n/// Returns the closest ancestor path containing a `Cargo.toml`.\n\n///\n\n/// Returns `None` if no ancestor path contains a `Cargo.toml`, or if\n\n/// the limit of `MAX_ANCESTORS` ancestors has been reached.\n\n///\n\n/// TODO: #52 Parse toml to get to workspace root\n", "file_path": "src/cargo.rs", "rank": 44, "score": 23881.10575787579 }, { "content": "use watchexec::{\n\n error::Result,\n\n pathop::PathOp,\n\n run::{ExecHandler, Handler},\n\n Args,\n\n};\n\n\n\npub struct CwHandler {\n\n cmd: String,\n\n once: bool,\n\n quiet: bool,\n\n inner: ExecHandler,\n\n}\n\n\n\nimpl Handler for CwHandler {\n\n fn args(&self) -> Args {\n\n self.inner.args()\n\n }\n\n\n\n fn on_manual(&self) -> Result<bool> {\n", "file_path": "src/watch.rs", "rank": 45, "score": 23728.954811474552 }, { "content": " if self.once {\n\n Ok(true)\n\n } else {\n\n self.start();\n\n self.inner.on_manual()\n\n }\n\n }\n\n\n\n fn on_update(&self, ops: &[PathOp]) -> Result<bool> {\n\n self.start();\n\n self.inner.on_update(ops)\n\n }\n\n}\n\n\n\nimpl CwHandler {\n\n pub fn new(mut args: Args, quiet: bool) -> Result<Self> {\n\n let cmd = args.cmd.join(\" && \");\n\n let mut final_cmd = cmd.clone();\n\n if !quiet {\n\n #[cfg(unix)]\n", "file_path": "src/watch.rs", "rank": 46, "score": 23722.136253996458 }, { "content": " final_cmd.push_str(\"; echo [Finished running. Exit status: $?]\");\n\n #[cfg(windows)]\n\n final_cmd.push_str(\" & echo [Finished running. 
Exit status: %ERRORLEVEL%]\");\n\n #[cfg(not(any(unix, windows)))]\n\n final_cmd.push_str(\" ; echo [Finished running]\");\n\n // ^ could be wrong depending on the platform, to be fixed on demand\n\n }\n\n\n\n args.cmd = vec![final_cmd];\n\n\n\n Ok(Self {\n\n once: args.once,\n\n cmd,\n\n inner: ExecHandler::new(args)?,\n\n quiet,\n\n })\n\n }\n\n\n\n fn start(&self) {\n\n if !self.quiet {\n\n println!(\"[Running '{}']\", self.cmd);\n\n }\n\n }\n\n}\n", "file_path": "src/watch.rs", "rank": 47, "score": 23719.72912772339 }, { "content": "#[test]\n\nfn with_cargo() {\n\n let mut main = Command::new(\"cargo\")\n\n .stderr(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .args(&[\"watch\", \"--version\"])\n\n .spawn()\n\n .unwrap();\n\n\n\n if main.wait_timeout(Duration::from_secs(1)).unwrap().is_none() {\n\n main.kill().unwrap();\n\n }\n\n\n\n main.wait_with_output()\n\n .unwrap()\n\n .assert()\n\n .success()\n\n .stdout(is_match(r\"cargo-watch \\d+\\.\\d+\\.\\d+\\n\").unwrap());\n\n}\n\n\n", "file_path": "tests/exec.rs", "rank": 48, "score": 22933.70043032941 }, { "content": "#[test]\n\nfn without_cargo() {\n\n let mut main = Command::cargo_bin(\"cargo-watch\")\n\n .unwrap()\n\n .stderr(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .args(&[\"watch\", \"--version\"])\n\n .spawn()\n\n .unwrap();\n\n\n\n if main.wait_timeout(Duration::from_secs(1)).unwrap().is_none() {\n\n main.kill().unwrap();\n\n }\n\n\n\n main.wait_with_output()\n\n .unwrap()\n\n .assert()\n\n .success()\n\n .stdout(is_match(r\"cargo-watch \\d+\\.\\d+\\.\\d+\\n\").unwrap());\n\n}\n\n\n", "file_path": "tests/exec.rs", "rank": 49, "score": 22063.279456901066 }, { "content": "#[test]\n\nfn without_watch() {\n\n let mut main = Command::cargo_bin(\"cargo-watch\")\n\n .unwrap()\n\n .stderr(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .args(&[\"--version\"])\n\n .spawn()\n\n .unwrap();\n\n\n\n if main.wait_timeout(Duration::from_secs(1)).unwrap().is_none() {\n\n main.kill().unwrap();\n\n }\n\n\n\n 
main.wait_with_output()\n\n .unwrap()\n\n .assert()\n\n .success()\n\n .stdout(is_match(r\"cargo-watch \\d+\\.\\d+\\.\\d+\\n\").unwrap());\n\n}\n", "file_path": "tests/exec.rs", "rank": 50, "score": 21916.569541898905 }, { "content": "extern crate assert_cmd;\n\nextern crate cargo_watch;\n\nextern crate predicates;\n\nextern crate wait_timeout;\n\nextern crate watchexec;\n\n\n\nuse assert_cmd::prelude::*;\n\nuse predicates::str::is_match;\n\nuse std::{\n\n process::{Command, Stdio},\n\n time::Duration,\n\n};\n\nuse wait_timeout::ChildExt;\n\n\n\n#[test]\n", "file_path": "tests/exec.rs", "rank": 52, "score": 15.913362977949527 }, { "content": "extern crate assert_cmd;\n\nextern crate cargo_watch;\n\n#[macro_use]\n\nextern crate insta;\n\nextern crate wait_timeout;\n\nextern crate watchexec;\n\n\n\nuse assert_cmd::prelude::*;\n\nuse std::{\n\n fs::OpenOptions,\n\n io,\n\n path::PathBuf,\n\n process::{Command, Stdio},\n\n thread::sleep,\n\n time::Duration,\n\n};\n\nuse wait_timeout::ChildExt;\n\n\n", "file_path": "tests/echo.rs", "rank": 53, "score": 15.52257259642404 }, { "content": "//! Watch files in a Cargo project and compile it when they change\n\n#![forbid(unsafe_code, clippy::pedantic)]\n\n\n\nextern crate cargo_watch;\n\nextern crate watchexec;\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 14.527136378945944 }, { "content": " touch(4).unwrap();\n\n\n\n if main\n\n .wait_timeout(Duration::from_secs(30))\n\n .unwrap()\n\n .is_none()\n\n {\n\n main.kill().unwrap();\n\n }\n\n\n\n assert_snapshot_matches!(\"with_error.stderr\", std_to_string(&mut main.stderr));\n\n assert_snapshot_matches!(\"with_error.stdout\", std_to_string(&mut main.stdout));\n\n}\n", "file_path": "tests/echo.rs", "rank": 55, "score": 2.8829744584738446 } ]
Rust
fastpay_core/src/authority.rs
mahimna-fb/fastpay
413826688d365ad597acb002213704378e9b2efc
use crate::{base_types::*, committee::Committee, error::FastPayError, messages::*}; use std::{collections::BTreeMap, convert::TryInto}; #[cfg(test)] #[path = "unit_tests/authority_tests.rs"] mod authority_tests; #[derive(Eq, PartialEq, Debug)] pub struct AccountOffchainState { pub balance: Balance, pub next_sequence_number: SequenceNumber, pub pending_confirmation: Option<SignedTransferOrder>, pub confirmed_log: Vec<CertifiedTransferOrder>, pub synchronization_log: Vec<PrimarySynchronizationOrder>, pub received_log: Vec<CertifiedTransferOrder>, } pub struct AuthorityState { pub name: AuthorityName, pub committee: Committee, pub secret: SecretKey, pub accounts: BTreeMap<FastPayAddress, AccountOffchainState>, pub last_transaction_index: VersionNumber, pub shard_id: ShardId, pub number_of_shards: u32, } pub trait Authority { fn handle_transfer_order( &mut self, order: TransferOrder, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_confirmation_order( &mut self, order: ConfirmationOrder, ) -> Result<(AccountInfoResponse, Option<CrossShardUpdate>), FastPayError>; fn handle_primary_synchronization_order( &mut self, order: PrimarySynchronizationOrder, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_account_info_request( &self, request: AccountInfoRequest, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_cross_shard_recipient_commit( &mut self, certificate: CertifiedTransferOrder, ) -> Result<(), FastPayError>; } impl Authority for AuthorityState { fn handle_transfer_order( &mut self, order: TransferOrder, ) -> Result<AccountInfoResponse, FastPayError> { fp_ensure!( self.in_shard(&order.transfer.sender), FastPayError::WrongShard ); order.check_signature()?; let transfer = &order.transfer; let sender = transfer.sender; fp_ensure!( transfer.sequence_number <= SequenceNumber::max(), FastPayError::InvalidSequenceNumber ); fp_ensure!( transfer.amount > Amount::zero(), FastPayError::IncorrectTransferAmount ); match self.accounts.get_mut(&sender) { 
None => fp_bail!(FastPayError::UnknownSenderAccount), Some(account) => { if let Some(pending_confirmation) = &account.pending_confirmation { fp_ensure!( &pending_confirmation.value.transfer == transfer, FastPayError::PreviousTransferMustBeConfirmedFirst { pending_confirmation: pending_confirmation.value.clone() } ); return Ok(account.make_account_info(sender)); } fp_ensure!( account.next_sequence_number == transfer.sequence_number, FastPayError::UnexpectedSequenceNumber ); fp_ensure!( account.balance >= transfer.amount.into(), FastPayError::InsufficientFunding { current_balance: account.balance } ); let signed_order = SignedTransferOrder::new(order, self.name, &self.secret); account.pending_confirmation = Some(signed_order); Ok(account.make_account_info(sender)) } } } fn handle_confirmation_order( &mut self, confirmation_order: ConfirmationOrder, ) -> Result<(AccountInfoResponse, Option<CrossShardUpdate>), FastPayError> { let certificate = confirmation_order.transfer_certificate; fp_ensure!( self.in_shard(&certificate.value.transfer.sender), FastPayError::WrongShard ); certificate.check(&self.committee)?; let transfer = certificate.value.transfer.clone(); let mut sender_account = self .accounts .entry(transfer.sender) .or_insert_with(AccountOffchainState::new); let mut sender_sequence_number = sender_account.next_sequence_number; let mut sender_balance = sender_account.balance; if sender_sequence_number < transfer.sequence_number { fp_bail!(FastPayError::MissingEalierConfirmations { current_sequence_number: sender_sequence_number }); } if sender_sequence_number > transfer.sequence_number { return Ok((sender_account.make_account_info(transfer.sender), None)); } sender_balance = sender_balance.try_sub(transfer.amount.into())?; sender_sequence_number = sender_sequence_number.increment()?; sender_account.balance = sender_balance; sender_account.next_sequence_number = sender_sequence_number; sender_account.pending_confirmation = None; 
sender_account.confirmed_log.push(certificate.clone()); let info = sender_account.make_account_info(transfer.sender); let recipient = match transfer.recipient { Address::FastPay(recipient) => recipient, Address::Primary(_) => { return Ok((info, None)); } }; if self.in_shard(&recipient) { let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); recipient_account.balance = recipient_account .balance .try_add(transfer.amount.into()) .unwrap_or_else(|_| Balance::max()); recipient_account.received_log.push(certificate); return Ok((info, None)); } let cross_shard = Some(CrossShardUpdate { shard_id: self.which_shard(&recipient), transfer_certificate: certificate, }); Ok((info, cross_shard)) } fn handle_cross_shard_recipient_commit( &mut self, certificate: CertifiedTransferOrder, ) -> Result<(), FastPayError> { let transfer = &certificate.value.transfer; let recipient = match transfer.recipient { Address::FastPay(recipient) => recipient, Address::Primary(_) => { fp_bail!(FastPayError::InvalidCrossShardUpdate); } }; fp_ensure!(self.in_shard(&recipient), FastPayError::WrongShard); let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); recipient_account.balance = recipient_account .balance .try_add(transfer.amount.into()) .unwrap_or_else(|_| Balance::max()); recipient_account.received_log.push(certificate); Ok(()) } fn handle_primary_synchronization_order( &mut self, order: PrimarySynchronizationOrder, ) -> Result<AccountInfoResponse, FastPayError> { let recipient = order.recipient; fp_ensure!(self.in_shard(&recipient), FastPayError::WrongShard); let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); if order.transaction_index <= self.last_transaction_index { return Ok(recipient_account.make_account_info(recipient)); } fp_ensure!( order.transaction_index == self.last_transaction_index.increment()?, FastPayError::UnexpectedTransactionIndex ); 
let recipient_balance = recipient_account.balance.try_add(order.amount.into())?; let last_transaction_index = self.last_transaction_index.increment()?; recipient_account.balance = recipient_balance; recipient_account.synchronization_log.push(order); self.last_transaction_index = last_transaction_index; Ok(recipient_account.make_account_info(recipient)) } fn handle_account_info_request( &self, request: AccountInfoRequest, ) -> Result<AccountInfoResponse, FastPayError> { fp_ensure!(self.in_shard(&request.sender), FastPayError::WrongShard); let account = self.account_state(&request.sender)?; let mut response = account.make_account_info(request.sender); if let Some(seq) = request.request_sequence_number { if let Some(cert) = account.confirmed_log.get(usize::from(seq)) { response.requested_certificate = Some(cert.clone()); } else { fp_bail!(FastPayError::CertificateNotfound) } } if let Some(idx) = request.request_received_transfers_excluding_first_nth { response.requested_received_transfers = account.received_log[idx..].to_vec(); } Ok(response) } } impl Default for AccountOffchainState { fn default() -> Self { Self { balance: Balance::zero(), next_sequence_number: SequenceNumber::new(), pending_confirmation: None, confirmed_log: Vec::new(), synchronization_log: Vec::new(), received_log: Vec::new(), } } } impl AccountOffchainState { pub fn new() -> Self { Self::default() } fn make_account_info(&self, sender: FastPayAddress) -> AccountInfoResponse { AccountInfoResponse { sender, balance: self.balance, next_sequence_number: self.next_sequence_number, pending_confirmation: self.pending_confirmation.clone(), requested_certificate: None, requested_received_transfers: Vec::new(), } } #[cfg(test)] pub fn new_with_balance(balance: Balance, received_log: Vec<CertifiedTransferOrder>) -> Self { Self { balance, next_sequence_number: SequenceNumber::new(), pending_confirmation: None, confirmed_log: Vec::new(), synchronization_log: Vec::new(), received_log, } } } impl AuthorityState { 
pub fn new(committee: Committee, name: AuthorityName, secret: SecretKey) -> Self { AuthorityState { committee, name, secret, accounts: BTreeMap::new(), last_transaction_index: VersionNumber::new(), shard_id: 0, number_of_shards: 1, } } pub fn new_shard( committee: Committee, name: AuthorityName, secret: SecretKey, shard_id: u32, number_of_shards: u32, ) -> Self { AuthorityState { committee, name, secret, accounts: BTreeMap::new(), last_transaction_index: VersionNumber::new(), shard_id, number_of_shards, } } pub fn in_shard(&self, address: &FastPayAddress) -> bool { self.which_shard(address) == self.shard_id } pub fn get_shard(num_shards: u32, address: &FastPayAddress) -> u32 { const LAST_INTEGER_INDEX: usize = std::mem::size_of::<FastPayAddress>() - 4; u32::from_le_bytes(address.0[LAST_INTEGER_INDEX..].try_into().expect("4 bytes")) % num_shards } pub fn which_shard(&self, address: &FastPayAddress) -> u32 { Self::get_shard(self.number_of_shards, address) } fn account_state( &self, address: &FastPayAddress, ) -> Result<&AccountOffchainState, FastPayError> { self.accounts .get(address) .ok_or(FastPayError::UnknownSenderAccount) } #[cfg(test)] pub fn accounts_mut(&mut self) -> &mut BTreeMap<FastPayAddress, AccountOffchainState> { &mut self.accounts } }
use crate::{base_types::*, committee::Committee, error::FastPayError, messages::*}; use std::{collections::BTreeMap, convert::TryInto}; #[cfg(test)] #[path = "unit_tests/authority_tests.rs"] mod authority_tests; #[derive(Eq, PartialEq, Debug)] pub struct AccountOffchainState { pub balance: Balance, pub next_sequence_number: SequenceNumber, pub pending_confirmation: Option<SignedTransferOrder>, pub confirmed_log: Vec<CertifiedTransferOrder>, pub synchronization_log: Vec<PrimarySynchronizationOrder>, pub received_log: Vec<CertifiedTransferOrder>, } pub struct AuthorityState { pub name: AuthorityName, pub committee: Committee, pub secret: SecretKey, pub accounts: BTreeMap<FastPayAddress, AccountOffchainState>, pub last_transaction_index: VersionNumber, pub shard_id: ShardId, pub number_of_shards: u32, } pub trait Authority { fn handle_transfer_order( &mut self, order: TransferOrder, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_confirmation_order( &mut self, order: ConfirmationOrder, ) -> Result<(AccountInfoResponse, Option<CrossShardUpdate>), FastPayError>; fn handle_primary_synchronization_order( &mut self, order: PrimarySynchronizationOrder, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_account_info_request( &self, request: AccountInfoRequest, ) -> Result<AccountInfoResponse, FastPayError>; fn handle_cross_shard_recipient_commit( &mut self, certificate: CertifiedTransferOrder, ) -> Result<(), FastPayError>; } impl Authority for AuthorityState { fn handle_transfer_order( &mut self, order: TransferOrder, ) -> Result<AccountInfoResponse, FastPayError> { fp_ensure!( self.in_shard(&order.transfer.sender), FastPayError::WrongShard ); order.check_signature()?; let transfer = &order.transfer; let sender = transfer.sender; fp_ensure!( transfer.sequence_number <= SequenceNumber::max(), FastPayError::InvalidSequenceNumber ); fp_ensure!( transfer.amount > Amount::zero(), FastPayError::IncorrectTransferAmount ); match self.accounts.get_mut(&sender) { 
None => fp_bail!(FastPayError::UnknownSenderAccount), Some(account) => { if let Some(pending_confirmation) = &account.pending_confirmation { fp_ensure!( &pending_confirmation.value.transfer == transfer, FastPayError::PreviousTransferMustBeConfirmedFirst { pending_confirmation: pending_confirmation.value.clone() } ); return Ok(account.make_account_info(sender)); } fp_ensure!( account.next_sequence_number == transfer.sequence_number, FastPayError::UnexpectedSequenceNumber ); fp_ensure!( account.balance >= transfer.amount.into(), FastPayError::InsufficientFunding { current_balance: account.balance } ); let signed_order = SignedTransferOrder::new(order, self.name, &self.secret); account.pending_confirmation = Some(signed_order); Ok(account.make_account_info(sender)) } } } fn handle_confirmation_order( &mut self, confirmation_order: ConfirmationOrder, ) -> Result<(AccountInfoResponse, Option<CrossShardUpdate>), FastPayError> { let certificate = confirmation_order.transfer_certificate; fp_ensure!( self.in_shard(&certificate.value.transfer.sender), FastPayError::WrongShard ); certificate.check(&self.committee)?; let transfer = certificate.value.transfer.clone(); let mut sender_account = self .accounts .entry(transfer.sender) .or_insert_with(AccountOffchainState::new); let mut sender_sequence_number = sender_account.next_sequence_number; let mut sender_balance = sender_account.balance; if sender_sequence_number < transfer.sequence_number { fp_bail!(FastPayError::MissingEalierConfirmations { current_sequence_number: sender_sequence_number }); } if sender_sequence_number > transfer.sequence_number { return Ok((sender_account.make_account_info(transfer.sender), None)); } sender_balance = sender_balance.try_sub(transfer.amount.into())?; sender_sequence_number = sender_sequence_number.increment()?; sender_account.balance = sender_balance; sender_account.next_sequence_number = sender_sequence_number; sender_account.pending_confirmation = None; 
sender_account.confirmed_log.push(certificate.clone()); let info = sender_account.make_account_info(transfer.sender); let recipient = match transfer.recipient { Address::FastPay(recipient) => recipient, Address::Primary(_) => { return Ok((info, None)); } }; if self.in_shard(&recipient) { let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); recipient_account.balance = recipient_account .balance .try_add(transfer.amount.into()) .unwrap_or_else(|_| Balance::max()); recipient_account.received_log.push(certificate); return Ok((info, None)); } let cross_shard = Some(CrossShardUpdate { shard_id: self.which_shard(&recipient), transfer_certificate: certificate, }); Ok((info, cross_shard)) } fn handle_cross_shard_recipient_commit( &mut self,
requested_certificate: None, requested_received_transfers: Vec::new(), } } #[cfg(test)] pub fn new_with_balance(balance: Balance, received_log: Vec<CertifiedTransferOrder>) -> Self { Self { balance, next_sequence_number: SequenceNumber::new(), pending_confirmation: None, confirmed_log: Vec::new(), synchronization_log: Vec::new(), received_log, } } } impl AuthorityState { pub fn new(committee: Committee, name: AuthorityName, secret: SecretKey) -> Self { AuthorityState { committee, name, secret, accounts: BTreeMap::new(), last_transaction_index: VersionNumber::new(), shard_id: 0, number_of_shards: 1, } } pub fn new_shard( committee: Committee, name: AuthorityName, secret: SecretKey, shard_id: u32, number_of_shards: u32, ) -> Self { AuthorityState { committee, name, secret, accounts: BTreeMap::new(), last_transaction_index: VersionNumber::new(), shard_id, number_of_shards, } } pub fn in_shard(&self, address: &FastPayAddress) -> bool { self.which_shard(address) == self.shard_id } pub fn get_shard(num_shards: u32, address: &FastPayAddress) -> u32 { const LAST_INTEGER_INDEX: usize = std::mem::size_of::<FastPayAddress>() - 4; u32::from_le_bytes(address.0[LAST_INTEGER_INDEX..].try_into().expect("4 bytes")) % num_shards } pub fn which_shard(&self, address: &FastPayAddress) -> u32 { Self::get_shard(self.number_of_shards, address) } fn account_state( &self, address: &FastPayAddress, ) -> Result<&AccountOffchainState, FastPayError> { self.accounts .get(address) .ok_or(FastPayError::UnknownSenderAccount) } #[cfg(test)] pub fn accounts_mut(&mut self) -> &mut BTreeMap<FastPayAddress, AccountOffchainState> { &mut self.accounts } }
certificate: CertifiedTransferOrder, ) -> Result<(), FastPayError> { let transfer = &certificate.value.transfer; let recipient = match transfer.recipient { Address::FastPay(recipient) => recipient, Address::Primary(_) => { fp_bail!(FastPayError::InvalidCrossShardUpdate); } }; fp_ensure!(self.in_shard(&recipient), FastPayError::WrongShard); let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); recipient_account.balance = recipient_account .balance .try_add(transfer.amount.into()) .unwrap_or_else(|_| Balance::max()); recipient_account.received_log.push(certificate); Ok(()) } fn handle_primary_synchronization_order( &mut self, order: PrimarySynchronizationOrder, ) -> Result<AccountInfoResponse, FastPayError> { let recipient = order.recipient; fp_ensure!(self.in_shard(&recipient), FastPayError::WrongShard); let recipient_account = self .accounts .entry(recipient) .or_insert_with(AccountOffchainState::new); if order.transaction_index <= self.last_transaction_index { return Ok(recipient_account.make_account_info(recipient)); } fp_ensure!( order.transaction_index == self.last_transaction_index.increment()?, FastPayError::UnexpectedTransactionIndex ); let recipient_balance = recipient_account.balance.try_add(order.amount.into())?; let last_transaction_index = self.last_transaction_index.increment()?; recipient_account.balance = recipient_balance; recipient_account.synchronization_log.push(order); self.last_transaction_index = last_transaction_index; Ok(recipient_account.make_account_info(recipient)) } fn handle_account_info_request( &self, request: AccountInfoRequest, ) -> Result<AccountInfoResponse, FastPayError> { fp_ensure!(self.in_shard(&request.sender), FastPayError::WrongShard); let account = self.account_state(&request.sender)?; let mut response = account.make_account_info(request.sender); if let Some(seq) = request.request_sequence_number { if let Some(cert) = account.confirmed_log.get(usize::from(seq)) { 
response.requested_certificate = Some(cert.clone()); } else { fp_bail!(FastPayError::CertificateNotfound) } } if let Some(idx) = request.request_received_transfers_excluding_first_nth { response.requested_received_transfers = account.received_log[idx..].to_vec(); } Ok(response) } } impl Default for AccountOffchainState { fn default() -> Self { Self { balance: Balance::zero(), next_sequence_number: SequenceNumber::new(), pending_confirmation: None, confirmed_log: Vec::new(), synchronization_log: Vec::new(), received_log: Vec::new(), } } } impl AccountOffchainState { pub fn new() -> Self { Self::default() } fn make_account_info(&self, sender: FastPayAddress) -> AccountInfoResponse { AccountInfoResponse { sender, balance: self.balance, next_sequence_number: self.next_sequence_number, pending_confirmation: self.pending_confirmation.clone(),
random
[ { "content": "pub fn serialize_info_request(value: &AccountInfoRequest) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::InfoReq(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 0, "score": 194362.98059485495 }, { "content": "pub fn serialize_transfer_order_into<W>(\n\n writer: W,\n\n value: &TransferOrder,\n\n) -> Result<(), failure::Error>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n serialize_into(writer, &ShallowSerializedMessage::Order(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 1, "score": 157893.60208946266 }, { "content": "pub fn serialize_transfer_order(value: &TransferOrder) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::Order(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 3, "score": 154285.15679357952 }, { "content": "#[test]\n\nfn test_handle_transfer_order_unknown_sender() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n let (unknown_address, unknown_key) = get_key_pair();\n\n\n\n let mut unknown_sender_transfer = transfer_order.transfer;\n\n unknown_sender_transfer.sender = unknown_address;\n\n let unknown_sender_transfer_order = TransferOrder::new(unknown_sender_transfer, &unknown_key);\n\n assert!(authority_state\n\n .handle_transfer_order(unknown_sender_transfer_order)\n\n .is_err());\n\n assert!(authority_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .is_none());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 4, "score": 153592.5007522364 }, { "content": "#[test]\n\nfn test_handle_transfer_order_exceed_balance() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = 
init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(1000));\n\n assert!(authority_state\n\n .handle_transfer_order(transfer_order)\n\n .is_err());\n\n assert!(authority_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .is_none());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 5, "score": 153527.35173890644 }, { "content": "pub fn serialize_vote_into<W>(writer: W, value: &SignedTransferOrder) -> Result<(), failure::Error>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n serialize_into(writer, &ShallowSerializedMessage::Vote(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 6, "score": 151376.43877805182 }, { "content": "/// The underlying data-fetching mechanism to be provided by the user.\n\npub trait Requester {\n\n type Key: std::cmp::Ord + Send + Sync + Clone + 'static;\n\n type Value: std::fmt::Debug + Send + Clone + 'static;\n\n\n\n /// Request the value corresponding to the given key.\n\n fn query(&mut self, key: Self::Key) -> future::BoxFuture<Self::Value>;\n\n}\n\n\n\n/// Channel for using code to send requests and stop the downloader task.\n\n#[derive(Clone)]\n\npub struct DownloadHandle<K, V>(mpsc::UnboundedSender<DownloadCommand<K, V>>);\n\n\n", "file_path": "fastpay_core/src/downloader.rs", "rank": 7, "score": 147933.240499813 }, { "content": "/// The handler required to create a service.\n\npub trait MessageHandler {\n\n fn handle_message<'a>(&'a mut self, buffer: &'a [u8])\n\n -> future::BoxFuture<'a, Option<Vec<u8>>>;\n\n}\n\n\n\n/// The result of spawning a server is oneshot channel to kill it and a handle to track completion.\n\npub struct SpawnedServer {\n\n complete: futures::channel::oneshot::Sender<()>,\n\n handle: tokio::task::JoinHandle<Result<(), std::io::Error>>,\n\n}\n\n\n\nimpl SpawnedServer {\n\n pub async fn join(self) -> Result<(), std::io::Error> {\n\n // Note 
that dropping `self.complete` would terminate the server.\n\n self.handle.await??;\n\n Ok(())\n\n }\n\n\n\n pub async fn kill(self) -> Result<(), std::io::Error> {\n\n self.complete.send(()).unwrap();\n", "file_path": "fastpay/src/transport.rs", "rank": 8, "score": 147642.62443053542 }, { "content": "pub fn deserialize_message<R>(reader: R) -> Result<SerializedMessage, failure::Error>\n\nwhere\n\n R: std::io::Read,\n\n{\n\n bincode::deserialize_from(reader).map_err(|err| format_err!(\"{}\", err))\n\n}\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 9, "score": 145995.2734671379 }, { "content": "pub trait AuthorityClient {\n\n /// Initiate a new transfer to a FastPay or Primary account.\n\n fn handle_transfer_order(\n\n &mut self,\n\n order: TransferOrder,\n\n ) -> AsyncResult<AccountInfoResponse, FastPayError>;\n\n\n\n /// Confirm a transfer to a FastPay or Primary account.\n\n fn handle_confirmation_order(\n\n &mut self,\n\n order: ConfirmationOrder,\n\n ) -> AsyncResult<AccountInfoResponse, FastPayError>;\n\n\n\n /// Handle information requests for this account.\n\n fn handle_account_info_request(\n\n &mut self,\n\n request: AccountInfoRequest,\n\n ) -> AsyncResult<AccountInfoResponse, FastPayError>;\n\n}\n\n\n", "file_path": "fastpay_core/src/client.rs", "rank": 10, "score": 143749.28027373744 }, { "content": "pub fn serialize_info_response(value: &AccountInfoResponse) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::InfoResp(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 11, "score": 141705.35152617618 }, { "content": "#[cfg(test)]\n\nfn init_transfer_order(\n\n sender: FastPayAddress,\n\n secret: &SecretKey,\n\n recipient: Address,\n\n amount: Amount,\n\n) -> TransferOrder {\n\n let transfer = Transfer {\n\n sender,\n\n recipient,\n\n amount,\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n };\n\n TransferOrder::new(transfer, &secret)\n\n}\n\n\n", "file_path": 
"fastpay_core/src/unit_tests/authority_tests.rs", "rank": 12, "score": 137332.08736607034 }, { "content": "pub fn serialize_vote(value: &SignedTransferOrder) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::Vote(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 13, "score": 133899.66218099496 }, { "content": "pub fn serialize_cert(value: &CertifiedTransferOrder) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::Cert(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 14, "score": 133899.66218099496 }, { "content": "#[cfg(test)]\n\nfn init_certified_transfer_order(\n\n sender: FastPayAddress,\n\n secret: &SecretKey,\n\n recipient: Address,\n\n amount: Amount,\n\n authority_state: &AuthorityState,\n\n) -> CertifiedTransferOrder {\n\n let transfer_order = init_transfer_order(sender, secret, recipient, amount);\n\n let vote = SignedTransferOrder::new(\n\n transfer_order.clone(),\n\n authority_state.name,\n\n &authority_state.secret,\n\n );\n\n let mut builder =\n\n SignatureAggregator::try_new(transfer_order, &authority_state.committee).unwrap();\n\n builder\n\n .append(vote.authority, vote.signature)\n\n .unwrap()\n\n .unwrap()\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 15, "score": 133879.92879010906 }, { "content": "pub fn get_key_pair() -> (FastPayAddress, SecretKey) {\n\n let mut csprng = OsRng;\n\n let keypair = dalek::Keypair::generate(&mut csprng);\n\n (\n\n EdPublicKeyBytes(keypair.public.to_bytes()),\n\n SecretKey(keypair.to_bytes()),\n\n )\n\n}\n\n\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 16, "score": 133016.43165214764 }, { "content": "pub fn serialize_cross_shard(value: &CertifiedTransferOrder) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::CrossShard(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 17, "score": 131006.42165221006 }, { "content": "#[test]\n\nfn test_handle_transfer_order_ok() {\n\n let 
(sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n\n\n let account_info = authority_state\n\n .handle_transfer_order(transfer_order)\n\n .unwrap();\n\n let pending_confirmation = authority_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .clone()\n\n .unwrap();\n\n assert_eq!(\n\n account_info.pending_confirmation.unwrap(),\n\n pending_confirmation\n\n );\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 18, "score": 130634.95038400369 }, { "content": "#[cfg(test)]\n\npub fn dbg_addr(name: u8) -> FastPayAddress {\n\n let addr = [name; dalek::PUBLIC_KEY_LENGTH];\n\n EdPublicKeyBytes(addr)\n\n}\n\n\n\n// TODO: Remove Eq, PartialEq, Ord, PartialOrd and Hash from signatures.\n\n#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Serialize, Deserialize)]\n\npub struct Signature {\n\n pub part1: [u8; dalek::SIGNATURE_LENGTH / 2],\n\n pub part2: [u8; dalek::SIGNATURE_LENGTH / 2],\n\n}\n\n\n\n// Zero the secret key when unallocating.\n\nimpl Drop for SecretKey {\n\n fn drop(&mut self) {\n\n for i in 0..dalek::KEYPAIR_LENGTH {\n\n self.0[i] = 0;\n\n }\n\n }\n\n}\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 19, "score": 129913.04456752786 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_unknown_sender() {\n\n let recipient = dbg_addr(2);\n\n let (sender, sender_key) = get_key_pair();\n\n let mut authority_state = init_state();\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n Address::FastPay(recipient),\n\n Amount::from(5),\n\n &authority_state,\n\n );\n\n\n\n assert!(authority_state\n\n .handle_confirmation_order(ConfirmationOrder::new(certified_transfer_order))\n\n .is_ok());\n\n 
assert!(authority_state.accounts.get(&recipient).is_some());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 20, "score": 127810.01687466819 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_exceed_balance() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = dbg_addr(2);\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n Address::FastPay(recipient),\n\n Amount::from(1000),\n\n &authority_state,\n\n );\n\n assert!(authority_state\n\n .handle_confirmation_order(ConfirmationOrder::new(certified_transfer_order))\n\n .is_ok());\n\n let new_account = authority_state.accounts.get(&sender).unwrap();\n\n assert_eq!(Balance::from(-995), new_account.balance);\n\n assert_eq!(SequenceNumber::from(1), new_account.next_sequence_number);\n\n assert_eq!(new_account.confirmed_log.len(), 1);\n\n assert!(authority_state.accounts.get(&recipient).is_some());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 21, "score": 127744.86786133825 }, { "content": "#[test]\n\nfn test_handle_transfer_order_zero_amount() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n\n\n // test transfer non-positive amount\n\n let mut zero_amount_transfer = transfer_order.transfer;\n\n zero_amount_transfer.amount = Amount::zero();\n\n let zero_amount_transfer_order = TransferOrder::new(zero_amount_transfer, &sender_key);\n\n assert!(authority_state\n\n .handle_transfer_order(zero_amount_transfer_order)\n\n .is_err());\n\n assert!(authority_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .is_none());\n\n}\n\n\n", 
"file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 22, "score": 127579.04472782314 }, { "content": "#[test]\n\nfn test_handle_transfer_order_bad_signature() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n let (_unknown_address, unknown_key) = get_key_pair();\n\n let mut bad_signature_transfer_order = transfer_order.clone();\n\n bad_signature_transfer_order.signature = Signature::new(&transfer_order.transfer, &unknown_key);\n\n assert!(authority_state\n\n .handle_transfer_order(bad_signature_transfer_order)\n\n .is_err());\n\n assert!(authority_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .is_none());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 23, "score": 127579.04472782314 }, { "content": "#[test]\n\nfn test_handle_transfer_order_double_spend() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n\n\n let signed_order = authority_state\n\n .handle_transfer_order(transfer_order.clone())\n\n .unwrap();\n\n let double_spend_signed_order = authority_state\n\n .handle_transfer_order(transfer_order)\n\n .unwrap();\n\n assert_eq!(signed_order, double_spend_signed_order);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 24, "score": 127579.04472782314 }, { "content": "#[test]\n\nfn test_receiving_unconfirmed_transfer_with_lagging_sender_balances() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (mut authority_clients, committee) = init_local_authorities(4);\n\n let mut client0 = 
make_client(authority_clients.clone(), committee.clone());\n\n let mut client1 = make_client(authority_clients.clone(), committee.clone());\n\n let mut client2 = make_client(authority_clients.clone(), committee);\n\n fund_account(&mut authority_clients, client0.address, vec![2, 3, 4, 4]);\n\n // not updating client balances\n\n\n\n // transferring funds from client0 to client1.\n\n // confirming to a quorum of node only at the end.\n\n rt.block_on(async {\n\n client0\n\n .transfer_to_fastpay_unsafe_unconfirmed(\n\n Amount::from(1),\n\n client1.address,\n\n UserData::default(),\n\n )\n\n .await\n\n .unwrap();\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 25, "score": 125491.87557924891 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_receiver_equal_sender() {\n\n let (address, key) = get_key_pair();\n\n let mut authority_state = init_state_with_account(address, Balance::from(1));\n\n\n\n let certified_transfer_order = init_certified_transfer_order(\n\n address,\n\n &key,\n\n Address::FastPay(address),\n\n Amount::from(10),\n\n &authority_state,\n\n );\n\n assert!(authority_state\n\n .handle_confirmation_order(ConfirmationOrder::new(certified_transfer_order))\n\n .is_ok());\n\n let account = authority_state.accounts.get(&address).unwrap();\n\n assert_eq!(Balance::from(1), account.balance);\n\n assert_eq!(SequenceNumber::from(1), account.next_sequence_number);\n\n assert_eq!(account.confirmed_log.len(), 1);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 26, "score": 124920.58846230656 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_receiver_balance_overflow() {\n\n let (sender, sender_key) = get_key_pair();\n\n let (recipient, _) = get_key_pair();\n\n let mut authority_state = init_state_with_accounts(vec![\n\n (sender, Balance::from(1)),\n\n (recipient, Balance::max()),\n\n ]);\n\n\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n 
Address::FastPay(recipient),\n\n Amount::from(1),\n\n &authority_state,\n\n );\n\n assert!(authority_state\n\n .handle_confirmation_order(ConfirmationOrder::new(certified_transfer_order))\n\n .is_ok());\n\n let new_sender_account = authority_state.accounts.get(&sender).unwrap();\n\n assert_eq!(Balance::from(0), new_sender_account.balance);\n\n assert_eq!(\n\n SequenceNumber::from(1),\n\n new_sender_account.next_sequence_number\n\n );\n\n assert_eq!(new_sender_account.confirmed_log.len(), 1);\n\n let new_recipient_account = authority_state.accounts.get(&recipient).unwrap();\n\n assert_eq!(Balance::max(), new_recipient_account.balance);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 27, "score": 124857.28371437336 }, { "content": "#[test]\n\nfn test_handle_transfer_order_bad_sequence_number() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = Address::FastPay(dbg_addr(2));\n\n let authority_state = init_state_with_account(sender, Balance::from(5));\n\n let transfer_order = init_transfer_order(sender, &sender_key, recipient, Amount::from(5));\n\n\n\n let mut sequence_number_state = authority_state;\n\n let sequence_number_state_sender_account =\n\n sequence_number_state.accounts.get_mut(&sender).unwrap();\n\n sequence_number_state_sender_account.next_sequence_number =\n\n sequence_number_state_sender_account\n\n .next_sequence_number\n\n .increment()\n\n .unwrap();\n\n assert!(sequence_number_state\n\n .handle_transfer_order(transfer_order)\n\n .is_err());\n\n assert!(sequence_number_state\n\n .accounts\n\n .get(&sender)\n\n .unwrap()\n\n .pending_confirmation\n\n .is_none());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 28, "score": 124696.15477116886 }, { "content": "#[cfg(test)]\n\nfn init_state_with_account(address: FastPayAddress, balance: Balance) -> AuthorityState {\n\n init_state_with_accounts(std::iter::once((address, balance)))\n\n}\n\n\n", "file_path": 
"fastpay_core/src/unit_tests/authority_tests.rs", "rank": 29, "score": 124511.14023599261 }, { "content": "#[cfg(test)]\n\nfn init_contract() -> (FastPaySmartContractState, AuthorityName, SecretKey) {\n\n let (authority_address, authority_key) = get_key_pair();\n\n let mut authorities = BTreeMap::new();\n\n authorities.insert(\n\n /* address */ authority_address,\n\n /* voting right */ 1,\n\n );\n\n let committee = Committee::new(authorities);\n\n (\n\n FastPaySmartContractState::new(committee),\n\n authority_address,\n\n authority_key,\n\n )\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/fastpay_smart_contract_tests.rs", "rank": 30, "score": 123562.683706282 }, { "content": "#[derive(Clone)]\n\nstruct CertificateRequester<A> {\n\n committee: Committee,\n\n authority_clients: Vec<A>,\n\n sender: FastPayAddress,\n\n}\n\n\n\nimpl<A> CertificateRequester<A> {\n\n fn new(committee: Committee, authority_clients: Vec<A>, sender: FastPayAddress) -> Self {\n\n Self {\n\n committee,\n\n authority_clients,\n\n sender,\n\n }\n\n }\n\n}\n\n\n\nimpl<A> Requester for CertificateRequester<A>\n\nwhere\n\n A: AuthorityClient + Send + Sync + 'static + Clone,\n\n{\n", "file_path": "fastpay_core/src/client.rs", "rank": 31, "score": 123220.72010583986 }, { "content": "pub fn serialize_message(msg: &SerializedMessage) -> Vec<u8> {\n\n serialize(msg)\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 32, "score": 120601.69925994126 }, { "content": "/// Make one transfer order per account, up to `max_orders` transfers.\n\nfn make_benchmark_transfer_orders(\n\n accounts_config: &mut AccountsConfig,\n\n max_orders: usize,\n\n) -> (Vec<TransferOrder>, Vec<(FastPayAddress, Bytes)>) {\n\n let mut orders = Vec::new();\n\n let mut serialized_orders = Vec::new();\n\n // TODO: deterministic sequence of orders to recover from interrupted benchmarks.\n\n let mut next_recipient = get_key_pair().0;\n\n for account in accounts_config.accounts_mut() {\n\n let transfer = Transfer 
{\n\n sender: account.address,\n\n recipient: Address::FastPay(next_recipient),\n\n amount: Amount::from(1),\n\n sequence_number: account.next_sequence_number,\n\n user_data: UserData::default(),\n\n };\n\n debug!(\"Preparing transfer order: {:?}\", transfer);\n\n account.next_sequence_number = account.next_sequence_number.increment().unwrap();\n\n next_recipient = account.address;\n\n let order = TransferOrder::new(transfer.clone(), &account.key);\n\n orders.push(order.clone());\n\n let serialized_order = serialize_transfer_order(&order);\n\n serialized_orders.push((account.address, serialized_order.into()));\n\n if serialized_orders.len() >= max_orders {\n\n break;\n\n }\n\n }\n\n (orders, serialized_orders)\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 33, "score": 118390.0622811185 }, { "content": "#[cfg(test)]\n\nfn init_primary_synchronization_order(recipient: FastPayAddress) -> PrimarySynchronizationOrder {\n\n let mut transaction_index = VersionNumber::new();\n\n transaction_index = transaction_index.increment().unwrap();\n\n PrimarySynchronizationOrder {\n\n recipient,\n\n amount: Amount::from(5),\n\n transaction_index,\n\n }\n\n}\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 34, "score": 117298.5403015507 }, { "content": "// Operations are considered successful when they successfully reach a quorum of authorities.\n\npub trait Client {\n\n /// Send money to a FastPay account.\n\n fn transfer_to_fastpay(\n\n &mut self,\n\n amount: Amount,\n\n recipient: FastPayAddress,\n\n user_data: UserData,\n\n ) -> AsyncResult<CertifiedTransferOrder, failure::Error>;\n\n\n\n /// Send money to a Primary account.\n\n fn transfer_to_primary(\n\n &mut self,\n\n amount: Amount,\n\n recipient: PrimaryAddress,\n\n user_data: UserData,\n\n ) -> AsyncResult<CertifiedTransferOrder, failure::Error>;\n\n\n\n /// Receive money from FastPay.\n\n fn receive_from_fastpay(\n\n &mut self,\n", "file_path": "fastpay_core/src/client.rs", "rank": 
35, "score": 115273.27480709559 }, { "content": "#[test]\n\nfn test_account_state_unknown_account() {\n\n let sender = dbg_addr(1);\n\n let unknown_address = dbg_addr(99);\n\n let authority_state = init_state_with_account(sender, Balance::from(5));\n\n assert!(authority_state.account_state(&unknown_address).is_err());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 36, "score": 114553.32958012784 }, { "content": "pub trait Digestible {\n\n fn digest(&self) -> [u8; 32];\n\n}\n\n\n\nimpl Digestible for [u8; 5] {\n\n fn digest(self: &[u8; 5]) -> [u8; 32] {\n\n let mut h = dalek::Sha512::new();\n\n let mut hash = [0u8; 64];\n\n let mut digest = [0u8; 32];\n\n h.update(&self);\n\n hash.copy_from_slice(h.finalize().as_slice());\n\n digest.copy_from_slice(&hash[..32]);\n\n digest\n\n }\n\n}\n\n\n\nimpl Signature {\n\n pub fn to_array(&self) -> [u8; 64] {\n\n let mut sig: [u8; 64] = [0; 64];\n\n sig[0..32].clone_from_slice(&self.part1);\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 37, "score": 112975.84464491416 }, { "content": "#[test]\n\nfn test_certificates() {\n\n let (a1, sec1) = get_key_pair();\n\n let (a2, sec2) = get_key_pair();\n\n let (a3, sec3) = get_key_pair();\n\n\n\n let mut authorities = BTreeMap::new();\n\n authorities.insert(/* address */ a1, /* voting right */ 1);\n\n authorities.insert(/* address */ a2, /* voting right */ 1);\n\n let committee = Committee::new(authorities);\n\n\n\n let transfer = Transfer {\n\n sender: a1,\n\n recipient: Address::FastPay(a2),\n\n amount: Amount::from(1),\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n };\n\n let order = TransferOrder::new(transfer.clone(), &sec1);\n\n let bad_order = TransferOrder::new(transfer, &sec2);\n\n\n", "file_path": "fastpay_core/src/unit_tests/messages_tests.rs", "rank": 38, "score": 112555.10147777494 }, { "content": "/// Try to make certificates from orders and server configs\n\nfn 
make_benchmark_certificates_from_orders_and_server_configs(\n\n orders: Vec<TransferOrder>,\n\n server_config: Vec<&str>,\n\n) -> Vec<(FastPayAddress, Bytes)> {\n\n let mut keys = Vec::new();\n\n for file in server_config {\n\n let server_config = AuthorityServerConfig::read(file).expect(\"Fail to read server config\");\n\n keys.push((server_config.authority.address, server_config.key));\n\n }\n\n let mut serialized_certificates = Vec::new();\n\n for order in orders {\n\n let mut certificate = CertifiedTransferOrder {\n\n value: order.clone(),\n\n signatures: Vec::new(),\n\n };\n\n let committee = Committee {\n\n voting_rights: keys.iter().map(|(k, _)| (*k, 1)).collect(),\n\n total_votes: keys.len(),\n\n };\n\n for i in 0..committee.quorum_threshold() {\n\n let (pubx, secx) = keys.get(i).unwrap();\n\n let sig = Signature::new(&certificate.value, secx);\n\n certificate.signatures.push((*pubx, sig));\n\n }\n\n let serialized_certificate = serialize_cert(&certificate);\n\n serialized_certificates.push((order.transfer.sender, serialized_certificate.into()));\n\n }\n\n serialized_certificates\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 39, "score": 112537.19933158555 }, { "content": "pub fn decode_address(s: &str) -> Result<EdPublicKeyBytes, failure::Error> {\n\n let value = base64::decode(s)?;\n\n let mut address = [0u8; dalek::PUBLIC_KEY_LENGTH];\n\n address.copy_from_slice(&value[..dalek::PUBLIC_KEY_LENGTH]);\n\n Ok(EdPublicKeyBytes(address))\n\n}\n\n\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 40, "score": 112269.51660529662 }, { "content": "#[test]\n\nfn test_info_request() {\n\n let req1 = AccountInfoRequest {\n\n sender: dbg_addr(0x20),\n\n request_sequence_number: None,\n\n request_received_transfers_excluding_first_nth: None,\n\n };\n\n let req2 = AccountInfoRequest {\n\n sender: dbg_addr(0x20),\n\n request_sequence_number: Some(SequenceNumber::from(129)),\n\n request_received_transfers_excluding_first_nth: None,\n\n };\n\n\n\n let 
buf1 = serialize_info_request(&req1);\n\n let buf2 = serialize_info_request(&req2);\n\n\n\n let result1 = deserialize_message(buf1.as_slice());\n\n let result2 = deserialize_message(buf2.as_slice());\n\n assert!(result1.is_ok());\n\n assert!(result2.is_ok());\n\n\n", "file_path": "fastpay_core/src/unit_tests/serialize_tests.rs", "rank": 41, "score": 110001.49663798866 }, { "content": "#[cfg(test)]\n\nfn init_state_with_accounts<I: IntoIterator<Item = (FastPayAddress, Balance)>>(\n\n balances: I,\n\n) -> AuthorityState {\n\n let mut state = init_state();\n\n for (address, balance) in balances {\n\n let account = state\n\n .accounts\n\n .entry(address)\n\n .or_insert_with(AccountOffchainState::new);\n\n account.balance = balance;\n\n }\n\n state\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 42, "score": 109064.62808752991 }, { "content": "/// How to send and obtain data packets over an \"active socket\".\n\npub trait DataStream: Send {\n\n fn write_data<'a>(\n\n &'a mut self,\n\n buffer: &'a [u8],\n\n ) -> future::BoxFuture<'a, Result<(), std::io::Error>>;\n\n fn read_data(&mut self) -> future::BoxFuture<Result<Vec<u8>, std::io::Error>>;\n\n}\n\n\n", "file_path": "fastpay/src/transport.rs", "rank": 43, "score": 107414.47643578096 }, { "content": "#[test]\n\nfn test_account_state_ok() {\n\n let sender = dbg_addr(1);\n\n let authority_state = init_state_with_account(sender, Balance::from(5));\n\n assert_eq!(\n\n authority_state.accounts.get(&sender).unwrap(),\n\n authority_state.account_state(&sender).unwrap()\n\n );\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 44, "score": 106737.39241712165 }, { "content": "/// A pool of (outgoing) data streams.\n\npub trait DataStreamPool: Send {\n\n fn send_data_to<'a>(\n\n &'a mut self,\n\n buffer: &'a [u8],\n\n address: &'a str,\n\n ) -> future::BoxFuture<'a, Result<(), io::Error>>;\n\n}\n\n\n", "file_path": "fastpay/src/transport.rs", "rank": 45, 
"score": 105277.25146172913 }, { "content": "pub trait FastPaySmartContract {\n\n /// Initiate a transfer from Primary to FastPay.\n\n fn handle_funding_transaction(\n\n &mut self,\n\n transaction: FundingTransaction,\n\n ) -> Result<(), failure::Error>;\n\n\n\n /// Finalize a transfer from FastPay to Primary.\n\n fn handle_redeem_transaction(\n\n &mut self,\n\n transaction: RedeemTransaction,\n\n ) -> Result<(), failure::Error>;\n\n}\n\n\n\nimpl FastPaySmartContract for FastPaySmartContractState {\n\n /// Initiate a transfer to FastPay.\n\n fn handle_funding_transaction(\n\n &mut self,\n\n transaction: FundingTransaction,\n\n ) -> Result<(), failure::Error> {\n", "file_path": "fastpay_core/src/fastpay_smart_contract.rs", "rank": 46, "score": 105220.34163038354 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_ok() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = dbg_addr(2);\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n Address::FastPay(recipient),\n\n Amount::from(5),\n\n &authority_state,\n\n );\n\n\n\n let old_account = authority_state.accounts.get_mut(&sender).unwrap();\n\n let mut next_sequence_number = old_account.next_sequence_number;\n\n next_sequence_number = next_sequence_number.increment().unwrap();\n\n let mut remaining_balance = old_account.balance;\n\n remaining_balance = remaining_balance\n\n .try_sub(certified_transfer_order.value.transfer.amount.into())\n\n .unwrap();\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 47, "score": 104078.80283028545 }, { "content": "#[derive(Clone)]\n\nstruct LocalRequester(Arc<AtomicU32>);\n\n\n\nimpl LocalRequester {\n\n fn new() -> Self {\n\n Self(Arc::new(AtomicU32::new(0)))\n\n }\n\n}\n\n\n\nimpl Requester for LocalRequester {\n\n type Key = &'static str;\n\n type Value = u32;\n\n\n\n fn query(&mut self, _key: 
Self::Key) -> future::BoxFuture<Self::Value> {\n\n Box::pin(future::ready(self.0.fetch_add(1, Ordering::Relaxed)))\n\n }\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/downloader_tests.rs", "rank": 48, "score": 102829.52628301513 }, { "content": "#[test]\n\nfn test_handle_cross_shard_recipient_commit() {\n\n let (sender, sender_key) = get_key_pair();\n\n let (recipient, _) = get_key_pair();\n\n // Sender has no account on this shard.\n\n let mut authority_state = init_state_with_account(recipient, Balance::from(1));\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n Address::FastPay(recipient),\n\n Amount::from(10),\n\n &authority_state,\n\n );\n\n assert!(authority_state\n\n .handle_cross_shard_recipient_commit(certified_transfer_order)\n\n .is_ok());\n\n let account = authority_state.accounts.get(&recipient).unwrap();\n\n assert_eq!(Balance::from(11), account.balance);\n\n assert_eq!(SequenceNumber::from(0), account.next_sequence_number);\n\n assert_eq!(account.confirmed_log.len(), 0);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 49, "score": 102072.42191525464 }, { "content": "pub fn address_from_base64<'de, D>(deserializer: D) -> Result<EdPublicKeyBytes, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n let value = decode_address(&s).map_err(|err| serde::de::Error::custom(err.to_string()))?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 50, "score": 101818.18592614247 }, { "content": "#[test]\n\nfn test_handle_primary_synchronization_order_update() {\n\n let mut state = init_state();\n\n let mut updated_transaction_index = state.last_transaction_index;\n\n let address = dbg_addr(1);\n\n let order = init_primary_synchronization_order(address);\n\n\n\n assert!(state\n\n .handle_primary_synchronization_order(order.clone())\n\n .is_ok());\n\n updated_transaction_index = 
updated_transaction_index.increment().unwrap();\n\n assert_eq!(state.last_transaction_index, updated_transaction_index);\n\n let account = state.accounts.get(&address).unwrap();\n\n assert_eq!(account.balance, order.amount.into());\n\n assert_eq!(state.accounts.len(), 1);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 51, "score": 101796.56085025496 }, { "content": "pub fn serialize_cert_into<W>(\n\n writer: W,\n\n value: &CertifiedTransferOrder,\n\n) -> Result<(), failure::Error>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n serialize_into(writer, &ShallowSerializedMessage::Cert(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 52, "score": 101521.3038620875 }, { "content": "#[test]\n\nfn test_initiating_valid_transfer_despite_bad_authority() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (recipient, _) = get_key_pair();\n\n\n\n let mut sender = init_local_client_state_with_bad_authority(vec![4, 4, 4, 4]);\n\n sender.balance = Balance::from(4);\n\n let certificate = rt\n\n .block_on(sender.transfer_to_fastpay(\n\n Amount::from(3),\n\n recipient,\n\n UserData(Some(*b\"hello...........hello...........\")),\n\n ))\n\n .unwrap();\n\n assert_eq!(sender.next_sequence_number, SequenceNumber::from(1));\n\n assert_eq!(sender.pending_transfer, None);\n\n assert_eq!(\n\n rt.block_on(sender.get_strong_majority_balance()),\n\n Balance::from(1)\n\n );\n\n assert_eq!(\n\n rt.block_on(sender.request_certificate(sender.address, SequenceNumber::from(0)))\n\n .unwrap(),\n\n certificate\n\n );\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 53, "score": 99676.47279025259 }, { "content": "#[test]\n\nfn test_handle_primary_synchronization_order_double_spend() {\n\n let mut state = init_state();\n\n let mut updated_transaction_index = state.last_transaction_index;\n\n let address = dbg_addr(1);\n\n let order = init_primary_synchronization_order(address);\n\n\n\n assert!(state\n\n 
.handle_primary_synchronization_order(order.clone())\n\n .is_ok());\n\n updated_transaction_index = updated_transaction_index.increment().unwrap();\n\n // Replays are ignored.\n\n assert!(state\n\n .handle_primary_synchronization_order(order.clone())\n\n .is_ok());\n\n assert_eq!(state.last_transaction_index, updated_transaction_index);\n\n let account = state.accounts.get(&address).unwrap();\n\n assert_eq!(account.balance, order.amount.into());\n\n assert_eq!(state.accounts.len(), 1);\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 54, "score": 99643.53218277049 }, { "content": "#[test]\n\nfn test_handle_confirmation_order_bad_sequence_number() {\n\n let (sender, sender_key) = get_key_pair();\n\n let recipient = dbg_addr(2);\n\n let mut authority_state = init_state_with_account(sender, Balance::from(5));\n\n let sender_account = authority_state.accounts.get_mut(&sender).unwrap();\n\n sender_account.next_sequence_number = sender_account.next_sequence_number.increment().unwrap();\n\n // let old_account = sender_account;\n\n\n\n let old_balance;\n\n let old_seq_num;\n\n {\n\n let old_account = authority_state.accounts.get_mut(&sender).unwrap();\n\n old_balance = old_account.balance;\n\n old_seq_num = old_account.next_sequence_number;\n\n }\n\n\n\n let certified_transfer_order = init_certified_transfer_order(\n\n sender,\n\n &sender_key,\n\n Address::FastPay(recipient),\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 55, "score": 99643.53218277049 }, { "content": "pub fn serialize_error(value: &FastPayError) -> Vec<u8> {\n\n serialize(&ShallowSerializedMessage::Error(value))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 56, "score": 99460.16962083962 }, { "content": "fn deserialize_response(response: &[u8]) -> Option<AccountInfoResponse> {\n\n match deserialize_message(response) {\n\n Ok(SerializedMessage::InfoResp(info)) => Some(*info),\n\n Ok(SerializedMessage::Error(error)) => {\n\n 
error!(\"Received error value: {}\", error);\n\n None\n\n }\n\n Ok(_) => {\n\n error!(\"Unexpected return value\");\n\n None\n\n }\n\n Err(error) => {\n\n error!(\n\n \"Unexpected error: {} while deserializing {:?}\",\n\n error, response\n\n );\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(StructOpt)]\n\n#[structopt(\n\n name = \"FastPay Client\",\n\n about = \"A Byzantine fault tolerant payments sidechain with low-latency finality and high throughput\"\n\n)]\n", "file_path": "fastpay/src/client.rs", "rank": 57, "score": 98038.06714419808 }, { "content": "pub fn address_as_base64<S>(key: &EdPublicKeyBytes, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::ser::Serializer,\n\n{\n\n serializer.serialize_str(&encode_address(key))\n\n}\n\n\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 58, "score": 94682.3675147789 }, { "content": "fn mass_update_recipients(\n\n accounts_config: &mut AccountsConfig,\n\n certificates: Vec<(FastPayAddress, Bytes)>,\n\n) {\n\n for (_sender, buf) in certificates {\n\n if let Ok(SerializedMessage::Cert(certificate)) = deserialize_message(&buf[..]) {\n\n accounts_config.update_for_received_transfer(*certificate);\n\n }\n\n }\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 59, "score": 89727.06094387716 }, { "content": "fn make_authority_clients(\n\n committee_config: &CommitteeConfig,\n\n buffer_size: usize,\n\n send_timeout: std::time::Duration,\n\n recv_timeout: std::time::Duration,\n\n) -> HashMap<AuthorityName, network::Client> {\n\n let mut authority_clients = HashMap::new();\n\n for config in &committee_config.authorities {\n\n let config = config.clone();\n\n let client = network::Client::new(\n\n config.network_protocol,\n\n config.host,\n\n config.base_port,\n\n config.num_shards,\n\n buffer_size,\n\n send_timeout,\n\n recv_timeout,\n\n );\n\n authority_clients.insert(config.address, client);\n\n }\n\n authority_clients\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 60, "score": 
88935.46046142447 }, { "content": "/// Try to aggregate votes into certificates.\n\nfn make_benchmark_certificates_from_votes(\n\n committee_config: &CommitteeConfig,\n\n votes: Vec<SignedTransferOrder>,\n\n) -> Vec<(FastPayAddress, Bytes)> {\n\n let committee = Committee::new(committee_config.voting_rights());\n\n let mut aggregators = HashMap::new();\n\n let mut certificates = Vec::new();\n\n let mut done_senders = HashSet::new();\n\n for vote in votes {\n\n // We aggregate votes indexed by sender.\n\n let address = vote.value.transfer.sender;\n\n if done_senders.contains(&address) {\n\n continue;\n\n }\n\n debug!(\n\n \"Processing vote on {}'s transfer by {}\",\n\n encode_address(&address),\n\n encode_address(&vote.authority)\n\n );\n\n let value = vote.value;\n", "file_path": "fastpay/src/client.rs", "rank": 61, "score": 87458.74961394479 }, { "content": "fn make_authority_mass_clients(\n\n committee_config: &CommitteeConfig,\n\n buffer_size: usize,\n\n send_timeout: std::time::Duration,\n\n recv_timeout: std::time::Duration,\n\n max_in_flight: u64,\n\n) -> Vec<(u32, network::MassClient)> {\n\n let mut authority_clients = Vec::new();\n\n for config in &committee_config.authorities {\n\n let client = network::MassClient::new(\n\n config.network_protocol,\n\n config.host.clone(),\n\n config.base_port,\n\n buffer_size,\n\n send_timeout,\n\n recv_timeout,\n\n max_in_flight / config.num_shards as u64, // Distribute window to diff shards\n\n );\n\n authority_clients.push((config.num_shards, client));\n\n }\n\n authority_clients\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 62, "score": 86704.40581068787 }, { "content": "#[cfg(test)]\n\nfn init_state() -> AuthorityState {\n\n let (authority_address, authority_key) = get_key_pair();\n\n let mut authorities = BTreeMap::new();\n\n authorities.insert(\n\n /* address */ authority_address,\n\n /* voting right */ 1,\n\n );\n\n let committee = Committee::new(authorities);\n\n AuthorityState::new(committee, 
authority_address, authority_key)\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 63, "score": 85937.62448489615 }, { "content": "pub fn encode_address(key: &EdPublicKeyBytes) -> String {\n\n base64::encode(&key.0[..])\n\n}\n\n\n", "file_path": "fastpay_core/src/base_types.rs", "rank": 64, "score": 83803.83663299639 }, { "content": "#[test]\n\nfn test_order() {\n\n let (sender_name, sender_key) = get_key_pair();\n\n\n\n let transfer = Transfer {\n\n sender: sender_name,\n\n recipient: Address::Primary(dbg_addr(0x20)),\n\n amount: Amount::from(5),\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n };\n\n let transfer_order = TransferOrder::new(transfer, &sender_key);\n\n\n\n let buf = serialize_transfer_order(&transfer_order);\n\n let result = deserialize_message(buf.as_slice());\n\n assert!(result.is_ok());\n\n if let SerializedMessage::Order(o) = result.unwrap() {\n\n assert!(*o == transfer_order);\n\n } else {\n\n panic!()\n\n }\n", "file_path": "fastpay_core/src/unit_tests/serialize_tests.rs", "rank": 65, "score": 83084.76378433095 }, { "content": "#[test]\n\nfn test_info_response() {\n\n let (sender_name, sender_key) = get_key_pair();\n\n let transfer = Transfer {\n\n sender: sender_name,\n\n recipient: Address::Primary(dbg_addr(0x20)),\n\n amount: Amount::from(5),\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n };\n\n let order = TransferOrder::new(transfer, &sender_key);\n\n\n\n let (auth_name, auth_key) = get_key_pair();\n\n let vote = SignedTransferOrder::new(order.clone(), auth_name, &auth_key);\n\n\n\n let mut cert = CertifiedTransferOrder {\n\n value: order,\n\n signatures: Vec::new(),\n\n };\n\n\n\n for _ in 0..3 {\n", "file_path": "fastpay_core/src/unit_tests/serialize_tests.rs", "rank": 66, "score": 81508.67898769425 }, { "content": "#[test]\n\nfn test_bidirectional_transfer() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (mut 
authority_clients, committee) = init_local_authorities(4);\n\n let mut client1 = make_client(authority_clients.clone(), committee.clone());\n\n let mut client2 = make_client(authority_clients.clone(), committee);\n\n fund_account(&mut authority_clients, client1.address, vec![2, 3, 4, 4]);\n\n // Update client1's local balance accordingly.\n\n client1.balance = rt.block_on(client1.get_strong_majority_balance());\n\n assert_eq!(client1.balance, Balance::from(3));\n\n\n\n let certificate = rt\n\n .block_on(client1.transfer_to_fastpay(\n\n Amount::from(3),\n\n client2.address,\n\n UserData::default(),\n\n ))\n\n .unwrap();\n\n\n\n assert_eq!(client1.next_sequence_number, SequenceNumber::from(1));\n\n assert_eq!(client1.pending_transfer, None);\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 67, "score": 81291.29615312966 }, { "content": "#[test]\n\nfn test_signed_values() {\n\n let mut authorities = BTreeMap::new();\n\n let (a1, sec1) = get_key_pair();\n\n let (a2, sec2) = get_key_pair();\n\n let (a3, sec3) = get_key_pair();\n\n\n\n authorities.insert(/* address */ a1, /* voting right */ 1);\n\n authorities.insert(/* address */ a2, /* voting right */ 0);\n\n let committee = Committee::new(authorities);\n\n\n\n let transfer = Transfer {\n\n sender: a1,\n\n recipient: Address::FastPay(a2),\n\n amount: Amount::from(1),\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n };\n\n let order = TransferOrder::new(transfer.clone(), &sec1);\n\n let bad_order = TransferOrder::new(transfer, &sec2);\n\n\n", "file_path": "fastpay_core/src/unit_tests/messages_tests.rs", "rank": 68, "score": 81278.94549656911 }, { "content": "#[test]\n\nfn test_time_order() {\n\n let (sender_name, sender_key) = get_key_pair();\n\n let transfer = Transfer {\n\n sender: sender_name,\n\n recipient: Address::Primary(dbg_addr(0x20)),\n\n amount: Amount::from(5),\n\n sequence_number: SequenceNumber::new(),\n\n user_data: UserData::default(),\n\n 
};\n\n\n\n let mut buf = Vec::new();\n\n let now = Instant::now();\n\n for _ in 0..100 {\n\n let transfer_order = TransferOrder::new(transfer.clone(), &sender_key);\n\n serialize_transfer_order_into(&mut buf, &transfer_order).unwrap();\n\n }\n\n println!(\"Write Order: {} microsec\", now.elapsed().as_micros() / 100);\n\n\n\n let mut buf2 = buf.as_slice();\n\n let now = Instant::now();\n", "file_path": "fastpay_core/src/unit_tests/serialize_tests.rs", "rank": 69, "score": 81254.14926763935 }, { "content": "#[cfg(test)]\n\nfn init_local_authorities(\n\n count: usize,\n\n) -> (HashMap<AuthorityName, LocalAuthorityClient>, Committee) {\n\n let mut key_pairs = Vec::new();\n\n let mut voting_rights = BTreeMap::new();\n\n for _ in 0..count {\n\n let key_pair = get_key_pair();\n\n voting_rights.insert(key_pair.0, 1);\n\n key_pairs.push(key_pair);\n\n }\n\n let committee = Committee::new(voting_rights);\n\n\n\n let mut clients = HashMap::new();\n\n for (address, secret) in key_pairs {\n\n let state = AuthorityState::new(committee.clone(), address, secret);\n\n clients.insert(address, LocalAuthorityClient::new(state));\n\n }\n\n (clients, committee)\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 70, "score": 80865.91062415196 }, { "content": "#[test]\n\nfn test_get_shards() {\n\n let num_shards = 16u32;\n\n let mut found = vec![false; num_shards as usize];\n\n let mut left = num_shards;\n\n loop {\n\n let (address, _) = get_key_pair();\n\n let shard = AuthorityState::get_shard(num_shards, &address) as usize;\n\n println!(\"found {}\", shard);\n\n if !found[shard] {\n\n found[shard] = true;\n\n left -= 1;\n\n if left == 0 {\n\n break;\n\n }\n\n }\n\n }\n\n}\n\n\n\n// helpers\n\n\n", "file_path": "fastpay_core/src/unit_tests/authority_tests.rs", "rank": 71, "score": 80865.91062415196 }, { "content": "#[derive(Clone)]\n\nstruct LocalAuthorityClient(Arc<Mutex<AuthorityState>>);\n\n\n\nimpl AuthorityClient for LocalAuthorityClient {\n\n fn 
handle_transfer_order(\n\n &mut self,\n\n order: TransferOrder,\n\n ) -> AsyncResult<AccountInfoResponse, FastPayError> {\n\n let state = self.0.clone();\n\n Box::pin(async move { state.lock().await.handle_transfer_order(order) })\n\n }\n\n\n\n fn handle_confirmation_order(\n\n &mut self,\n\n order: ConfirmationOrder,\n\n ) -> AsyncResult<AccountInfoResponse, FastPayError> {\n\n let state = self.0.clone();\n\n Box::pin(async move {\n\n state\n\n .lock()\n\n .await\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 72, "score": 80198.26041965821 }, { "content": "#[cfg(test)]\n\nfn init_local_client_state(balances: Vec<i128>) -> ClientState<LocalAuthorityClient> {\n\n let (mut authority_clients, committee) = init_local_authorities(balances.len());\n\n let client = make_client(authority_clients.clone(), committee);\n\n fund_account(&mut authority_clients, client.address, balances);\n\n client\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 73, "score": 79964.83733227543 }, { "content": "#[test]\n\nfn test_receiving_unconfirmed_transfer() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (mut authority_clients, committee) = init_local_authorities(4);\n\n let mut client1 = make_client(authority_clients.clone(), committee.clone());\n\n let mut client2 = make_client(authority_clients.clone(), committee);\n\n fund_account(&mut authority_clients, client1.address, vec![2, 3, 4, 4]);\n\n // not updating client1.balance\n\n\n\n let certificate = rt\n\n .block_on(client1.transfer_to_fastpay_unsafe_unconfirmed(\n\n Amount::from(2),\n\n client2.address,\n\n UserData::default(),\n\n ))\n\n .unwrap();\n\n // Transfer was executed locally, creating negative balance.\n\n assert_eq!(client1.balance, Balance::from(-2));\n\n assert_eq!(client1.next_sequence_number, SequenceNumber::from(1));\n\n assert_eq!(client1.pending_transfer, None);\n\n // ..but not confirmed remotely, hence an unchanged balance and sequence number.\n", 
"file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 74, "score": 79572.79491066001 }, { "content": "#[test]\n\nfn test_initiating_valid_transfer() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (recipient, _) = get_key_pair();\n\n\n\n let mut sender = init_local_client_state(vec![2, 4, 4, 4]);\n\n sender.balance = Balance::from(4);\n\n let certificate = rt\n\n .block_on(sender.transfer_to_fastpay(\n\n Amount::from(3),\n\n recipient,\n\n UserData(Some(*b\"hello...........hello...........\")),\n\n ))\n\n .unwrap();\n\n assert_eq!(sender.next_sequence_number, SequenceNumber::from(1));\n\n assert_eq!(sender.pending_transfer, None);\n\n assert_eq!(\n\n rt.block_on(sender.get_strong_majority_balance()),\n\n Balance::from(1)\n\n );\n\n assert_eq!(\n\n rt.block_on(sender.request_certificate(sender.address, SequenceNumber::from(0)))\n\n .unwrap(),\n\n certificate\n\n );\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 75, "score": 79572.79491066001 }, { "content": "#[cfg(test)]\n\nfn init_local_authorities_bad_1(\n\n count: usize,\n\n) -> (HashMap<AuthorityName, LocalAuthorityClient>, Committee) {\n\n let mut key_pairs = Vec::new();\n\n let mut voting_rights = BTreeMap::new();\n\n for i in 0..count {\n\n let key_pair = get_key_pair();\n\n voting_rights.insert(key_pair.0, 1);\n\n if i + 1 < (count + 2) / 3 {\n\n // init 1 authority with a bad keypair\n\n key_pairs.push(get_key_pair());\n\n } else {\n\n key_pairs.push(key_pair);\n\n }\n\n }\n\n let committee = Committee::new(voting_rights);\n\n\n\n let mut clients = HashMap::new();\n\n for (address, secret) in key_pairs {\n\n let state = AuthorityState::new(committee.clone(), address, secret);\n\n clients.insert(address, LocalAuthorityClient::new(state));\n\n }\n\n (clients, committee)\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 76, "score": 79160.56895216502 }, { "content": "#[test]\n\nfn test_get_strong_majority_balance() {\n\n let mut 
rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n let mut client = init_local_client_state(vec![3, 4, 4, 4]);\n\n assert_eq!(client.get_strong_majority_balance().await, Balance::from(4));\n\n\n\n let mut client = init_local_client_state(vec![0, 3, 4, 4]);\n\n assert_eq!(client.get_strong_majority_balance().await, Balance::from(3));\n\n\n\n let mut client = init_local_client_state(vec![0, 3, 4]);\n\n assert_eq!(client.get_strong_majority_balance().await, Balance::from(0));\n\n });\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 77, "score": 78128.22800331571 }, { "content": "#[test]\n\nfn test_initiating_transfer_low_funds() {\n\n let mut rt = Runtime::new().unwrap();\n\n let (recipient, _) = get_key_pair();\n\n\n\n let mut sender = init_local_client_state(vec![2, 2, 4, 4]);\n\n sender.balance = Balance::from(2);\n\n assert!(rt\n\n .block_on(sender.transfer_to_fastpay(Amount::from(3), recipient, UserData::default()))\n\n .is_err());\n\n // Trying to overspend does not block an account.\n\n assert_eq!(sender.next_sequence_number, SequenceNumber::from(0));\n\n assert_eq!(sender.pending_transfer, None);\n\n assert_eq!(\n\n rt.block_on(sender.get_strong_majority_balance()),\n\n Balance::from(2)\n\n );\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 78, "score": 77957.42895919111 }, { "content": "#[cfg(test)]\n\nfn init_local_client_state_with_bad_authority(\n\n balances: Vec<i128>,\n\n) -> ClientState<LocalAuthorityClient> {\n\n let (mut authority_clients, committee) = init_local_authorities_bad_1(balances.len());\n\n let client = make_client(authority_clients.clone(), committee);\n\n fund_account(&mut authority_clients, client.address, balances);\n\n client\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 79, "score": 76047.97724314062 }, { "content": "#[test]\n\nfn test_handle_redeem_transaction_negative_balance() {\n\n let (mut contract_state, name, secret) = 
init_contract();\n\n let mut redeem_transaction =\n\n init_redeem_transaction(contract_state.committee.clone(), name, secret);\n\n let funding_transaction = init_funding_transaction();\n\n let too_much_money = Amount::from(1000);\n\n assert!(contract_state\n\n .handle_funding_transaction(funding_transaction)\n\n .is_ok());\n\n let old_balance = contract_state.total_balance;\n\n\n\n redeem_transaction\n\n .transfer_certificate\n\n .value\n\n .transfer\n\n .amount = redeem_transaction\n\n .transfer_certificate\n\n .value\n\n .transfer\n\n .amount\n\n .try_add(too_much_money)\n\n .unwrap();\n\n assert!(contract_state\n\n .handle_redeem_transaction(redeem_transaction)\n\n .is_err());\n\n assert_eq!(old_balance, contract_state.total_balance);\n\n assert!(contract_state.accounts.is_empty());\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/fastpay_smart_contract_tests.rs", "rank": 80, "score": 73801.65937167729 }, { "content": "fn serialize_into<T, W>(writer: W, msg: &T) -> Result<(), failure::Error>\n\nwhere\n\n W: std::io::Write,\n\n T: Serialize,\n\n{\n\n bincode::serialize_into(writer, msg).map_err(|err| format_err!(\"{}\", err))\n\n}\n\n\n", "file_path": "fastpay_core/src/serialize.rs", "rank": 81, "score": 73532.4071043628 }, { "content": "#[cfg(test)]\n\nfn fund_account<I: IntoIterator<Item = i128>>(\n\n clients: &mut HashMap<AuthorityName, LocalAuthorityClient>,\n\n address: FastPayAddress,\n\n balances: I,\n\n) {\n\n let mut balances = balances.into_iter().map(Balance::from);\n\n for (_, client) in clients.iter_mut() {\n\n client.0.as_ref().try_lock().unwrap().accounts_mut().insert(\n\n address,\n\n AccountOffchainState::new_with_balance(\n\n balances.next().unwrap_or_else(Balance::zero),\n\n /* no receive log to justify the balances */ Vec::new(),\n\n ),\n\n );\n\n }\n\n}\n\n\n", "file_path": "fastpay_core/src/unit_tests/client_tests.rs", "rank": 82, "score": 68828.38865156738 }, { "content": "def 
info():\n\n\tset_hosts()\n\n\n\n\tprint('\\nclients:')\n\n\tfor client in env.roledefs['client']:\n\n\t\tprint('\\t ssh -i '+env.key_filename+' '+env.user+'@'+client)\n\n\n\n\tprint('\\nservers:')\n\n\tfor server in env.roledefs['authority']:\n\n\t\tprint('\\t ssh -i '+env.key_filename+' '+env.user+'@'+server)\n\n\n\n\tprint('\\nthroughput:')\n\n\tfor server in env.roledefs['throughput']:\n", "file_path": "scripts/fabfile.py", "rank": 83, "score": 65504.587213338425 }, { "content": "def transfer(remote_client=remote_client):\n\n\texecute(initialize_client, remote_client)\n\n\n\n\tf = open(initial_accounts, 'r')\n\n\taddresses = f.read().splitlines()\n\n\tf.close()\n\n\tassert len(addresses) > 0\n\n\n\n\tfor i, sender in enumerate(addresses):\n\n\t\tcommand = './client --committee %s --accounts %s transfer --from %s --to %s 1' \\\n\n\t\t\t% (committee_config, accounts, sender, sender)\n\n\t\tif remote_client:\n\n\t\t\trun(command)\n\n\t\telse:\n", "file_path": "scripts/fabfile.py", "rank": 84, "score": 65300.420528393 }, { "content": "def info():\n\n\tset_hosts()\n\n\n\n\tprint('\\ndev:')\n\n\tfor dev in env.roledefs['dev']:\n", "file_path": "scripts/aws_plot.py", "rank": 85, "score": 64430.96114254334 }, { "content": "struct ServerOpt {\n\n /// Path to the file containing the server configuration of this FastPay authority (including its secret key)\n\n #[structopt(long = \"server\")]\n\n server: String,\n\n\n\n /// Subcommands. 
Acceptable values are run and generate.\n\n #[structopt(subcommand)]\n\n cmd: ServerCommands,\n\n}\n\n\n", "file_path": "fastpay/src/server.rs", "rank": 86, "score": 62709.62578673629 }, { "content": "struct ClientOpt {\n\n /// Sets the file storing the state of our user accounts (an empty one will be created if missing)\n\n #[structopt(long = \"accounts\")]\n\n accounts: String,\n\n\n\n /// Sets the file describing the public configurations of all authorities\n\n #[structopt(long = \"committee\")]\n\n committee: String,\n\n\n\n /// Timeout for sending queries (us)\n\n #[structopt(long = \"send_timeout\", default_value = \"4000000\")]\n\n send_timeout: u64,\n\n\n\n /// Timeout for receiving responses (us)\n\n #[structopt(long = \"recv_timeout\", default_value = \"4000000\")]\n\n recv_timeout: u64,\n\n\n\n /// Maximum size of datagrams received and sent (bytes)\n\n #[structopt(long = \"buffer_size\", default_value = transport::DEFAULT_MAX_DATAGRAM_SIZE)]\n\n buffer_size: String,\n\n\n\n /// Subcommands. 
Acceptable values are transfer, query_balance, benchmark, and create_accounts.\n\n #[structopt(subcommand)]\n\n cmd: ClientCommands,\n\n}\n\n\n", "file_path": "fastpay/src/client.rs", "rank": 87, "score": 62709.62578673629 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ClientServerBenchmark {\n\n network_protocol: transport::NetworkProtocol,\n\n host: String,\n\n port: u32,\n\n committee_size: usize,\n\n num_shards: u32,\n\n max_in_flight: usize,\n\n num_accounts: usize,\n\n send_timeout: Duration,\n\n recv_timeout: Duration,\n\n buffer_size: usize,\n\n cross_shard_queue_size: usize,\n\n}\n\n\n", "file_path": "fastpay/src/bench.rs", "rank": 88, "score": 61476.65692083484 }, { "content": "/// An implementation of DataStream based on UDP.\n\nstruct UdpDataStream {\n\n socket: UdpSocket,\n\n address: String,\n\n buffer: Vec<u8>,\n\n}\n\n\n\nimpl UdpDataStream {\n\n async fn connect(address: String, max_data_size: usize) -> Result<Self, std::io::Error> {\n\n let socket = UdpSocket::bind(&\"0.0.0.0:0\").await?;\n\n let buffer = vec![0u8; max_data_size];\n\n Ok(Self {\n\n socket,\n\n address,\n\n buffer,\n\n })\n\n }\n\n}\n\n\n\nimpl DataStream for UdpDataStream {\n\n fn write_data<'a>(\n", "file_path": "fastpay/src/transport.rs", "rank": 89, "score": 61472.475167669756 }, { "content": "/// An implementation of DataStream based on TCP.\n\nstruct TcpDataStream {\n\n stream: TcpStream,\n\n max_data_size: usize,\n\n}\n\n\n\nimpl TcpDataStream {\n\n async fn connect(address: String, max_data_size: usize) -> Result<Self, std::io::Error> {\n\n let stream = TcpStream::connect(address).await?;\n\n stream.set_send_buffer_size(max_data_size)?;\n\n stream.set_recv_buffer_size(max_data_size)?;\n\n Ok(Self {\n\n stream,\n\n max_data_size,\n\n })\n\n }\n\n\n\n async fn tcp_write_data<S>(stream: &mut S, buffer: &[u8]) -> Result<(), std::io::Error>\n\n where\n\n S: AsyncWrite + Unpin,\n\n {\n", "file_path": "fastpay/src/transport.rs", "rank": 90, "score": 61472.475167669756 }, { 
"content": "struct RunningServerState {\n\n server: Server,\n\n cross_shard_sender: mpsc::Sender<(Vec<u8>, ShardId)>,\n\n}\n\n\n\nimpl MessageHandler for RunningServerState {\n\n fn handle_message<'a>(\n\n &'a mut self,\n\n buffer: &'a [u8],\n\n ) -> futures::future::BoxFuture<'a, Option<Vec<u8>>> {\n\n Box::pin(async move {\n\n let result = deserialize_message(buffer);\n\n let reply = match result {\n\n Err(_) => Err(FastPayError::InvalidDecoding),\n\n Ok(result) => {\n\n match result {\n\n SerializedMessage::Order(message) => self\n\n .server\n\n .state\n\n .handle_transfer_order(*message)\n", "file_path": "fastpay/src/network.rs", "rank": 91, "score": 61472.475167669756 }, { "content": "/// An implementation of DataStreamPool based on UDP.\n\nstruct UdpDataStreamPool {\n\n socket: UdpSocket,\n\n}\n\n\n\nimpl UdpDataStreamPool {\n\n async fn new() -> Result<Self, std::io::Error> {\n\n let socket = UdpSocket::bind(&\"0.0.0.0:0\").await?;\n\n Ok(Self { socket })\n\n }\n\n}\n\n\n\nimpl DataStreamPool for UdpDataStreamPool {\n\n fn send_data_to<'a>(\n\n &'a mut self,\n\n buffer: &'a [u8],\n\n address: &'a str,\n\n ) -> future::BoxFuture<'a, Result<(), std::io::Error>> {\n\n Box::pin(async move {\n\n self.socket.send_to(buffer, address).await?;\n\n Ok(())\n", "file_path": "fastpay/src/transport.rs", "rank": 92, "score": 60322.67795442079 }, { "content": "/// An implementation of DataStreamPool based on TCP.\n\nstruct TcpDataStreamPool {\n\n streams: HashMap<String, TcpStream>,\n\n}\n\n\n\nimpl TcpDataStreamPool {\n\n async fn new() -> Result<Self, std::io::Error> {\n\n let streams = HashMap::new();\n\n Ok(Self { streams })\n\n }\n\n\n\n async fn get_stream(&mut self, address: &str) -> Result<&mut TcpStream, io::Error> {\n\n if !self.streams.contains_key(address) {\n\n match TcpStream::connect(address).await {\n\n Ok(s) => {\n\n self.streams.insert(address.to_string(), s);\n\n }\n\n Err(error) => {\n\n error!(\"Failed to open connection to {}: {}\", address, 
error);\n\n return Err(error);\n\n }\n", "file_path": "fastpay/src/transport.rs", "rank": 93, "score": 60322.67795442079 }, { "content": "fn main() {\n\n env_logger::from_env(env_logger::Env::default().default_filter_or(\"info\")).init();\n\n let benchmark = ClientServerBenchmark::from_command_line();\n\n\n\n let (states, orders) = benchmark.make_structures();\n\n\n\n // Start the servers on the thread pool\n\n for state in states {\n\n // Make special single-core runtime for each server\n\n let b = benchmark.clone();\n\n thread::spawn(move || {\n\n let mut runtime = Builder::new()\n\n .enable_all()\n\n .basic_scheduler()\n\n .thread_stack_size(15 * 1024 * 1024)\n\n .build()\n\n .unwrap();\n\n\n\n runtime.block_on(async move {\n\n let server = b.spawn_server(state).await;\n", "file_path": "fastpay/src/bench.rs", "rank": 94, "score": 59507.31646621789 }, { "content": "fn main() {\n\n env_logger::from_env(env_logger::Env::default().default_filter_or(\"info\")).init();\n\n let matches = ClientOpt::from_args();\n\n\n\n let send_timeout = Duration::from_micros(matches.send_timeout);\n\n let recv_timeout = Duration::from_micros(matches.recv_timeout);\n\n let accounts_config_path = &matches.accounts;\n\n let committee_config_path = &matches.committee;\n\n let buffer_size = matches.buffer_size.parse::<usize>().unwrap();\n\n\n\n let mut accounts_config =\n\n AccountsConfig::read_or_create(accounts_config_path).expect(\"Unable to read user accounts\");\n\n let committee_config =\n\n CommitteeConfig::read(committee_config_path).expect(\"Unable to read committee config file\");\n\n\n\n match matches.cmd {\n\n ClientCommands::Transfer { from, to, amount } => {\n\n let sender = decode_address(&from).expect(\"Failed to decode sender's address\");\n\n let recipient = decode_address(&to).expect(\"Failed to decode recipient's address\");\n\n let amount = Amount::from(amount);\n", "file_path": "fastpay/src/client.rs", "rank": 95, "score": 59507.31646621789 }, { "content": "fn main() 
{\n\n env_logger::from_env(env_logger::Env::default().default_filter_or(\"info\")).init();\n\n let matches = ServerOpt::from_args();\n\n\n\n let server_config_path = &matches.server;\n\n\n\n match matches.cmd {\n\n ServerCommands::Run {\n\n buffer_size,\n\n cross_shard_queue_size,\n\n committee,\n\n initial_accounts,\n\n initial_balance,\n\n shard,\n\n } => {\n\n let committee_config_path = &committee;\n\n let initial_accounts_config_path = &initial_accounts;\n\n let initial_balance = Balance::from(initial_balance.parse::<i128>().unwrap());\n\n let buffer_size = buffer_size.parse::<usize>().unwrap();\n\n // let parsed_cross_shard_queue_size = cross_shard_queue_size.parse::<usize>().unwrap();\n", "file_path": "fastpay/src/server.rs", "rank": 96, "score": 59507.31646621789 }, { "content": "struct TestService {\n\n counter: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl TestService {\n\n fn new(counter: Arc<AtomicUsize>) -> Self {\n\n TestService { counter }\n\n }\n\n}\n\n\n\nimpl MessageHandler for TestService {\n\n fn handle_message<'a>(\n\n &'a mut self,\n\n buffer: &'a [u8],\n\n ) -> future::BoxFuture<'a, Option<Vec<u8>>> {\n\n self.counter.fetch_add(buffer.len(), Ordering::Relaxed);\n\n Box::pin(async move { Some(Vec::from(buffer)) })\n\n }\n\n}\n\n\n", "file_path": "fastpay/src/unit_tests/transport_tests.rs", "rank": 97, "score": 59251.297793067555 }, { "content": "fn make_servers(\n\n local_ip_addr: &str,\n\n server_config_path: &str,\n\n committee_config_path: &str,\n\n initial_accounts_config_path: &str,\n\n initial_balance: Balance,\n\n buffer_size: usize,\n\n cross_shard_queue_size: usize,\n\n) -> Vec<network::Server> {\n\n let server_config =\n\n AuthorityServerConfig::read(server_config_path).expect(\"Fail to read server config\");\n\n let num_shards = server_config.authority.num_shards;\n\n\n\n let mut servers = Vec::new();\n\n for shard in 0..num_shards {\n\n servers.push(make_shard_server(\n\n local_ip_addr,\n\n server_config_path,\n\n committee_config_path,\n\n 
initial_accounts_config_path,\n", "file_path": "fastpay/src/server.rs", "rank": 98, "score": 58224.63259561843 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn make_shard_server(\n\n local_ip_addr: &str,\n\n server_config_path: &str,\n\n committee_config_path: &str,\n\n initial_accounts_config_path: &str,\n\n initial_balance: Balance,\n\n buffer_size: usize,\n\n cross_shard_queue_size: usize,\n\n shard: u32,\n\n) -> network::Server {\n\n let server_config =\n\n AuthorityServerConfig::read(server_config_path).expect(\"Fail to read server config\");\n\n let committee_config =\n\n CommitteeConfig::read(committee_config_path).expect(\"Fail to read committee config\");\n\n let initial_accounts_config = InitialStateConfig::read(initial_accounts_config_path)\n\n .expect(\"Fail to read initial account config\");\n\n\n\n let committee = Committee::new(committee_config.voting_rights());\n\n let num_shards = server_config.authority.num_shards;\n\n\n", "file_path": "fastpay/src/server.rs", "rank": 99, "score": 57035.83163537602 } ]
Rust
crypto/bls/src/macros.rs
sean-sn/lighthouse
c6baa0eed131c5e8ecc5860778ffc7d4a4c18d2d
macro_rules! impl_tree_hash { ($byte_size: expr) => { fn tree_hash_type() -> tree_hash::TreeHashType { tree_hash::TreeHashType::Vector } fn tree_hash_packed_encoding(&self) -> Vec<u8> { unreachable!("Vector should never be packed.") } fn tree_hash_packing_factor() -> usize { unreachable!("Vector should never be packed.") } fn tree_hash_root(&self) -> tree_hash::Hash256 { let values_per_chunk = tree_hash::BYTES_PER_CHUNK; let minimum_chunk_count = ($byte_size + values_per_chunk - 1) / values_per_chunk; tree_hash::merkle_root(&self.serialize(), minimum_chunk_count) } }; } macro_rules! impl_ssz_encode { ($byte_size: expr) => { fn is_ssz_fixed_len() -> bool { true } fn ssz_fixed_len() -> usize { $byte_size } fn ssz_bytes_len(&self) -> usize { $byte_size } fn ssz_append(&self, buf: &mut Vec<u8>) { buf.extend_from_slice(&self.serialize()) } }; } macro_rules! impl_ssz_decode { ($byte_size: expr) => { fn is_ssz_fixed_len() -> bool { true } fn ssz_fixed_len() -> usize { $byte_size } fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> { let len = bytes.len(); let expected = <Self as ssz::Decode>::ssz_fixed_len(); if len != expected { Err(ssz::DecodeError::InvalidByteLength { len, expected }) } else { Self::deserialize(bytes) .map_err(|e| ssz::DecodeError::BytesInvalid(format!("{:?}", e))) } } }; } macro_rules! impl_display { () => { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", hex_encode(self.serialize().to_vec())) } }; } macro_rules! impl_from_str { () => { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.starts_with("0x") { let bytes = hex::decode(&s[2..]).map_err(|e| e.to_string())?; Self::deserialize(&bytes[..]).map_err(|e| format!("{:?}", e)) } else { Err("must start with 0x".to_string()) } } }; } macro_rules! impl_serde_serialize { () => { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&self.to_string()) } }; } macro_rules! 
impl_serde_deserialize { () => { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { pub struct StringVisitor; impl<'de> serde::de::Visitor<'de> for StringVisitor { type Value = String; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a hex string with 0x prefix") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(value.to_string()) } } let string = deserializer.deserialize_str(StringVisitor)?; <Self as std::str::FromStr>::from_str(&string).map_err(serde::de::Error::custom) } }; } macro_rules! impl_debug { () => { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", hex_encode(&self.serialize().to_vec())) } }; } #[cfg(feature = "arbitrary")] macro_rules! impl_arbitrary { ($byte_size: expr) => { fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> { let mut bytes = [0u8; $byte_size]; u.fill_buffer(&mut bytes)?; Self::deserialize(&bytes).map_err(|_| arbitrary::Error::IncorrectFormat) } }; }
macro_rules! impl_tree_hash { ($byte_size: expr) => { fn tree_hash_type() -> tree_hash::TreeHashType { tree_hash::TreeHashType::Vector } fn tree_hash_packed_encoding(&self) -> Vec<u8> { unreachable!("Vector should never be packed.") } fn tree_hash_packing_factor() -> usize { unreachable!("Vector should never be packed.") } fn tree_hash_root(&self) -> tree_hash::Hash256 { let values_per_chunk = tree_hash::BYTES_PER_CHUNK; let minimum_chunk_count = ($byte_size + values_per_chunk - 1) / values_per_chunk; tree_hash::merkle_root(&self.serialize(), minimum_chunk_count) } }; } macro_rules! impl_ssz_encode { ($byte_size: expr) => { fn is_ssz_fixed_len() -> bool { true } fn ssz_fixed_len() -> usize { $byte_size } fn ssz_bytes_len(&self) -> usize { $byte_size } fn ssz_append(&self, buf: &mut Vec<u8>) { buf.extend_from_slice(&self.serialize()) } }; } macro_rules! impl_ssz_decode { ($byte_size: expr) => { fn is_ssz_fixed_len() -> bool { true } fn ssz_fixed_len() -> usize { $byte_size } fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> { let len = bytes.len(); let expected = <Self as ssz::Decode>::ssz_fixed_len(); if len != expected { Err(ssz::DecodeError::InvalidByteLength { len, expected }) } else { Self::deserialize(bytes) .map_err(|e| ssz::DecodeError::BytesInvalid(format!("{:?}", e))) } } }; } macro_rules! impl_display { () => { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", hex_encode(self.serialize().to_vec())) } }; } macro_rules! impl_from_str { () => { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.starts_with("0x") { let bytes = hex::decode(&s[2..]).map_err(|e| e.to_string())?; Self::deserialize(&bytes[..]).map_err(|e| format!("{:?}", e)) } else { Err("must start with 0x".to_string()) } } }; } macro_rules! impl_serde_serialize { () => { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&self.to_string()) } }; } macro_rules! 
impl_serde_deserialize { () => { fn deserialize<D>(deserializer: D) -> Result<Sel
Self::deserialize(&bytes).map_err(|_| arbitrary::Error::IncorrectFormat) } }; }
f, D::Error> where D: Deserializer<'de>, { pub struct StringVisitor; impl<'de> serde::de::Visitor<'de> for StringVisitor { type Value = String; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a hex string with 0x prefix") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(value.to_string()) } } let string = deserializer.deserialize_str(StringVisitor)?; <Self as std::str::FromStr>::from_str(&string).map_err(serde::de::Error::custom) } }; } macro_rules! impl_debug { () => { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", hex_encode(&self.serialize().to_vec())) } }; } #[cfg(feature = "arbitrary")] macro_rules! impl_arbitrary { ($byte_size: expr) => { fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> { let mut bytes = [0u8; $byte_size]; u.fill_buffer(&mut bytes)?;
random
[ { "content": "fn string_to_bytes(string: &str) -> Result<Vec<u8>, String> {\n\n let string = if string.starts_with(\"0x\") {\n\n &string[2..]\n\n } else {\n\n string\n\n };\n\n\n\n hex::decode(string).map_err(|e| format!(\"Unable to decode public or private key: {}\", e))\n\n}\n\n\n", "file_path": "common/eth2_interop_keypairs/src/lib.rs", "rank": 0, "score": 384622.70823354705 }, { "content": "// An alternative to `hex::decode`, to allow for more control of\n\n// the objects created while decoding the secret key.\n\nfn hex_string_to_bytes(data: &str) -> Result<Vec<u8>, String> {\n\n if data.len() % 2 != 0 {\n\n return Err(\"Odd length\".to_string());\n\n }\n\n\n\n let mut vec: Vec<u8> = Vec::new();\n\n for i in 0..data.len() / 2 {\n\n vec.push(\n\n val(&data.as_bytes()[2 * i], 2 * i)? << 4\n\n | val(&data.as_bytes()[2 * i + 1], 2 * i + 1)?,\n\n );\n\n }\n\n\n\n Ok(vec)\n\n}\n\n\n", "file_path": "remote_signer/backend/src/zeroize_string.rs", "rank": 1, "score": 368367.77421637444 }, { "content": "/// Parses a `0x`-prefixed, big-endian hex string as bytes.\n\n///\n\n/// E.g., `0x0102 == vec![1, 2]`\n\nfn hex_to_bytes(hex: &str) -> Result<Vec<u8>, String> {\n\n hex::decode(strip_prefix(hex)?).map_err(|e| format!(\"Failed to parse hex as bytes: {:?}\", e))\n\n}\n\n\n", "file_path": "beacon_node/eth1/src/http.rs", "rank": 2, "score": 355737.6334991586 }, { "content": "fn process_slice_field(new_tree_hash: &[u8], leaf: &mut Hash256, force_update: bool) -> bool {\n\n if force_update || leaf.as_bytes() != new_tree_hash {\n\n leaf.assign_from_slice(&new_tree_hash);\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "consensus/types/src/tree_hash_impls.rs", "rank": 3, "score": 341906.70715965296 }, { "content": "fn verify_checksum(bytes: &[u8], expected_checksum: &str) {\n\n let mut hasher = Sha256::new();\n\n hasher.update(bytes);\n\n let result = hasher.finalize();\n\n\n\n let checksum = hex::encode(&result[..]);\n\n\n\n assert_eq!(\n\n &checksum, 
expected_checksum,\n\n \"Checksum {} did not match {}\",\n\n checksum, expected_checksum\n\n );\n\n}\n\n\n", "file_path": "common/deposit_contract/build.rs", "rank": 4, "score": 341368.01582740387 }, { "content": "/// Encode `len` as a little-endian byte array of `BYTES_PER_LENGTH_OFFSET` length.\n\n///\n\n/// If `len` is larger than `2 ^ BYTES_PER_LENGTH_OFFSET`, a `debug_assert` is raised.\n\npub fn encode_length(len: usize) -> [u8; BYTES_PER_LENGTH_OFFSET] {\n\n // Note: it is possible for `len` to be larger than what can be encoded in\n\n // `BYTES_PER_LENGTH_OFFSET` bytes, triggering this debug assertion.\n\n //\n\n // These are the alternatives to using a `debug_assert` here:\n\n //\n\n // 1. Use `assert`.\n\n // 2. Push an error to the caller (e.g., `Option` or `Result`).\n\n // 3. Ignore it completely.\n\n //\n\n // I have avoided (1) because it's basically a choice between \"produce invalid SSZ\" or \"kill\n\n // the entire program\". I figure it may be possible for an attacker to trigger this assert and\n\n // take the program down -- I think producing invalid SSZ is a better option than this.\n\n //\n\n // I have avoided (2) because this error will need to be propagated upstream, making encoding a\n\n // function which may fail. 
I don't think this is ergonomic and the upsides don't outweigh the\n\n // downsides.\n\n //\n\n // I figure a `debug_assertion` is better than (3) as it will give us a change to detect the\n\n // error during testing.\n", "file_path": "consensus/ssz/src/encode.rs", "rank": 5, "score": 321329.87696129415 }, { "content": "pub fn parse_hex_bytes(matches: &ArgMatches, name: &'static str) -> Result<Vec<u8>, String> {\n\n matches\n\n .value_of(name)\n\n .ok_or_else(|| format!(\"{} not specified\", name))\n\n .and_then(|val| {\n\n if val.starts_with(\"0x\") {\n\n hex::decode(&val[2..]).map_err(|e| format!(\"Unable to parse {}: {:?}\", name, e))\n\n } else {\n\n Err(format!(\"Unable to parse {}, must have 0x prefix\", name))\n\n }\n\n })\n\n}\n", "file_path": "lcli/src/helpers.rs", "rank": 6, "score": 317464.31009794853 }, { "content": "fn decode_and_print<T: Decode + Serialize>(bytes: &[u8]) -> Result<(), String> {\n\n let item = T::from_ssz_bytes(&bytes).map_err(|e| format!(\"Ssz decode failed: {:?}\", e))?;\n\n\n\n println!(\n\n \"{}\",\n\n serde_yaml::to_string(&item)\n\n .map_err(|e| format!(\"Unable to write object to YAML: {:?}\", e))?\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "lcli/src/parse_hex.rs", "rank": 7, "score": 317081.3847591422 }, { "content": "fn decode_and_print<T: Decode + Serialize>(bytes: &[u8]) -> Result<(), String> {\n\n let item = T::from_ssz_bytes(&bytes).map_err(|e| format!(\"Ssz decode failed: {:?}\", e))?;\n\n\n\n println!(\n\n \"{}\",\n\n serde_yaml::to_string(&item)\n\n .map_err(|e| format!(\"Unable to write object to YAML: {:?}\", e))?\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "lcli/src/parse_ssz.rs", "rank": 8, "score": 317081.3847591422 }, { "content": "// Auxiliar function for `hex_string_to_bytes`.\n\nfn val(c: &u8, idx: usize) -> Result<u8, String> {\n\n match c {\n\n b'A'..=b'F' => Ok(c - b'A' + 10),\n\n b'a'..=b'f' => Ok(c - b'a' + 10),\n\n b'0'..=b'9' => Ok(c - b'0'),\n\n _ => Err(format!(\n\n \"Invalid hex character: {} at 
index {}\",\n\n *c as char, idx\n\n )),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod object {\n\n use super::*;\n\n use helpers::*;\n\n use zeroize::Zeroize;\n\n\n\n #[test]\n\n fn v_u8_zeroized() {\n", "file_path": "remote_signer/backend/src/zeroize_string.rs", "rank": 9, "score": 312626.2231844065 }, { "content": "/// Decode `data` from a 0x-prefixed hex string.\n\npub fn decode(s: &str) -> Result<Vec<u8>, String> {\n\n if s.starts_with(\"0x\") {\n\n hex::decode(&s[2..]).map_err(|e| format!(\"invalid hex: {:?}\", e))\n\n } else {\n\n Err(\"hex must have 0x prefix\".to_string())\n\n }\n\n}\n\n\n\npub struct PrefixedHexVisitor;\n\n\n\nimpl<'de> Visitor<'de> for PrefixedHexVisitor {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a hex string with 0x prefix\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n", "file_path": "consensus/serde_utils/src/hex.rs", "rank": 10, "score": 308677.18197656627 }, { "content": "fn process_bool_field(val: bool, leaf: &mut Hash256, force_update: bool) -> bool {\n\n process_u64_field(val as u64, leaf, force_update)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::test_utils::TestRandom;\n\n use crate::Epoch;\n\n use rand::SeedableRng;\n\n use rand_xorshift::XorShiftRng;\n\n use tree_hash::TreeHash;\n\n\n\n fn test_validator_tree_hash(v: &Validator) {\n\n let arena = &mut CacheArena::default();\n\n\n\n let mut cache = v.new_tree_hash_cache(arena);\n\n // With a fresh cache\n\n assert_eq!(\n\n &v.tree_hash_root()[..],\n", "file_path": "consensus/types/src/tree_hash_impls.rs", "rank": 11, "score": 296001.19389578927 }, { "content": "/// Remove any number of newline or carriage returns from the end of a vector of bytes.\n\npub fn strip_off_newlines(mut bytes: Vec<u8>) -> Vec<u8> {\n\n let mut strip_off = 0;\n\n for (i, byte) in bytes.iter().rev().enumerate() {\n\n if *byte == b'\\n' || 
*byte == b'\\r' {\n\n strip_off = i + 1;\n\n } else {\n\n break;\n\n }\n\n }\n\n bytes.truncate(bytes.len() - strip_off);\n\n bytes\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 12, "score": 294641.1506125736 }, { "content": "fn decompress_snappy(data: &[u8]) -> Result<Vec<u8>, String> {\n\n // Exit early if uncompressed data is > GOSSIP_MAX_SIZE\n\n match decompress_len(data) {\n\n Ok(n) if n > GOSSIP_MAX_SIZE => {\n\n return Err(\"ssz_snappy decoded data > GOSSIP_MAX_SIZE\".into());\n\n }\n\n Ok(_) => {}\n\n Err(e) => {\n\n return Err(format!(\"{}\", e));\n\n }\n\n };\n\n let mut decoder = Decoder::new();\n\n match decoder.decompress_vec(data) {\n\n Ok(decompressed_data) => Ok(decompressed_data),\n\n Err(e) => Err(format!(\"{}\", e)),\n\n }\n\n}\n\n\n\nimpl<T: EthSpec> PubsubMessage<T> {\n\n /// Returns the topics that each pubsub message will be sent across, given a supported\n", "file_path": "beacon_node/eth2_libp2p/src/types/pubsub.rs", "rank": 13, "score": 294074.70629386173 }, { "content": "/// Returns the minimum required bytes to represent a given number of bits.\n\n///\n\n/// `bit_len == 0` requires a single byte.\n\nfn bytes_for_bit_len(bit_len: usize) -> usize {\n\n std::cmp::max(1, (bit_len + 7) / 8)\n\n}\n\n\n\n/// An iterator over the bits in a `Bitfield`.\n\npub struct BitIter<'a, T> {\n\n bitfield: &'a Bitfield<T>,\n\n i: usize,\n\n}\n\n\n\nimpl<'a, T: BitfieldBehaviour> Iterator for BitIter<'a, T> {\n\n type Item = bool;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let res = self.bitfield.get(self.i).ok()?;\n\n self.i += 1;\n\n Some(res)\n\n }\n\n}\n\n\n", "file_path": "consensus/ssz_types/src/bitfield.rs", "rank": 14, "score": 289889.2614029598 }, { "content": "pub fn cli_run<E: EthSpec>(matches: &ArgMatches, mut env: Environment<E>) -> Result<(), String> {\n\n let keystore_path: PathBuf = clap_utils::parse_required(matches, KEYSTORE_FLAG)?;\n\n let password_file_path: Option<PathBuf> =\n\n 
clap_utils::parse_optional(matches, PASSWORD_FILE_FLAG)?;\n\n let stdin_inputs = matches.is_present(STDIN_INPUTS_FLAG);\n\n\n\n let spec = env.eth2_config().spec.clone();\n\n let server_url: String = clap_utils::parse_required(matches, BEACON_SERVER_FLAG)?;\n\n let client = BeaconNodeHttpClient::new(\n\n Url::parse(&server_url)\n\n .map_err(|e| format!(\"Failed to parse beacon http server: {:?}\", e))?,\n\n );\n\n\n\n let testnet_config = env\n\n .testnet\n\n .clone()\n\n .expect(\"network should have a valid config\");\n\n\n\n env.runtime().block_on(publish_voluntary_exit::<E>(\n\n &keystore_path,\n", "file_path": "account_manager/src/validator/exit.rs", "rank": 15, "score": 289233.6076773606 }, { "content": "pub fn serialize<S>(bytes: &[u8; BYTES_LEN], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let mut hex_string: String = \"0x\".to_string();\n\n hex_string.push_str(&hex::encode(&bytes));\n\n\n\n serializer.serialize_str(&hex_string)\n\n}\n\n\n", "file_path": "consensus/serde_utils/src/bytes_4_hex.rs", "rank": 16, "score": 281875.8677427227 }, { "content": "#[allow(dead_code)]\n\nfn keypair_from_bytes(mut bytes: Vec<u8>) -> error::Result<Keypair> {\n\n libp2p::core::identity::secp256k1::SecretKey::from_bytes(&mut bytes)\n\n .map(|secret| {\n\n let keypair: libp2p::core::identity::secp256k1::Keypair = secret.into();\n\n Keypair::Secp256k1(keypair)\n\n })\n\n .map_err(|e| format!(\"Unable to parse p2p secret key: {:?}\", e).into())\n\n}\n\n\n", "file_path": "beacon_node/eth2_libp2p/src/service.rs", "rank": 17, "score": 279040.56666975986 }, { "content": "fn process_epoch_field(val: Epoch, leaf: &mut Hash256, force_update: bool) -> bool {\n\n process_u64_field(val.as_u64(), leaf, force_update)\n\n}\n\n\n", "file_path": "consensus/types/src/tree_hash_impls.rs", "rank": 18, "score": 277044.46134780266 }, { "content": "fn process_u64_field(val: u64, leaf: &mut Hash256, force_update: bool) -> bool {\n\n let new_tree_hash = 
int_to_fixed_bytes32(val);\n\n process_slice_field(&new_tree_hash[..], leaf, force_update)\n\n}\n\n\n", "file_path": "consensus/types/src/tree_hash_impls.rs", "rank": 19, "score": 277044.46134780266 }, { "content": "pub fn parse_fork_opt(matches: &ArgMatches, name: &'static str) -> Result<Option<[u8; 4]>, String> {\n\n matches\n\n .value_of(name)\n\n .map(|val| {\n\n if val.starts_with(\"0x\") {\n\n let vec = hex::decode(&val[2..])\n\n .map_err(|e| format!(\"Unable to parse {} as hex: {:?}\", name, e))?;\n\n\n\n if vec.len() != 4 {\n\n Err(format!(\"{} must be exactly 4 bytes\", name))\n\n } else {\n\n let mut arr = [0; 4];\n\n arr.copy_from_slice(&vec);\n\n Ok(arr)\n\n }\n\n } else {\n\n Err(format!(\"Unable to parse {}, must have 0x prefix\", name))\n\n }\n\n })\n\n .transpose()\n\n}\n\n\n", "file_path": "lcli/src/helpers.rs", "rank": 20, "score": 271593.72681256116 }, { "content": "pub fn serialize<S>(byte: &u8, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let hex = format!(\"0x{}\", hex::encode([*byte]));\n\n serializer.serialize_str(&hex)\n\n}\n\n\n", "file_path": "consensus/serde_utils/src/u8_hex.rs", "rank": 21, "score": 270594.576140019 }, { "content": "/// Write some bytes to file.\n\nfn write_to_file(path: &PathBuf, item: &[u8]) -> Result<(), String> {\n\n File::create(path)\n\n .map_err(|e| format!(\"Unable to create {:?}: {:?}\", path, e))\n\n .and_then(|mut file| {\n\n file.write_all(item)\n\n .map(|_| ())\n\n .map_err(|e| format!(\"Unable to write to {:?}: {:?}\", path, e))\n\n })\n\n}\n", "file_path": "testing/state_transition_vectors/src/main.rs", "rank": 22, "score": 268047.2226188901 }, { "content": "pub fn check_tree_hash(expected_str: &str, actual_root: &[u8]) -> Result<(), Error> {\n\n let expected_root = hex::decode(&expected_str[2..])\n\n .map_err(|e| Error::FailedToParseTest(format!(\"{:?}\", e)))?;\n\n let expected_root = Hash256::from_slice(&expected_root);\n\n let tree_hash_root = 
Hash256::from_slice(actual_root);\n\n compare_result::<Hash256, Error>(&Ok(tree_hash_root), &Some(expected_root))\n\n}\n\n\n\nimpl<T: SszStaticType> Case for SszStatic<T> {\n\n fn result(&self, _case_index: usize) -> Result<(), Error> {\n\n check_serialization(&self.value, &self.serialized)?;\n\n check_tree_hash(&self.roots.root, self.value.tree_hash_root().as_bytes())?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: SszStaticType + CachedTreeHash<C>, C: Debug + Sync> Case for SszStaticTHC<T, C> {\n\n fn result(&self, _case_index: usize) -> Result<(), Error> {\n\n check_serialization(&self.value, &self.serialized)?;\n\n check_tree_hash(&self.roots.root, self.value.tree_hash_root().as_bytes())?;\n", "file_path": "testing/ef_tests/src/cases/ssz_static.rs", "rank": 23, "score": 265691.26159876026 }, { "content": "/// Removes the `0x` prefix from some bytes. Returns an error if the prefix is not present.\n\nfn strip_prefix(hex: &str) -> Result<&str, String> {\n\n if hex.starts_with(\"0x\") {\n\n Ok(&hex[2..])\n\n } else {\n\n Err(\"Hex string did not start with `0x`\".to_string())\n\n }\n\n}\n", "file_path": "beacon_node/eth1/src/http.rs", "rank": 24, "score": 263963.9122845484 }, { "content": "fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n }\n\n\n\n state.validators = (0..validator_count)\n\n .collect::<Vec<_>>()\n\n .par_iter()\n\n .map(|&i| Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n\n withdrawal_credentials: Hash256::from_low_u64_le(i as u64),\n", "file_path": "consensus/types/benches/benches.rs", "rank": 25, "score": 259578.37740688177 }, { "content": "fn trim_newline(s: &mut String) {\n\n if 
s.ends_with('\\n') {\n\n s.pop();\n\n if s.ends_with('\\r') {\n\n s.pop();\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 26, "score": 258555.74691030098 }, { "content": "/// Decrease the balance of a validator, saturating upon overflow, as per the spec.\n\n///\n\n/// Spec v0.12.1\n\npub fn decrease_balance<E: EthSpec>(state: &mut BeaconState<E>, index: usize, delta: u64) {\n\n state.balances[index] = state.balances[index].saturating_sub(delta);\n\n}\n", "file_path": "consensus/state_processing/src/common/mod.rs", "rank": 27, "score": 258358.63664675466 }, { "content": "/// Verify that `s` can be used as a CORS origin.\n\n///\n\n/// ## Notes\n\n///\n\n/// We need this function since `warp` will panic if provided an invalid origin. The verification\n\n/// code is taken from here:\n\n///\n\n/// https://github.com/seanmonstar/warp/blob/3d1760c6ca35ce2d03dee0562259d0320e9face3/src/filters/cors.rs#L616\n\n///\n\n/// Ideally we should make a PR to `warp` to expose this behaviour, however we defer this for a\n\n/// later time. 
The impact of a false-positive on this function is fairly limited, since only\n\n/// trusted users should be setting CORS origins.\n\nfn verify_cors_origin_str(s: &str) -> Result<(), String> {\n\n // Always the wildcard origin.\n\n if s == \"*\" {\n\n return Ok(());\n\n }\n\n\n\n let mut parts = s.splitn(2, \"://\");\n\n let scheme = parts\n\n .next()\n\n .ok_or_else(|| format!(\"{} is missing a scheme\", s))?;\n\n let rest = parts\n\n .next()\n\n .ok_or_else(|| format!(\"{} is missing the part following the scheme\", s))?;\n\n\n\n headers::Origin::try_from_parts(scheme, rest, None)\n\n .map_err(|e| format!(\"Unable to parse {}: {}\", s, e))\n\n .map(|_| ())\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "common/warp_utils/src/cors.rs", "rank": 28, "score": 257243.19671424403 }, { "content": "pub fn parse_path(matches: &ArgMatches, name: &'static str) -> Result<PathBuf, String> {\n\n matches\n\n .value_of(name)\n\n .ok_or_else(|| format!(\"{} not specified\", name))?\n\n .parse::<PathBuf>()\n\n .map_err(|e| format!(\"Unable to parse {}: {}\", name, e))\n\n}\n\n\n", "file_path": "lcli/src/helpers.rs", "rank": 29, "score": 253198.37936615312 }, { "content": "fn build_node<E: EthSpec>(env: &mut Environment<E>) -> LocalBeaconNode<E> {\n\n let context = env.core_context();\n\n env.runtime()\n\n .block_on(LocalBeaconNode::production(\n\n context,\n\n testing_client_config(),\n\n ))\n\n .expect(\"should block until node created\")\n\n}\n\n\n", "file_path": "beacon_node/tests/test.rs", "rank": 30, "score": 252462.60258249886 }, { "content": "/// Converts an integer represented as a string to a big-endian byte array.\n\nfn int_to_vec(int_str: &str) -> Vec<u8> {\n\n BigUint::parse_bytes(int_str.as_bytes(), 10)\n\n .expect(\"must be able to parse int\")\n\n .to_bytes_be()\n\n}\n\n\n", "file_path": "crypto/eth2_key_derivation/tests/eip2333_vectors.rs", "rank": 31, "score": 251634.3203670251 }, { "content": "/// Reads a mnemonic phrase from TTY or stdin if `use_stdin == 
true`.\n\npub fn read_input_from_user(use_stdin: bool) -> Result<String, String> {\n\n let mut input = String::new();\n\n if use_stdin {\n\n io::stdin()\n\n .read_line(&mut input)\n\n .map_err(|e| format!(\"Error reading from stdin: {}\", e))?;\n\n } else {\n\n let tty = File::open(\"/dev/tty\").map_err(|e| format!(\"Error opening tty: {}\", e))?;\n\n let mut buf_reader = io::BufReader::new(tty);\n\n buf_reader\n\n .read_line(&mut input)\n\n .map_err(|e| format!(\"Error reading from tty: {}\", e))?;\n\n }\n\n trim_newline(&mut input);\n\n Ok(input)\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 32, "score": 251315.0663217272 }, { "content": "/// Decode bytes as a little-endian usize, returning an `Err` if `bytes.len() !=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\nfn decode_offset(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n let len = bytes.len();\n\n let expected = BYTES_PER_LENGTH_OFFSET;\n\n\n\n if len != expected {\n\n Err(DecodeError::InvalidLengthPrefix { len, expected })\n\n } else {\n\n let mut array: [u8; BYTES_PER_LENGTH_OFFSET] = std::default::Default::default();\n\n array.clone_from_slice(bytes);\n\n\n\n Ok(u32::from_le_bytes(array) as usize)\n\n }\n\n}\n", "file_path": "consensus/ssz/src/decode.rs", "rank": 33, "score": 248289.28362365084 }, { "content": "/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte length from `bytes`, where `bytes.len() >=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\nfn read_offset(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n decode_offset(bytes.get(0..BYTES_PER_LENGTH_OFFSET).ok_or_else(|| {\n\n DecodeError::InvalidLengthPrefix {\n\n len: bytes.len(),\n\n expected: BYTES_PER_LENGTH_OFFSET,\n\n }\n\n })?)\n\n}\n\n\n", "file_path": "consensus/ssz/src/decode.rs", "rank": 34, "score": 248277.96845306168 }, { "content": "// helper function to identify clients from their agent_version. 
Returns the client\n\n// kind and it's associated version and the OS kind.\n\nfn client_from_agent_version(agent_version: &str) -> (ClientKind, String, String) {\n\n let mut agent_split = agent_version.split('/');\n\n match agent_split.next() {\n\n Some(\"Lighthouse\") => {\n\n let kind = ClientKind::Lighthouse;\n\n let mut version = String::from(\"unknown\");\n\n let mut os_version = version.clone();\n\n if let Some(agent_version) = agent_split.next() {\n\n version = agent_version.into();\n\n if let Some(agent_os_version) = agent_split.next() {\n\n os_version = agent_os_version.into();\n\n }\n\n }\n\n (kind, version, os_version)\n\n }\n\n Some(\"teku\") => {\n\n let kind = ClientKind::Teku;\n\n let mut version = String::from(\"unknown\");\n\n let mut os_version = version.clone();\n\n if agent_split.next().is_some() {\n", "file_path": "beacon_node/eth2_libp2p/src/peer_manager/client.rs", "rank": 35, "score": 247949.9576445665 }, { "content": "pub fn subnet_id_to_string(i: u64) -> &'static str {\n\n if i < MAX_SUBNET_ID as u64 {\n\n &SUBNET_ID_TO_STRING\n\n .get(i as usize)\n\n .expect(\"index below MAX_SUBNET_ID\")\n\n } else {\n\n \"subnet id out of range\"\n\n }\n\n}\n\n\n\nimpl SubnetId {\n\n pub fn new(id: u64) -> Self {\n\n id.into()\n\n }\n\n\n\n /// Compute the subnet for an attestation with `attestation_data` where each slot in the\n\n /// attestation epoch contains `committee_count_per_slot` committees.\n\n pub fn compute_subnet_for_attestation_data<T: EthSpec>(\n\n attestation_data: &AttestationData,\n\n committee_count_per_slot: u64,\n", "file_path": "consensus/types/src/subnet_id.rs", "rank": 36, "score": 247764.9763074132 }, { "content": "pub fn get_key_for_col(column: &str, key: &[u8]) -> Vec<u8> {\n\n let mut result = column.as_bytes().to_vec();\n\n result.extend_from_slice(key);\n\n result\n\n}\n\n\n\npub enum KeyValueStoreOp {\n\n PutKeyValue(Vec<u8>, Vec<u8>),\n\n DeleteKey(Vec<u8>),\n\n}\n\n\n", "file_path": "beacon_node/store/src/lib.rs", 
"rank": 37, "score": 247255.11433103235 }, { "content": "// Note: `state.canonical_root()` uses whatever `tree_hash` that the `types` crate\n\n// uses, which is not necessarily this crate. If you want to ensure that types is\n\n// using this local version of `tree_hash`, ensure you add a workspace-level\n\n// [dependency\n\n// patch](https://doc.rust-lang.org/cargo/reference/manifest.html#the-patch-section).\n\nfn bench_suite<T: EthSpec>(c: &mut Criterion, spec_desc: &str, validator_count: usize) {\n\n let state1 = build_state::<T>(validator_count);\n\n let state2 = state1.clone();\n\n let mut state3 = state1.clone();\n\n state3.update_tree_hash_cache().unwrap();\n\n\n\n c.bench(\n\n &format!(\"{}/{}_validators/no_cache\", spec_desc, validator_count),\n\n Benchmark::new(\"genesis_state\", move |b| {\n\n b.iter_batched_ref(\n\n || state1.clone(),\n\n |state| black_box(state.canonical_root()),\n\n criterion::BatchSize::SmallInput,\n\n )\n\n })\n\n .sample_size(10),\n\n );\n\n\n\n c.bench(\n\n &format!(\"{}/{}_validators/empty_cache\", spec_desc, validator_count),\n", "file_path": "consensus/tree_hash/benches/benches.rs", "rank": 38, "score": 246772.49279017845 }, { "content": "/// Run a bench with a highly complex block.\n\nfn worst_bench<T: EthSpec>(c: &mut Criterion, spec_desc: &str, validator_count: usize) {\n\n let mut spec = &mut T::default_spec();\n\n\n\n // Allows the exits to be processed sucessfully.\n\n spec.shard_committee_period = 0;\n\n\n\n let (block, state) = get_worst_block(validator_count, spec);\n\n bench_block::<T>(c, block, state, spec, spec_desc, \"high_complexity_block\");\n\n}\n\n\n", "file_path": "consensus/state_processing/benches/benches.rs", "rank": 39, "score": 246766.30429189483 }, { "content": "/// Run a bench with a average complexity block.\n\nfn average_bench<T: EthSpec>(c: &mut Criterion, spec_desc: &str, validator_count: usize) {\n\n let spec = &T::default_spec();\n\n\n\n let (block, state) = get_average_block(validator_count, 
spec);\n\n bench_block::<T>(c, block, state, spec, spec_desc, \"average_complexity_block\");\n\n}\n\n\n", "file_path": "consensus/state_processing/benches/benches.rs", "rank": 40, "score": 246766.30429189483 }, { "content": "fn get_state(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n state\n\n .validators\n\n .push(Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n\n withdrawal_credentials: Hash256::from_low_u64_le(i as u64),\n\n effective_balance: i as u64,\n\n slashed: i % 2 == 0,\n\n activation_eligibility_epoch: i.into(),\n\n activation_epoch: i.into(),\n\n exit_epoch: i.into(),\n\n withdrawable_epoch: i.into(),\n\n })\n\n .expect(\"should add validator\");\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "consensus/types/examples/clone_state.rs", "rank": 41, "score": 246642.21279008576 }, { "content": "/// Creates a file with `600 (-rw-------)` permissions.\n\npub fn create_with_600_perms<P: AsRef<Path>>(path: P, bytes: &[u8]) -> Result<(), String> {\n\n let path = path.as_ref();\n\n\n\n let mut file =\n\n File::create(&path).map_err(|e| format!(\"Unable to create {:?}: {}\", path, e))?;\n\n\n\n let mut perm = file\n\n .metadata()\n\n .map_err(|e| format!(\"Unable to get {:?} metadata: {}\", path, e))?\n\n .permissions();\n\n\n\n perm.set_mode(0o600);\n\n\n\n file.set_permissions(perm)\n\n .map_err(|e| format!(\"Unable to set {:?} permissions: {}\", path, e))?;\n\n\n\n file.write_all(bytes)\n\n .map_err(|e| format!(\"Unable to write to {:?}: {}\", path, e))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "account_manager/src/wallet/create.rs", "rank": 42, "score": 246411.00519541436 }, { "content": 
"#[allow(clippy::too_many_arguments)]\n\nfn store_range<F, E, S, I>(\n\n _: F,\n\n range: I,\n\n start_vindex: usize,\n\n end_vindex: usize,\n\n store: &S,\n\n state: &BeaconState<E>,\n\n spec: &ChainSpec,\n\n ops: &mut Vec<KeyValueStoreOp>,\n\n) -> Result<bool, Error>\n\nwhere\n\n F: Field<E>,\n\n E: EthSpec,\n\n S: KeyValueStore<E>,\n\n I: Iterator<Item = usize>,\n\n{\n\n for chunk_index in range {\n\n let chunk_key = &chunk_key(chunk_index as u64)[..];\n\n\n\n let existing_chunk =\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 43, "score": 246225.32295576404 }, { "content": "fn get_state(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n state\n\n .validators\n\n .push(Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n\n withdrawal_credentials: Hash256::from_low_u64_le(i as u64),\n\n effective_balance: i as u64,\n\n slashed: i % 2 == 0,\n\n activation_eligibility_epoch: i.into(),\n\n activation_epoch: i.into(),\n\n exit_epoch: i.into(),\n\n withdrawable_epoch: i.into(),\n\n })\n\n .expect(\"should add validator\");\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "consensus/types/examples/ssz_encode_state.rs", "rank": 44, "score": 242749.35885748832 }, { "content": "fn get_state(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n state\n\n .validators\n\n .push(Validator {\n\n pubkey: 
generate_deterministic_keypair(i).pk.into(),\n\n withdrawal_credentials: Hash256::from_low_u64_le(i as u64),\n\n effective_balance: i as u64,\n\n slashed: i % 2 == 0,\n\n activation_eligibility_epoch: i.into(),\n\n activation_epoch: i.into(),\n\n exit_epoch: i.into(),\n\n withdrawable_epoch: i.into(),\n\n })\n\n .expect(\"should add validator\");\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "consensus/types/examples/tree_hash_state.rs", "rank": 45, "score": 242749.35885748832 }, { "content": "/// A helper function providing common functionality for finding the Merkle root of some bytes that\n\n/// represent a bitfield.\n\npub fn bitfield_bytes_tree_hash_root<N: Unsigned>(bytes: &[u8]) -> Hash256 {\n\n let byte_size = (N::to_usize() + 7) / 8;\n\n let leaf_count = (byte_size + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;\n\n\n\n let mut hasher = MerkleHasher::with_leaves(leaf_count);\n\n\n\n hasher\n\n .write(bytes)\n\n .expect(\"bitfield should not exceed tree hash leaf limit\");\n\n\n\n hasher\n\n .finish()\n\n .expect(\"bitfield tree hash buffer should not exceed leaf limit\")\n\n}\n", "file_path": "consensus/ssz_types/src/tree_hash.rs", "rank": 46, "score": 241058.77633399848 }, { "content": "pub fn store_updated_vector<F: Field<E>, E: EthSpec, S: KeyValueStore<E>>(\n\n field: F,\n\n store: &S,\n\n state: &BeaconState<E>,\n\n spec: &ChainSpec,\n\n ops: &mut Vec<KeyValueStoreOp>,\n\n) -> Result<(), Error> {\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(state.slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n // Store the genesis value if we have access to it, and it hasn't been stored already.\n\n if F::slot_needs_genesis_value(state.slot, spec) {\n\n let genesis_value = F::extract_genesis_value(state, spec)?;\n\n F::check_and_store_genesis_value(store, genesis_value, ops)?;\n\n }\n\n\n\n // Start by iterating backwards from the last chunk, storing 
new chunks in the database.\n\n // Stop once a chunk in the database matches what we were about to store, this indicates\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 47, "score": 240432.15946709167 }, { "content": "fn variable_list_h256_test<Len: Unsigned>(leaves_and_skips: Vec<(u64, bool)>) -> bool {\n\n let arena = &mut CacheArena::default();\n\n let leaves: Vec<_> = leaves_and_skips\n\n .iter()\n\n .map(|(l, _)| Hash256::from_low_u64_be(*l))\n\n .take(Len::to_usize())\n\n .collect();\n\n\n\n let mut list: VariableList<Hash256, Len>;\n\n let init: VariableList<Hash256, Len> = VariableList::new(vec![]).unwrap();\n\n let mut cache = init.new_tree_hash_cache(arena);\n\n\n\n for (end, (_, update_cache)) in leaves_and_skips.into_iter().enumerate() {\n\n list = VariableList::new(leaves[..end].to_vec()).unwrap();\n\n\n\n if update_cache\n\n && list\n\n .recalculate_tree_hash_root(arena, &mut cache)\n\n .unwrap()\n\n .as_bytes()\n\n != &list.tree_hash_root()[..]\n\n {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n", "file_path": "consensus/cached_tree_hash/src/test.rs", "rank": 48, "score": 240405.4328382326 }, { "content": "/// Returns `true` if we should consider the `file_name` to represent a voting keystore.\n\npub fn is_voting_keystore(file_name: &str) -> bool {\n\n // All formats end with `.json`.\n\n if !file_name.ends_with(\".json\") {\n\n return false;\n\n }\n\n\n\n // The format used by Lighthouse.\n\n if file_name == VOTING_KEYSTORE_FILE {\n\n return true;\n\n }\n\n\n\n // The format exported by the `eth2.0-deposit-cli` library.\n\n //\n\n // Reference to function that generates keystores:\n\n //\n\n // https://github.com/ethereum/eth2.0-deposit-cli/blob/7cebff15eac299b3b1b090c896dd3410c8463450/eth2deposit/credentials.py#L58-L62\n\n //\n\n // Since we include the key derivation path of `m/12381/3600/x/0/0` this should only ever match\n\n // with a voting keystore and never a withdrawal keystore.\n\n //\n", "file_path": 
"common/account_utils/src/validator_definitions.rs", "rank": 49, "score": 240196.18556626939 }, { "content": "/// Converts 0x-prefixed hex to bytes.\n\nfn hex_to_vec(hex: &str) -> Vec<u8> {\n\n hex::decode(&hex[2..]).expect(\"should decode hex as vec\")\n\n}\n\n\n", "file_path": "crypto/eth2_key_derivation/tests/eip2333_vectors.rs", "rank": 50, "score": 239867.51367373724 }, { "content": "/// Parses a `0x`-prefixed, **big-endian** hex string as a u64.\n\n///\n\n/// Note: the JSON-RPC encodes integers as big-endian. The deposit contract uses little-endian.\n\n/// Therefore, this function is only useful for numbers encoded by the JSON RPC.\n\n///\n\n/// E.g., `0x01 == 1`\n\nfn hex_to_u64_be(hex: &str) -> Result<u64, String> {\n\n u64::from_str_radix(strip_prefix(hex)?, 16)\n\n .map_err(|e| format!(\"Failed to parse hex as u64: {:?}\", e))\n\n}\n\n\n", "file_path": "beacon_node/eth1/src/http.rs", "rank": 51, "score": 237855.68858469246 }, { "content": "// hex::encode only allows up to 32 bytes.\n\npub fn bytes96_to_hex_string(data: [u8; 96]) -> Result<String, Error> {\n\n static CHARS: &[u8] = b\"0123456789abcdef\";\n\n let mut s = String::with_capacity(96 * 2 + 2);\n\n\n\n s.write_char('0')?;\n\n s.write_char('x')?;\n\n\n\n for &byte in data.iter() {\n\n s.write_char(CHARS[(byte >> 4) as usize].into())?;\n\n s.write_char(CHARS[(byte & 0xf) as usize].into())?;\n\n }\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "remote_signer/backend/src/utils.rs", "rank": 52, "score": 237784.2542164644 }, { "content": "/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte union index from `bytes`, where `bytes.len() >=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\npub fn read_union_index(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n read_offset(bytes)\n\n}\n\n\n", "file_path": "consensus/ssz/src/decode.rs", "rank": 53, "score": 237420.58756072045 }, { "content": "/// Reads a password from TTY or stdin if `use_stdin == true`.\n\npub fn read_password_from_user(use_stdin: bool) -> Result<ZeroizeString, 
String> {\n\n let result = if use_stdin {\n\n rpassword::prompt_password_stderr(\"\")\n\n .map_err(|e| format!(\"Error reading from stdin: {}\", e))\n\n } else {\n\n rpassword::read_password_from_tty(None)\n\n .map_err(|e| format!(\"Error reading from tty: {}\", e))\n\n };\n\n\n\n result.map(ZeroizeString::from)\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 54, "score": 236908.22784797937 }, { "content": "/// Encode `index` as a little-endian byte array of `BYTES_PER_LENGTH_OFFSET` length.\n\n///\n\n/// If `len` is larger than `2 ^ BYTES_PER_LENGTH_OFFSET`, a `debug_assert` is raised.\n\npub fn encode_union_index(index: usize) -> [u8; BYTES_PER_LENGTH_OFFSET] {\n\n encode_length(index)\n\n}\n\n\n", "file_path": "consensus/ssz/src/encode.rs", "rank": 55, "score": 236549.4760129461 }, { "content": "pub fn load_vector_from_db<F: FixedLengthField<E>, E: EthSpec, S: KeyValueStore<E>>(\n\n store: &S,\n\n slot: Slot,\n\n spec: &ChainSpec,\n\n) -> Result<FixedVector<F::Value, F::Length>, Error> {\n\n // Do a range query\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n let chunks = range_query(store, F::column(), start_cindex, end_cindex)?;\n\n\n\n let default = if F::slot_needs_genesis_value(slot, spec) {\n\n F::load_genesis_value(store)?\n\n } else {\n\n F::Value::default()\n\n };\n\n\n\n let result = stitch(\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 56, "score": 236451.59909715707 }, { "content": "/// Takes a string password and checks that it meets minimum requirements.\n\n///\n\n/// The current minimum password requirement is a 12 character length character length.\n\npub fn is_password_sufficiently_complex(password: &[u8]) -> Result<(), String> {\n\n if count_unicode_characters(password) >= MINIMUM_PASSWORD_LEN {\n\n Ok(())\n\n } else {\n\n 
Err(format!(\n\n \"Please use at least {} characters for your password.\",\n\n MINIMUM_PASSWORD_LEN\n\n ))\n\n }\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 57, "score": 234666.4619240947 }, { "content": "/// The historical roots are stored in vector chunks, despite not actually being a vector.\n\npub fn load_variable_list_from_db<F: VariableLengthField<E>, E: EthSpec, S: KeyValueStore<E>>(\n\n store: &S,\n\n slot: Slot,\n\n spec: &ChainSpec,\n\n) -> Result<VariableList<F::Value, F::Length>, Error> {\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n let chunks: Vec<Chunk<F::Value>> = range_query(store, F::column(), start_cindex, end_cindex)?;\n\n\n\n let mut result = Vec::with_capacity(chunk_size * chunks.len());\n\n\n\n for (chunk_index, chunk) in chunks.into_iter().enumerate() {\n\n for (i, value) in chunk.values.into_iter().enumerate() {\n\n let vindex = chunk_index * chunk_size + i;\n\n\n\n if vindex >= start_vindex && vindex < end_vindex {\n\n result.push(value);\n\n }\n\n }\n\n }\n\n\n\n Ok(result.into())\n\n}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 58, "score": 234552.59034174925 }, { "content": "/// Remove all entries from the given hash map for which `prune_if` returns true.\n\n///\n\n/// The keys in the map should be validator indices, which will be looked up\n\n/// in the state's validator registry and then passed to `prune_if`.\n\n/// Entries for unknown validators will be kept.\n\nfn prune_validator_hash_map<T, F, E: EthSpec>(\n\n map: &mut HashMap<u64, T>,\n\n prune_if: F,\n\n head_state: &BeaconState<E>,\n\n) where\n\n F: Fn(&Validator) -> bool,\n\n{\n\n map.retain(|&validator_index, _| {\n\n head_state\n\n .validators\n\n .get(validator_index as usize)\n\n .map_or(true, |validator| !prune_if(validator))\n\n });\n\n}\n\n\n\n/// 
Compare two operation pools.\n\nimpl<T: EthSpec + Default> PartialEq for OperationPool<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n if ptr::eq(self, other) {\n\n return true;\n", "file_path": "beacon_node/operation_pool/src/lib.rs", "rank": 59, "score": 234248.8348295537 }, { "content": "fn update_parent_roots(snapshots: &mut [BeaconSnapshot<E>]) {\n\n for i in 0..snapshots.len() {\n\n let root = snapshots[i].beacon_block.canonical_root();\n\n if let Some(child) = snapshots.get_mut(i + 1) {\n\n child.beacon_block.message.parent_root = root\n\n }\n\n }\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/block_verification.rs", "rank": 60, "score": 234187.00176522054 }, { "content": "/// Merkleize `bytes` and return the root, optionally padding the tree out to `min_leaves` number of\n\n/// leaves.\n\n///\n\n/// **Note**: This function is generally worse than using the `crate::merkle_root` which uses\n\n/// `MerkleHasher`. We only keep this function around for reference testing.\n\n///\n\n/// First all nodes are extracted from `bytes` and then a padding node is added until the number of\n\n/// leaf chunks is greater than or equal to `min_leaves`. Callers may set `min_leaves` to `0` if no\n\n/// adding additional chunks should be added to the given `bytes`.\n\n///\n\n/// If `bytes.len() <= BYTES_PER_CHUNK`, no hashing is done and `bytes` is returned, potentially\n\n/// padded out to `BYTES_PER_CHUNK` length with `0`.\n\n///\n\n/// ## CPU Performance\n\n///\n\n/// A cache of `MAX_TREE_DEPTH` hashes are stored to avoid re-computing the hashes of padding nodes\n\n/// (or their parents). Therefore, adding padding nodes only incurs one more hash per additional\n\n/// height of the tree.\n\n///\n\n/// ## Memory Performance\n\n///\n\n/// This algorithm has two interesting memory usage properties:\n\n///\n\n/// 1. The maximum memory footprint is roughly `O(V / 2)` memory, where `V` is the number of leaf\n\n/// chunks with values (i.e., leaves that are not padding). 
The means adding padding nodes to\n\n/// the tree does not increase the memory footprint.\n\n/// 2. At each height of the tree half of the memory is freed until only a single chunk is stored.\n\n/// 3. The input `bytes` are not copied into another list before processing.\n\n///\n\n/// _Note: there are some minor memory overheads, including a handful of usizes and a list of\n\n/// `MAX_TREE_DEPTH` hashes as `lazy_static` constants._\n\npub fn merkleize_padded(bytes: &[u8], min_leaves: usize) -> Hash256 {\n\n // If the bytes are just one chunk or less, pad to one chunk and return without hashing.\n\n if bytes.len() <= BYTES_PER_CHUNK && min_leaves <= 1 {\n\n let mut o = bytes.to_vec();\n\n o.resize(BYTES_PER_CHUNK, 0);\n\n return Hash256::from_slice(&o);\n\n }\n\n\n\n assert!(\n\n bytes.len() > BYTES_PER_CHUNK || min_leaves > 1,\n\n \"Merkle hashing only needs to happen if there is more than one chunk\"\n\n );\n\n\n\n // The number of leaves that can be made directly from `bytes`.\n\n let leaves_with_values = (bytes.len() + (BYTES_PER_CHUNK - 1)) / BYTES_PER_CHUNK;\n\n\n\n // The number of parents that have at least one non-padding leaf.\n\n //\n\n // Since there is more than one node in this tree (see prior assertion), there should always be\n\n // one or more initial parent nodes.\n", "file_path": "consensus/tree_hash/src/merkleize_padded.rs", "rank": 61, "score": 233678.1031970629 }, { "content": "/// Convenience method for `MerkleHasher` which also provides some fast-paths for small trees.\n\n///\n\n/// `minimum_leaf_count` will only be used if it is greater than or equal to the minimum number of leaves that can be created from `bytes`.\n\npub fn merkle_root(bytes: &[u8], minimum_leaf_count: usize) -> Hash256 {\n\n let leaves = std::cmp::max(\n\n (bytes.len() + (HASHSIZE - 1)) / HASHSIZE,\n\n minimum_leaf_count,\n\n );\n\n\n\n if leaves == 0 {\n\n // If there are no bytes then the hash is always zero.\n\n Hash256::zero()\n\n } else if leaves == 1 {\n\n // If 
there is only one leaf, the hash is always those leaf bytes padded out to 32-bytes.\n\n let mut hash = [0; HASHSIZE];\n\n hash[0..bytes.len()].copy_from_slice(bytes);\n\n Hash256::from_slice(&hash)\n\n } else if leaves == 2 {\n\n // If there are only two leaves (this is common with BLS pubkeys), we can avoid some\n\n // overhead with `MerkleHasher` and just do a simple 3-node tree here.\n\n let mut leaves = [0; HASHSIZE * 2];\n\n leaves[0..bytes.len()].copy_from_slice(bytes);\n\n\n", "file_path": "consensus/tree_hash/src/lib.rs", "rank": 62, "score": 233671.6387915014 }, { "content": "/// Executes a `Command`, returning a `Result` based upon the success exit code of the command.\n\nfn output_result(cmd: &mut Command) -> Result<Output, String> {\n\n let output = cmd.output().expect(\"should run command\");\n\n\n\n if output.status.success() {\n\n Ok(output)\n\n } else {\n\n Err(from_utf8(&output.stderr)\n\n .expect(\"stderr is not utf8\")\n\n .to_string())\n\n }\n\n}\n\n\n", "file_path": "lighthouse/tests/account_manager.rs", "rank": 63, "score": 233493.5830435321 }, { "content": "/// A bit of hack to find an unused port.\n\n///\n\n/// Does not guarantee that the given port is unused after the function exists, just that it was\n\n/// unused before the function started (i.e., it does not reserve a port).\n\n///\n\n/// Used for passing unused ports to libp2 so that lighthouse won't have to update\n\n/// its own ENR.\n\n///\n\n/// NOTE: It is possible that libp2p/discv5 is unable to bind to the\n\n/// ports returned by this function as the OS has a buffer period where\n\n/// it doesn't allow binding to the same port even after the socket is closed.\n\n/// We might have to use SO_REUSEADDR socket option from `std::net2` crate in\n\n/// that case.\n\npub fn unused_port(transport: &str) -> Result<u16, String> {\n\n let local_addr = match transport {\n\n \"tcp\" => {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").map_err(|e| {\n\n format!(\"Failed to create TCP 
listener to find unused port: {:?}\", e)\n\n })?;\n\n listener.local_addr().map_err(|e| {\n\n format!(\n\n \"Failed to read TCP listener local_addr to find unused port: {:?}\",\n\n e\n\n )\n\n })?\n\n }\n\n \"udp\" => {\n\n let socket = UdpSocket::bind(\"127.0.0.1:0\")\n\n .map_err(|e| format!(\"Failed to create UDP socket to find unused port: {:?}\", e))?;\n\n socket.local_addr().map_err(|e| {\n\n format!(\n\n \"Failed to read UDP socket local_addr to find unused port: {:?}\",\n\n e\n\n )\n\n })?\n\n }\n\n _ => return Err(\"Invalid transport to find unused port\".into()),\n\n };\n\n Ok(local_addr.port())\n\n}\n", "file_path": "beacon_node/src/config.rs", "rank": 64, "score": 233332.01558145863 }, { "content": "/// Return a G1 point for the given `validator_index`, encoded as a compressed point in\n\n/// big-endian byte-ordering.\n\npub fn be_private_key(validator_index: usize) -> [u8; PRIVATE_KEY_BYTES] {\n\n let preimage = {\n\n let mut bytes = [0; HASH_BYTES];\n\n let index = validator_index.to_le_bytes();\n\n bytes[0..index.len()].copy_from_slice(&index);\n\n bytes\n\n };\n\n\n\n let privkey = BigUint::from_bytes_le(&hash(&preimage)) % &*CURVE_ORDER;\n\n\n\n let mut bytes = [0; PRIVATE_KEY_BYTES];\n\n let privkey_bytes = privkey.to_bytes_be();\n\n bytes[PRIVATE_KEY_BYTES - privkey_bytes.len()..].copy_from_slice(&privkey_bytes);\n\n bytes\n\n}\n\n\n", "file_path": "common/eth2_interop_keypairs/src/lib.rs", "rank": 65, "score": 232956.43407642067 }, { "content": "/// Accepts an entire HTTP body (as a string) and returns the `result` field, as a serde `Value`.\n\nfn response_result(response: &str) -> Result<Option<Value>, String> {\n\n let json = serde_json::from_str::<Value>(&response)\n\n .map_err(|e| format!(\"Failed to parse response: {:?}\", e))?;\n\n\n\n if let Some(error) = json.get(\"error\") {\n\n Err(format!(\"Eth1 node returned error: {}\", error))\n\n } else {\n\n Ok(json\n\n .get(\"result\")\n\n .cloned()\n\n .map(Some)\n\n .unwrap_or_else(|| 
None))\n\n }\n\n}\n\n\n", "file_path": "beacon_node/eth1/src/http.rs", "rank": 66, "score": 230277.73145111592 }, { "content": "/// Attempts to parse a mnemonic phrase.\n\npub fn mnemonic_from_phrase(phrase: &str) -> Result<Mnemonic, String> {\n\n Mnemonic::from_phrase(phrase, Language::English).map_err(|e| e.to_string())\n\n}\n\n\n\n/// Provides a new-type wrapper around `String` that is zeroized on `Drop`.\n\n///\n\n/// Useful for ensuring that password memory is zeroed-out on drop.\n\n#[derive(Clone, PartialEq, Serialize, Deserialize, Zeroize)]\n\n#[zeroize(drop)]\n\n#[serde(transparent)]\n\npub struct ZeroizeString(String);\n\n\n\nimpl From<String> for ZeroizeString {\n\n fn from(s: String) -> Self {\n\n Self(s)\n\n }\n\n}\n\n\n\nimpl ZeroizeString {\n\n pub fn as_str(&self) -> &str {\n", "file_path": "common/account_utils/src/lib.rs", "rank": 67, "score": 230271.70090317028 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<[u8; BYTES_LEN], D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let decoded = deserializer.deserialize_str(PrefixedHexVisitor)?;\n\n\n\n if decoded.len() != BYTES_LEN {\n\n return Err(D::Error::custom(format!(\n\n \"expected {} bytes for array, got {}\",\n\n BYTES_LEN,\n\n decoded.len()\n\n )));\n\n }\n\n\n\n let mut array = [0; BYTES_LEN];\n\n array.copy_from_slice(&decoded);\n\n Ok(array)\n\n}\n", "file_path": "consensus/serde_utils/src/bytes_4_hex.rs", "rank": 68, "score": 229110.49678136807 }, { "content": "fn get_block<T, F>(mut mutate_builder: F) -> (SignedBeaconBlock<T>, BeaconState<T>)\n\nwhere\n\n T: EthSpec,\n\n F: FnMut(&mut BlockBuilder<T>),\n\n{\n\n let spec = T::default_spec();\n\n let mut builder: BlockBuilder<T> = BlockBuilder::new(VALIDATOR_COUNT, &spec);\n\n builder.set_slot(Slot::from(T::slots_per_epoch() * 3 - 2));\n\n builder.build_caches(&spec);\n\n mutate_builder(&mut builder);\n\n builder.build(&spec)\n\n}\n\n\n", "file_path": "consensus/state_processing/tests/tests.rs", "rank": 69, 
"score": 228585.90326698864 }, { "content": "/// Returns `int` as little-endian bytes with a length of 1.\n\npub fn int_to_bytes1(int: u8) -> Vec<u8> {\n\n vec![int]\n\n}\n\n\n", "file_path": "consensus/int_to_bytes/src/lib.rs", "rank": 70, "score": 227804.81160978408 }, { "content": "/// Peforms a `HKDF-Expand` on the `pkr` (pseudo-random key), returning `l` bytes.\n\n///\n\n/// Defined in [RFC5869](https://tools.ietf.org/html/rfc5869).\n\nfn hkdf_expand(prk: Prk, info: &[u8], l: usize) -> SecretBytes {\n\n struct ExpandLen(usize);\n\n\n\n impl KeyType for ExpandLen {\n\n fn len(&self) -> usize {\n\n self.0\n\n }\n\n }\n\n\n\n let mut okm = SecretBytes::zero(l);\n\n prk.expand(&[info], ExpandLen(l))\n\n .expect(\"expand len is constant and cannot be too large\")\n\n .fill(okm.as_mut_bytes())\n\n .expect(\"fill len is constant and cannot be too large\");\n\n okm\n\n}\n\n\n", "file_path": "crypto/eth2_key_derivation/src/derived_key.rs", "rank": 71, "score": 227465.67218124063 }, { "content": "pub fn get_environment(is_log_active: bool) -> Environment<E> {\n\n let environment_builder = EnvironmentBuilder::mainnet();\n\n\n\n let builder = if is_log_active {\n\n environment_builder.async_logger(\"info\", None).unwrap()\n\n } else {\n\n environment_builder.null_logger().unwrap()\n\n };\n\n\n\n builder\n\n .multi_threaded_tokio_runtime()\n\n .unwrap()\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "testing/remote_signer_test/src/api_test_signer.rs", "rank": 72, "score": 226539.36279354285 }, { "content": "/// Encode `data` as a 0x-prefixed hex string.\n\npub fn encode<T: AsRef<[u8]>>(data: T) -> String {\n\n let hex = hex::encode(data);\n\n let mut s = \"0x\".to_string();\n\n s.push_str(hex.as_str());\n\n s\n\n}\n\n\n", "file_path": "consensus/serde_utils/src/hex.rs", "rank": 73, "score": 226035.30167336622 }, { "content": "fn all_benches(c: &mut Criterion) {\n\n let validator_count = 16_384;\n\n let spec = &MainnetEthSpec::default_spec();\n\n\n\n let mut state = 
get_state::<MainnetEthSpec>(validator_count);\n\n state.build_all_caches(spec).expect(\"should build caches\");\n\n let state_bytes = state.as_ssz_bytes();\n\n\n\n let inner_state = state.clone();\n\n c.bench(\n\n &format!(\"{}_validators\", validator_count),\n\n Benchmark::new(\"encode/beacon_state\", move |b| {\n\n b.iter_batched_ref(\n\n || inner_state.clone(),\n\n |state| black_box(state.as_ssz_bytes()),\n\n criterion::BatchSize::SmallInput,\n\n )\n\n })\n\n .sample_size(10),\n\n );\n", "file_path": "consensus/types/benches/benches.rs", "rank": 74, "score": 225661.80730044126 }, { "content": "fn unwrap_err<T, E>(result: Result<T, E>) -> E {\n\n match result {\n\n Ok(_) => panic!(\"called unwrap_err on Ok\"),\n\n Err(e) => e,\n\n }\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/block_verification.rs", "rank": 75, "score": 225222.78928538598 }, { "content": "// A bit of hack to find an unused port.\n\n///\n\n/// Does not guarantee that the given port is unused after the function exists, just that it was\n\n/// unused before the function started (i.e., it does not reserve a port).\n\npub fn unused_port(transport: &str) -> Result<u16, String> {\n\n let local_addr = match transport {\n\n \"tcp\" => {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").map_err(|e| {\n\n format!(\"Failed to create TCP listener to find unused port: {:?}\", e)\n\n })?;\n\n listener.local_addr().map_err(|e| {\n\n format!(\n\n \"Failed to read TCP listener local_addr to find unused port: {:?}\",\n\n e\n\n )\n\n })?\n\n }\n\n \"udp\" => {\n\n let socket = UdpSocket::bind(\"127.0.0.1:0\")\n\n .map_err(|e| format!(\"Failed to create UDP socket to find unused port: {:?}\", e))?;\n\n socket.local_addr().map_err(|e| {\n\n format!(\n\n \"Failed to read UDP socket local_addr to find unused port: {:?}\",\n\n e\n\n )\n\n })?\n\n }\n\n _ => return Err(\"Invalid transport to find unused port\".into()),\n\n };\n\n Ok(local_addr.port())\n\n}\n\n\n", "file_path": 
"beacon_node/eth2_libp2p/tests/common/mod.rs", "rank": 76, "score": 224552.1411596595 }, { "content": "pub fn check_serialization<T: SszStaticType>(value: &T, serialized: &[u8]) -> Result<(), Error> {\n\n // Check serialization\n\n let serialized_result = value.as_ssz_bytes();\n\n compare_result::<usize, Error>(&Ok(value.ssz_bytes_len()), &Some(serialized.len()))?;\n\n compare_result::<Vec<u8>, Error>(&Ok(serialized_result), &Some(serialized.to_vec()))?;\n\n\n\n // Check deserialization\n\n let deserialized_result = T::from_ssz_bytes(serialized);\n\n compare_result(&deserialized_result, &Some(value.clone()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/ef_tests/src/cases/ssz_static.rs", "rank": 77, "score": 224520.2948684793 }, { "content": "/// Returns a beacon chain harness.\n\nfn get_harness(validator_count: usize) -> BeaconChainHarness<EphemeralHarnessType<E>> {\n\n let harness = BeaconChainHarness::new_with_target_aggregators(\n\n MainnetEthSpec,\n\n KEYPAIRS[0..validator_count].to_vec(),\n\n // A kind-of arbitrary number that ensures that _some_ validators are aggregators, but\n\n // not all.\n\n 4,\n\n StoreConfig::default(),\n\n );\n\n\n\n harness.advance_slot();\n\n\n\n harness\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/attestation_verification.rs", "rank": 78, "score": 222404.17398378815 }, { "content": "fn get_harness(validator_count: usize) -> BeaconChainHarness<EphemeralHarnessType<E>> {\n\n let harness = BeaconChainHarness::new_with_store_config(\n\n MainnetEthSpec,\n\n KEYPAIRS[0..validator_count].to_vec(),\n\n StoreConfig::default(),\n\n );\n\n\n\n harness.advance_slot();\n\n\n\n harness\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/block_verification.rs", "rank": 79, "score": 222404.17398378812 }, { "content": "/// Writes a list of `vectors` to the `title` dir.\n\nfn write_vectors_to_file(title: &str, vectors: &[TestVector]) -> Result<(), String> {\n\n let dir = env::var(\"CARGO_MANIFEST_DIR\")\n\n .map_err(|e| 
format!(\"Unable to find manifest dir: {:?}\", e))?\n\n .parse::<PathBuf>()\n\n .map_err(|e| format!(\"Unable to parse manifest dir: {:?}\", e))?\n\n .join(BASE_VECTOR_DIR)\n\n .join(title);\n\n\n\n if dir.exists() {\n\n fs::remove_dir_all(&dir).map_err(|e| format!(\"Unable to remove {:?}: {:?}\", dir, e))?;\n\n }\n\n fs::create_dir_all(&dir).map_err(|e| format!(\"Unable to create {:?}: {:?}\", dir, e))?;\n\n\n\n for vector in vectors {\n\n let dir = dir.clone().join(&vector.title);\n\n if dir.exists() {\n\n fs::remove_dir_all(&dir).map_err(|e| format!(\"Unable to remove {:?}: {:?}\", dir, e))?;\n\n }\n\n fs::create_dir_all(&dir).map_err(|e| format!(\"Unable to create {:?}: {:?}\", dir, e))?;\n\n\n", "file_path": "testing/state_transition_vectors/src/main.rs", "rank": 80, "score": 221861.2736875224 }, { "content": "fn test_scenario<T: EthSpec, F, G>(mutate_builder: F, mut invalidate_block: G, spec: &ChainSpec)\n\nwhere\n\n T: EthSpec,\n\n F: FnMut(&mut BlockBuilder<T>),\n\n G: FnMut(&mut SignedBeaconBlock<T>),\n\n{\n\n let (mut block, mut state) = get_block::<T, _>(mutate_builder);\n\n\n\n /*\n\n * Control check to ensure the valid block should pass verification.\n\n */\n\n\n\n assert_eq!(\n\n per_block_processing(\n\n &mut state.clone(),\n\n &block,\n\n None,\n\n BlockSignatureStrategy::VerifyIndividual,\n\n spec\n\n ),\n", "file_path": "consensus/state_processing/tests/tests.rs", "rank": 81, "score": 218096.670977519 }, { "content": "fn do_round(seed: &[u8], index: usize, pivot: usize, round: u8, list_size: usize) -> Option<usize> {\n\n let flip = (pivot + (list_size - index)) % list_size;\n\n let position = max(index, flip);\n\n let source = hash_with_round_and_position(seed, round, position);\n\n let byte = source[(position % 256) / 8];\n\n let bit = (byte >> (position % 8)) % 2;\n\n Some(if bit == 1 { flip } else { index })\n\n}\n\n\n", "file_path": "consensus/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 82, "score": 217937.25936256032 }, { 
"content": "/// Loads a list of keypairs from file.\n\npub fn load_keypairs_from_yaml(path: PathBuf) -> Result<Vec<Keypair>, String> {\n\n keypairs_from_yaml_file(path)\n\n}\n", "file_path": "consensus/types/src/test_utils/generate_deterministic_keypairs.rs", "rank": 83, "score": 216969.0008618427 }, { "content": "pub fn u64_leaf_count(len: usize) -> usize {\n\n let type_size = size_of::<u64>();\n\n let vals_per_chunk = BYTES_PER_CHUNK / type_size;\n\n\n\n (len + vals_per_chunk - 1) / vals_per_chunk\n\n}\n\n\n", "file_path": "consensus/cached_tree_hash/src/impls.rs", "rank": 84, "score": 216852.93125966343 }, { "content": "pub fn hash256_leaf_count(len: usize) -> usize {\n\n len\n\n}\n\n\n", "file_path": "consensus/cached_tree_hash/src/impls.rs", "rank": 85, "score": 216852.93125966343 }, { "content": "fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n }\n\n\n\n state.validators = (0..validator_count)\n\n .collect::<Vec<_>>()\n\n .par_iter()\n\n .map(|&i| Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n\n withdrawal_credentials: Hash256::from_low_u64_le(i as u64),\n", "file_path": "beacon_node/store/benches/benches.rs", "rank": 86, "score": 213945.07175198797 }, { "content": "pub fn parse_u64(matches: &ArgMatches, name: &'static str) -> Result<u64, String> {\n\n matches\n\n .value_of(name)\n\n .ok_or_else(|| format!(\"{} not specified\", name))?\n\n .parse::<u64>()\n\n .map_err(|e| format!(\"Unable to parse {}: {}\", name, e))\n\n}\n\n\n", "file_path": "lcli/src/helpers.rs", "rank": 87, "score": 213167.60630401192 }, { "content": "pub fn parse_address(matches: &ArgMatches, name: &'static str) -> 
Result<Address, String> {\n\n matches\n\n .value_of(name)\n\n .ok_or_else(|| format!(\"{} not specified\", name))\n\n .and_then(|val| {\n\n if val.starts_with(\"0x\") {\n\n val[2..]\n\n .parse()\n\n .map_err(|e| format!(\"Unable to parse {}: {:?}\", name, e))\n\n } else {\n\n Err(format!(\"Unable to parse {}, must have 0x prefix\", name))\n\n }\n\n })\n\n}\n\n\n", "file_path": "lcli/src/helpers.rs", "rank": 88, "score": 213167.60630401192 }, { "content": "/// According to unicode, every byte that starts with 0b10xxxxxx continues encoding of character\n\n/// Therefore the number of characters equals number of bytes minus number of 0b10xxxxxx bytes\n\nfn count_unicode_characters(bits: &[u8]) -> usize {\n\n bits.iter().filter(|bit| *bit >> 6 != 2).count()\n\n}\n\n\n", "file_path": "common/account_utils/src/lib.rs", "rank": 89, "score": 211596.67335140437 }, { "content": "/// Returns `false` for a timestamp that would result in a genesis time that is earlier than\n\n/// `MIN_GENESIS_TIME`.\n\nfn timestamp_can_trigger_genesis(timestamp: u64, spec: &ChainSpec) -> Result<bool, String> {\n\n eth2_genesis_time(timestamp, spec)\n\n .map(|t| t >= spec.min_genesis_time)\n\n .map_err(|e| format!(\"Arith error when during genesis calculation: {:?}\", e))\n\n}\n", "file_path": "beacon_node/genesis/src/eth1_genesis_service.rs", "rank": 90, "score": 210478.5803625136 }, { "content": "fn hash_with_round_and_position(seed: &[u8], round: u8, position: usize) -> Hash256 {\n\n let mut context = Context::new(&SHA256);\n\n\n\n context.update(seed);\n\n context.update(&[round]);\n\n /*\n\n * Note: the specification has an implicit assertion in `int_to_bytes4` that `position / 256 <\n\n * 2**24`. 
For efficiency, we do not check for that here as it is checked in `compute_shuffled_index`.\n\n */\n\n context.update(&(position / 256).to_le_bytes()[0..4]);\n\n\n\n let digest = context.finish();\n\n Hash256::from_slice(digest.as_ref())\n\n}\n\n\n", "file_path": "consensus/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 91, "score": 209103.37545549657 }, { "content": "/// Returns true if some field has an attribute declaring it should not be serialized.\n\n///\n\n/// The field attribute is: `#[ssz(skip_serializing)]`\n\nfn should_skip_serializing(field: &syn::Field) -> bool {\n\n field.attrs.iter().any(|attr| {\n\n attr.path.is_ident(\"ssz\")\n\n && attr.tokens.to_string().replace(\" \", \"\") == \"(skip_serializing)\"\n\n })\n\n}\n\n\n\n/// Implements `ssz::Encode` for some `struct`.\n\n///\n\n/// Fields are encoded in the order they are defined.\n\n///\n\n/// ## Field attributes\n\n///\n\n/// - `#[ssz(skip_serializing)]`: the field will not be serialized.\n", "file_path": "consensus/ssz_derive/src/lib.rs", "rank": 92, "score": 208475.0808852706 }, { "content": "/// Interprets `bytes` as a big-endian integer and returns that integer modulo the order of the\n\n/// BLS-381 curve.\n\n///\n\n/// This function is a part of the `HKDF_mod_r` function in EIP-2333.\n\nfn mod_r(bytes: &[u8]) -> ZeroizeHash {\n\n let n = BigUint::from_bytes_be(bytes);\n\n let r = BigUint::parse_bytes(R.as_bytes(), 10).expect(\"must be able to parse R\");\n\n let x = SecretBytes::from((n % r).to_bytes_be());\n\n\n\n let x_slice = x.as_bytes();\n\n\n\n debug_assert!(x_slice.len() <= HASH_SIZE);\n\n\n\n let mut output = ZeroizeHash::zero();\n\n output.as_mut_bytes()[HASH_SIZE - x_slice.len()..].copy_from_slice(&x_slice);\n\n output\n\n}\n\n\n", "file_path": "crypto/eth2_key_derivation/src/derived_key.rs", "rank": 93, "score": 208383.59890618623 }, { "content": "fn bytes_to_int64(slice: &[u8]) -> u64 {\n\n let mut bytes = [0; 8];\n\n bytes.copy_from_slice(&slice[0..8]);\n\n 
u64::from_le_bytes(bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ethereum_types::H256 as Hash256;\n\n\n\n #[test]\n\n #[ignore]\n\n fn fuzz_test() {\n\n let max_list_size = 2_usize.pow(24);\n\n let test_runs = 1000;\n\n\n\n // Test at max list_size with the end index.\n\n for _ in 0..test_runs {\n\n let index = max_list_size - 1;\n", "file_path": "consensus/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 94, "score": 208377.95030471613 }, { "content": "/// Returns the peer count, returning something helpful if it's `usize::max_value` (effectively a\n\n/// `None` value).\n\nfn peer_count_pretty(peer_count: usize) -> String {\n\n if peer_count == usize::max_value() {\n\n String::from(\"--\")\n\n } else {\n\n format!(\"{}\", peer_count)\n\n }\n\n}\n\n\n", "file_path": "beacon_node/client/src/notifier.rs", "rank": 95, "score": 208286.6356210762 }, { "content": "/// Returns the value of `name` or an error if it is not in `matches` or does not parse\n\n/// successfully using `std::string::FromStr`.\n\npub fn parse_required<T>(matches: &ArgMatches, name: &str) -> Result<T, String>\n\nwhere\n\n T: FromStr,\n\n <T as FromStr>::Err: std::fmt::Display,\n\n{\n\n parse_optional(matches, name)?.ok_or_else(|| format!(\"{} not specified\", name))\n\n}\n\n\n", "file_path": "common/clap_utils/src/lib.rs", "rank": 96, "score": 207917.93992575584 }, { "content": "/// Ensure that the directory at `path` exists, by creating it and all parents if necessary.\n\nfn ensure_dir_exists(path: PathBuf) -> Result<PathBuf, String> {\n\n fs::create_dir_all(&path).map_err(|e| format!(\"Unable to create {}: {}\", path.display(), e))?;\n\n Ok(path)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use toml;\n\n\n\n #[test]\n\n fn serde() {\n\n let config = Config::default();\n\n let serialized = toml::to_string(&config).expect(\"should serde encode default config\");\n\n toml::from_str::<Config>(&serialized).expect(\"should serde decode default 
config\");\n\n }\n\n}\n", "file_path": "beacon_node/client/src/config.rs", "rank": 97, "score": 207680.7636775071 }, { "content": "// Check that the config from the Eth2.0 spec tests matches our minimal/mainnet config.\n\nfn config_test<E: EthSpec + TypeName>() {\n\n let config_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"eth2.0-spec-tests\")\n\n .join(\"tests\")\n\n .join(E::name())\n\n .join(\"config\")\n\n .join(\"phase0.yaml\");\n\n let yaml_config = YamlConfig::from_file(&config_path).expect(\"config file loads OK\");\n\n let spec = E::default_spec();\n\n let yaml_from_spec = YamlConfig::from_spec::<E>(&spec);\n\n assert_eq!(yaml_config.apply_to_chain_spec::<E>(&spec), Some(spec));\n\n assert_eq!(yaml_from_spec, yaml_config);\n\n}\n\n\n", "file_path": "testing/ef_tests/tests/tests.rs", "rank": 98, "score": 207659.72885339306 }, { "content": "fn shuffle_list(seed: &[u8], list_size: usize) -> Vec<usize> {\n\n let mut output = Vec::with_capacity(list_size);\n\n for i in 0..list_size {\n\n output.push(compute_shuffled_index(i, list_size, seed, SHUFFLE_ROUND_COUNT).unwrap());\n\n }\n\n output\n\n}\n\n\n", "file_path": "consensus/swap_or_not_shuffle/benches/benches.rs", "rank": 99, "score": 207570.6238007163 } ]
Rust
src/developer/ffx/plugins/setui/display/src/lib.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
use anyhow::Result; use ffx_core::ffx_plugin; use ffx_setui_display_args::Display; use fidl_fuchsia_settings::{DisplayProxy, DisplaySettings}; use utils::handle_mixed_result; use utils::{self, Either, WatchOrSetResult}; #[ffx_plugin("setui", DisplayProxy = "core/setui_service:expose:fuchsia.settings.Display")] pub async fn run_command(display_proxy: DisplayProxy, display: Display) -> Result<()> { handle_mixed_result("Display", command(display_proxy, DisplaySettings::from(display)).await) .await } async fn command(proxy: DisplayProxy, settings: DisplaySettings) -> WatchOrSetResult { if settings == DisplaySettings::EMPTY { Ok(Either::Watch(utils::watch_to_stream(proxy, |p| p.watch()))) } else { Ok(Either::Set(if let Err(err) = proxy.set(settings.clone()).await? { format!("{:?}", err) } else { format!("Successfully set Display to {:?}", Display::from(settings)) })) } } #[cfg(test)] mod test { use super::*; use fidl_fuchsia_settings::{DisplayRequest, LowLightMode, Theme, ThemeMode, ThemeType}; use futures::prelude::*; use test_case::test_case; #[fuchsia_async::run_singlethreaded(test)] async fn test_run_command() { let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { responder, .. } => { let _ = responder.send(&mut Ok(())); } DisplayRequest::Watch { .. } => { panic!("Unexpected call to watch"); } DisplayRequest::WatchLightSensor { .. } => { panic!("Unexpected call to watch light sensor"); } }); let display = Display { brightness: None, auto_brightness_level: None, auto_brightness: Some(true), low_light_mode: None, theme: None, screen_enabled: None, }; let response = run_command(proxy, display).await; assert!(response.is_ok()); } #[test_case( Display { brightness: Some(0.5), auto_brightness_level: None, auto_brightness: Some(false), low_light_mode: None, theme: None, screen_enabled: None, }; "Test display set() output with non-empty input." 
)] #[test_case( Display { brightness: None, auto_brightness_level: Some(0.8), auto_brightness: Some(true), low_light_mode: Some(LowLightMode::Enable), theme: Some(Theme { theme_type: Some(ThemeType::Dark), theme_mode: Some(ThemeMode::AUTO), ..Theme::EMPTY }), screen_enabled: Some(true), }; "Test display set() output with a different non-empty input." )] #[fuchsia_async::run_singlethreaded(test)] async fn validate_display_set_output(expected_display: Display) -> Result<()> { let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { responder, .. } => { let _ = responder.send(&mut Ok(())); } DisplayRequest::Watch { .. } => { panic!("Unexpected call to watch"); } DisplayRequest::WatchLightSensor { .. } => { panic!("Unexpected call to watch light sensor"); } }); let output = utils::assert_set!(command(proxy, DisplaySettings::from(expected_display.clone()))); assert_eq!(output, format!("Successfully set Display to {:?}", expected_display)); Ok(()) } #[test_case( Display { brightness: None, auto_brightness_level: None, auto_brightness: None, low_light_mode: None, theme: None, screen_enabled: None, }; "Test display watch() output with empty input." )] #[test_case( Display { brightness: Some(0.5), auto_brightness_level: None, auto_brightness: Some(false), low_light_mode: None, theme: None, screen_enabled: None, }; "Test display watch() output with non-empty input." )] #[fuchsia_async::run_singlethreaded(test)] async fn validate_display_watch_output(expected_display: Display) -> Result<()> { let expected_display_clone = expected_display.clone(); let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { .. } => { panic!("Unexpected call to set"); } DisplayRequest::Watch { responder } => { let _ = responder.send(DisplaySettings::from(expected_display.clone())); } DisplayRequest::WatchLightSensor { .. 
} => { panic!("Unexpected call to watch light sensor"); } }); let output = utils::assert_watch!(command( proxy, DisplaySettings::from(Display { brightness: None, auto_brightness_level: None, auto_brightness: None, low_light_mode: None, theme: None, screen_enabled: None, }) )); assert_eq!(output, format!("{:#?}", DisplaySettings::from(expected_display_clone))); Ok(()) } }
use anyhow::Result; use ffx_core::ffx_plugin; use ffx_setui_display_args::Display; use fidl_fuchsia_settings::{DisplayProxy, DisplaySettings}; use utils::handle_mixed_result; use utils::{self, Either, WatchOrSetResult}; #[ffx_plugin("setui", DisplayProxy = "core/setui_service:expose:fuchsia.settings.Display")] pub async fn run_command(display_proxy: DisplayProxy, display: Display) -> Result<()> { handle_mixed_result("Display", command(display_proxy, DisplaySettings::from(display)).await) .await } async fn command(proxy: DisplayProxy, settings: DisplaySettings) -> WatchOrSetResult { if settings == DisplaySettings::EMPTY { Ok(Either::Watch(utils::watch_to_stream(proxy, |p| p.watch()))) } else { Ok(Either::Set(if let Err(err) = proxy.set(settings.clone()).await? { format!("{:?}", err) } else { format!("Successfully set Display to {:?}", Display::from(settings)) })) } } #[cfg(test)] mod test { use super::*; use fidl_fuchsia_settings::{DisplayRequest, LowLightMode, Theme, ThemeMode, ThemeType}; use futures::prelude::*; use test_case::test_case; #[fuchsia_async::run_singlethreaded(test)] async fn test_run_command() { let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { responder, .. } => { let _ = responder.send(&mut Ok(())); } DisplayRequest::Watch { .. } => { panic!("Unexpected call to watch"); } DisplayRequest::WatchLightSensor { .. } => { panic!("Unexpected call to watch light sensor"); } }); let display = Display { brightness: None, auto_brightness_level: None, auto_brightness: Some(true), low_light_mode: None, theme: None, screen_enabled: None, }; let response = run_command(proxy, display).await; assert!(response.is_ok()); } #[test_case( Display { brightness: Some(0.5), auto_brightness_level: None, auto_brightness: Some(false), low_light_mode: None, theme: None, screen_enabled: None, }; "Test display set() output with non-empty input." 
)] #[test_case( Display { brightness: None, auto_brightness_level: Some(0.8), auto_brightness: Some(true), low_light_mode: Some(LowLightMode::Enable), theme: Some(Theme { theme_type: Some(ThemeType::Dark), theme_mode: Some(ThemeMode::AUTO), ..Theme::EMPTY }), screen_enabled: Some(true), }; "Test display set() output with a different non-empty input." )] #[fuchsia_async::run_singlethreaded(test)]
#[test_case( Display { brightness: None, auto_brightness_level: None, auto_brightness: None, low_light_mode: None, theme: None, screen_enabled: None, }; "Test display watch() output with empty input." )] #[test_case( Display { brightness: Some(0.5), auto_brightness_level: None, auto_brightness: Some(false), low_light_mode: None, theme: None, screen_enabled: None, }; "Test display watch() output with non-empty input." )] #[fuchsia_async::run_singlethreaded(test)] async fn validate_display_watch_output(expected_display: Display) -> Result<()> { let expected_display_clone = expected_display.clone(); let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { .. } => { panic!("Unexpected call to set"); } DisplayRequest::Watch { responder } => { let _ = responder.send(DisplaySettings::from(expected_display.clone())); } DisplayRequest::WatchLightSensor { .. } => { panic!("Unexpected call to watch light sensor"); } }); let output = utils::assert_watch!(command( proxy, DisplaySettings::from(Display { brightness: None, auto_brightness_level: None, auto_brightness: None, low_light_mode: None, theme: None, screen_enabled: None, }) )); assert_eq!(output, format!("{:#?}", DisplaySettings::from(expected_display_clone))); Ok(()) } }
async fn validate_display_set_output(expected_display: Display) -> Result<()> { let proxy = setup_fake_display_proxy(move |req| match req { DisplayRequest::Set { responder, .. } => { let _ = responder.send(&mut Ok(())); } DisplayRequest::Watch { .. } => { panic!("Unexpected call to watch"); } DisplayRequest::WatchLightSensor { .. } => { panic!("Unexpected call to watch light sensor"); } }); let output = utils::assert_set!(command(proxy, DisplaySettings::from(expected_display.clone()))); assert_eq!(output, format!("Successfully set Display to {:?}", expected_display)); Ok(()) }
function_block-full_function
[]
Rust
src/setup_config.rs
amarant/msvc-helper
27821c5cdbfdebcae94342f8b6662791f1e78b02
#![allow(bad_style)] use std::ffi::OsString; use std::ptr::null_mut; use std::fmt; use winapi::Interface; use winapi::shared::minwindef::{LPFILETIME, ULONG}; use winapi::shared::winerror::S_FALSE; use winapi::shared::wtypes::BSTR; use winapi::shared::wtypesbase::LPCOLESTR; use winapi::um::combaseapi::{CoCreateInstance, CLSCTX_ALL}; use winapi::um::oaidl::LPSAFEARRAY; use winapi::um::unknwnbase::{IUnknown, IUnknownVtbl}; use winapi::um::winnt::{HRESULT, LCID, LPCWSTR, PULONGLONG}; use winapi::um::combaseapi::CoInitializeEx; use winapi::um::objbase::COINIT_MULTITHREADED; use utils::BStr; use wio::com::ComPtr; ENUM!{enum InstanceState { eNone = 0, eLocal = 1, eRegistered = 2, eNoRebootRequired = 4, eComplete = -1i32 as u32, }} RIDL!{#[uuid(0xb41463c3, 0x8866, 0x43b5, 0xbc, 0x33, 0x2b, 0x06, 0x76, 0xf7, 0xf4, 0x2e)] interface ISetupInstance(ISetupInstanceVtbl): IUnknown(IUnknownVtbl) { fn GetInstanceId( pbstrInstanceId: *mut BSTR, ) -> HRESULT, fn GetInstallDate( pInstallDate: LPFILETIME, ) -> HRESULT, fn GetInstallationName( pbstrInstallationName: *mut BSTR, ) -> HRESULT, fn GetInstallationPath( pbstrInstallationPath: *mut BSTR, ) -> HRESULT, fn GetInstallationVersion( pbstrInstallationVersion: *mut BSTR, ) -> HRESULT, fn GetDisplayName( lcid: LCID, pbstrDisplayName: *mut BSTR, ) -> HRESULT, fn GetDescription( lcid: LCID, pbstrDescription: *mut BSTR, ) -> HRESULT, fn ResolvePath( pwszRelativePath: LPCOLESTR, pbstrAbsolutePath: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x89143c9a, 0x05af, 0x49b0, 0xb7, 0x17, 0x72, 0xe2, 0x18, 0xa2, 0x18, 0x5c)] interface ISetupInstance2(ISetupInstance2Vtbl): ISetupInstance(ISetupInstanceVtbl) { fn GetState( pState: *mut InstanceState, ) -> HRESULT, fn GetPackages( ppsaPackages: *mut LPSAFEARRAY, ) -> HRESULT, fn GetProduct( ppPackage: *mut *mut ISetupPackageReference, ) -> HRESULT, fn GetProductPath( pbstrProductPath: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x6380bcff, 0x41d3, 0x4b2e, 0x8b, 0x2e, 0xbf, 0x8a, 0x68, 0x10, 0xc8, 0x48)] 
interface IEnumSetupInstances(IEnumSetupInstancesVtbl): IUnknown(IUnknownVtbl) { fn Next( celt: ULONG, rgelt: *mut *mut ISetupInstance, pceltFetched: *mut ULONG, ) -> HRESULT, fn Skip( celt: ULONG, ) -> HRESULT, fn Reset() -> HRESULT, fn Clone( ppenum: *mut *mut IEnumSetupInstances, ) -> HRESULT, }} RIDL!{#[uuid(0x42843719, 0xdb4c, 0x46c2, 0x8e, 0x7c, 0x64, 0xf1, 0x81, 0x6e, 0xfd, 0x5b)] interface ISetupConfiguration(ISetupConfigurationVtbl): IUnknown(IUnknownVtbl) { fn EnumInstances( ppEnumInstances: *mut *mut IEnumSetupInstances, ) -> HRESULT, fn GetInstanceForCurrentProcess( ppInstance: *mut *mut ISetupInstance, ) -> HRESULT, fn GetInstanceForPath( wzPath: LPCWSTR, ppInstance: *mut *mut ISetupInstance, ) -> HRESULT, }} RIDL!{#[uuid(0x26aab78c, 0x4a60, 0x49d6, 0xaf, 0x3b, 0x3c, 0x35, 0xbc, 0x93, 0x36, 0x5d)] interface ISetupConfiguration2(ISetupConfiguration2Vtbl): ISetupConfiguration(ISetupConfigurationVtbl) { fn EnumAllInstances( ppEnumInstances: *mut *mut IEnumSetupInstances, ) -> HRESULT, }} RIDL!{#[uuid(0xda8d8a16, 0xb2b6, 0x4487, 0xa2, 0xf1, 0x59, 0x4c, 0xcc, 0xcd, 0x6b, 0xf5)] interface ISetupPackageReference(ISetupPackageReferenceVtbl): IUnknown(IUnknownVtbl) { fn GetId( pbstrId: *mut BSTR, ) -> HRESULT, fn GetVersion( pbstrVersion: *mut BSTR, ) -> HRESULT, fn GetChip( pbstrChip: *mut BSTR, ) -> HRESULT, fn GetLanguage( pbstrLanguage: *mut BSTR, ) -> HRESULT, fn GetBranch( pbstrBranch: *mut BSTR, ) -> HRESULT, fn GetType( pbstrType: *mut BSTR, ) -> HRESULT, fn GetUniqueId( pbstrUniqueId: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x42b21b78, 0x6192, 0x463e, 0x87, 0xbf, 0xd5, 0x77, 0x83, 0x8f, 0x1d, 0x5c)] interface ISetupHelper(ISetupHelperVtbl): IUnknown(IUnknownVtbl) { fn ParseVersion( pwszVersion: LPCOLESTR, pullVersion: PULONGLONG, ) -> HRESULT, fn ParseVersionRange( pwszVersionRange: LPCOLESTR, pullMinVersion: PULONGLONG, pullMaxVersion: PULONGLONG, ) -> HRESULT, }} DEFINE_GUID!{CLSID_SetupConfiguration, 0x177f0c4a, 0x1cd3, 0x4de7, 0xa3, 0x2c, 0x71, 
0xdb, 0xbb, 0x9f, 0xa3, 0x6d} pub fn initialize_com() -> Result<i32, i32> { let err = unsafe { CoInitializeEx(null_mut(), COINIT_MULTITHREADED) }; if err < 0 { return Err(err); } Ok(err) } lazy_static! { static ref COM_INIT: Result<i32, i32> = { initialize_com() }; } pub struct SetupConfiguration(ComPtr<ISetupConfiguration>); impl SetupConfiguration { pub fn new() -> Result<SetupConfiguration, i32> { if let Err(i) = *COM_INIT { return Err(i); } let mut obj = null_mut(); let err = unsafe { CoCreateInstance( &CLSID_SetupConfiguration, null_mut(), CLSCTX_ALL, &ISetupConfiguration::uuidof(), &mut obj, ) }; if err < 0 { return Err(err); } let obj = unsafe { ComPtr::from_raw(obj as *mut ISetupConfiguration) }; Ok(SetupConfiguration(obj)) } pub fn get_instance_for_current_process(&self) -> Result<SetupInstance, i32> { let mut obj = null_mut(); let err = unsafe { self.0.GetInstanceForCurrentProcess(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { SetupInstance::from_raw(obj) }) } pub fn enum_instances(&self) -> Result<EnumSetupInstances, i32> { let mut obj = null_mut(); let err = unsafe { self.0.EnumInstances(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { EnumSetupInstances::from_raw(obj) }) } pub fn enum_all_instances(&self) -> Result<EnumSetupInstances, i32> { let mut obj = null_mut(); let this = try!(self.0.cast::<ISetupConfiguration2>()); let err = unsafe { this.EnumAllInstances(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { EnumSetupInstances::from_raw(obj) }) } } pub struct SetupInstance(ComPtr<ISetupInstance>); impl SetupInstance { pub unsafe fn from_raw(obj: *mut ISetupInstance) -> SetupInstance { SetupInstance(ComPtr::from_raw(obj)) } pub fn instance_id(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstanceId(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_name(&self) -> Result<OsString, i32> { let mut s = 
null_mut(); let err = unsafe { self.0.GetInstallationName(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_path(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstallationPath(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_version(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstallationVersion(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn product_path(&self) -> Result<OsString, i32> { let mut s = null_mut(); let this = try!(self.0.cast::<ISetupInstance2>()); let err = unsafe { this.GetProductPath(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } } impl fmt::Debug for SetupInstance { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Setup Instance {{ instance_id: {:?}, installation_name: {:?}, installation_path: {:?}, installation_version: {:?}, product_path: {:?}}}", self.instance_id(), self.installation_name(), self.installation_path(), self.installation_version(), self.product_path(), ) } } fn displayRes(res: Result<OsString, i32>) -> String { match res { Ok(s) => match s.into_string() { Ok(s) => s, Err(_) => "Error".into(), }, Err(i) => format!("Error: {}", i), } } impl fmt::Display for SetupInstance { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Instance id: {}, Installation name: {}, Installation path: {}, Installation version: {}, Product path: {}", displayRes(self.instance_id()), displayRes(self.installation_name()), displayRes(self.installation_path()), displayRes(self.installation_version()), displayRes(self.product_path()) ) } } pub struct EnumSetupInstances(ComPtr<IEnumSetupInstances>); impl EnumSetupInstances { pub unsafe fn from_raw(obj: *mut 
IEnumSetupInstances) -> EnumSetupInstances { EnumSetupInstances(ComPtr::from_raw(obj)) } } impl Iterator for EnumSetupInstances { type Item = Result<SetupInstance, i32>; fn next(&mut self) -> Option<Result<SetupInstance, i32>> { let mut obj = null_mut(); let err = unsafe { self.0.Next(1, &mut obj, null_mut()) }; if err < 0 { return Some(Err(err)); } if err == S_FALSE { return None; } Some(Ok(unsafe { SetupInstance::from_raw(obj) })) } }
#![allow(bad_style)] use std::ffi::OsString; use std::ptr::null_mut; use std::fmt; use winapi::Interface; use winapi::shared::minwindef::{LPFILETIME, ULONG}; use winapi::shared::winerror::S_FALSE; use winapi::shared::wtypes::BSTR; use winapi::shared::wtypesbase::LPCOLESTR; use winapi::um::combaseapi::{CoCreateInstance, CLSCTX_ALL}; use winapi::um::oaidl::LPSAFEARRAY; use winapi::um::unknwnbase::{IUnknown, IUnknownVtbl}; use winapi::um::winnt::{HRESULT, LCID, LPCWSTR, PULONGLONG}; use winapi::um::combaseapi::CoInitializeEx; use winapi::um::objbase::COINIT_MULTITHREADED; use utils::BStr; use wio::com::ComPtr; ENUM!{enum InstanceState { eNone = 0, eLocal = 1, eRegistered = 2, eNoRebootRequired = 4, eComplete = -1i32 as u32, }} RIDL!{#[uuid(0xb41463c3, 0x8866, 0x43b5, 0xbc, 0x33, 0x2b, 0x06, 0x76, 0xf7, 0xf4, 0x2e)] interface ISetupInstance(ISetupInstanceVtbl): IUnknown(IUnknownVtbl) { fn GetInstanceId( pbstrInstanceId: *mut BSTR, ) -> HRESULT, fn GetInstallDate( pInstallDate: LPFILETIME, ) -> HRESULT, fn GetInstallationName( pbstrInstallationName: *mut BSTR, ) -> HRESULT, fn GetInstallationPath( pbstrInstallationPath: *mut BSTR, ) -> HRESULT, fn GetInstallationVersion( pbstrInstallationVersion: *mut BSTR, ) -> HRESULT, fn GetDisplayName( lcid: LCID, pbstrDisplayName: *mut BSTR, ) -> HRESULT, fn GetDescription( lcid: LCID, pbstrDescription: *mut BSTR, ) -> HRESULT, fn ResolvePath( pwszRelativePath: LPCOLESTR, pbstrAbsolutePath: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x89143c9a, 0x05af, 0x49b0, 0xb7, 0x17, 0x72, 0xe2, 0x18, 0xa2, 0x18, 0x5c)] interface ISetupInstance2(ISetupInstance2Vtbl): ISetupInstance(ISetupInstanceVtbl) { fn GetState( pState: *mut InstanceState, ) -> HRESULT, fn GetPackages( ppsaPackages: *mut LPSAFEARRAY, ) -> HRESULT, fn GetProduct( ppPackage: *mut *mut ISetupPackageReference, ) -> HRESULT, fn GetProductPath( pbstrProductPath: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x6380bcff, 0x41d3, 0x4b2e, 0x8b, 0x2e, 0xbf, 0x8a, 0x68, 0x10, 0xc8, 0x48)] 
interface IEnumSetupInstances(IEnumSetupInstancesVtbl): IUnknown(IUnknownVtbl) { fn Next( celt: ULONG, rgelt: *mut *mut ISetupInstance, pceltFetched: *mut ULONG, ) -> HRESULT, fn Skip( celt: ULONG, ) -> HRESULT, fn Reset() -> HRESULT, fn Clone( ppenum: *mut *mut IEnumSetupInstances, ) -> HRESULT, }} RIDL!{#[uuid(0x42843719, 0xdb4c, 0x46c2, 0x8e, 0x7c, 0x64, 0xf1, 0x81, 0x6e, 0xfd, 0x5b)] interface ISetupConfiguration(ISetupConfigurationVtbl): IUnknown(IUnknownVtbl) { fn EnumInstances( ppEnumInstances: *mut *mut IEnumSetupInstances, ) -> HRESULT, fn GetInstanceForCurrentProcess( ppInstance: *mut *mut ISetupInstance, ) -> HRESULT, fn GetInstanceForPath( wzPath: LPCWSTR, ppInstance: *mut *mut ISetupInstance, ) -> HRESULT, }} RIDL!{#[uuid(0x26aab78c, 0x4a60, 0x49d6, 0xaf, 0x3b, 0x3c, 0x35, 0xbc, 0x93, 0x36, 0x5d)] interface ISetupConfiguration2(ISetupConfiguration2Vtbl): ISetupConfiguration(ISetupConfigurationVtbl) { fn EnumAllInstances( ppEnumInstances: *mut *mut IEnumSetupInstances, ) -> HRESULT, }} RIDL!{#[uuid(0xda8d8a16, 0xb2b6, 0x4487, 0xa2, 0xf1, 0x59, 0x4c, 0xcc, 0xcd, 0x6b, 0xf5)] interface ISetupPackageReference(ISetupPackageReferenceVtbl): IUnknown(IUnknownVtbl) { fn GetId( pbstrId: *mut BSTR, ) -> HRESULT, fn GetVersion( pbstrVersion: *mut BSTR, ) -> HRESULT, fn GetChip( pbstrChip: *mut BSTR, ) -> HRESULT, fn GetLanguage( pbstrLanguage: *mut BSTR, ) -> HRESULT, fn GetBranch( pbstrBranch: *mut BSTR, ) -> HRESULT, fn GetType( pbstrType: *mut BSTR, ) -> HRESULT, fn GetUniqueId( pbstrUniqueId: *mut BSTR, ) -> HRESULT, }} RIDL!{#[uuid(0x42b21b78, 0x6192, 0x463e, 0x87, 0xbf, 0xd5, 0x77, 0x83, 0x8f, 0x1d, 0x5c)] interface ISetupHelper(ISetupHelperVtbl): IUnknown(IUnknownVtbl) { fn ParseVersion( pwszVersion: LPCOLESTR, pullVersion: PULONGLONG, ) -> HRESULT, fn ParseVersionRange( pwszVersionRange: LPCOLESTR, pullMinVersion: PULONGLONG, pullMaxVersion: PULONGLONG, ) -> HRESULT, }} DEFINE_GUID!{CLSID_SetupConfiguration, 0x177f0c4a, 0x1cd3, 0x4de7, 0xa3, 0x2c, 0x71, 
0xdb, 0xbb, 0x9f, 0xa3, 0x6d} pub fn initialize_com() -> Result<i32, i32> { let err = unsafe { CoInitializeEx(null_mut(), COINIT_MULTITHREADED) }; if err < 0 { return Err(err); } Ok(err) } lazy_static! { static ref COM_INIT: Result<i32, i32> = { initialize_com() }; } pub struct SetupConfiguration(ComPtr<ISetupConfiguration>); impl SetupConfiguration { pub fn new() -> Result<SetupConfiguration, i32> { if let Err(i) = *COM_INIT { return Err(i); } let mut obj = null_mut(); let err = unsafe {
}; if err < 0 { return Err(err); } let obj = unsafe { ComPtr::from_raw(obj as *mut ISetupConfiguration) }; Ok(SetupConfiguration(obj)) } pub fn get_instance_for_current_process(&self) -> Result<SetupInstance, i32> { let mut obj = null_mut(); let err = unsafe { self.0.GetInstanceForCurrentProcess(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { SetupInstance::from_raw(obj) }) } pub fn enum_instances(&self) -> Result<EnumSetupInstances, i32> { let mut obj = null_mut(); let err = unsafe { self.0.EnumInstances(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { EnumSetupInstances::from_raw(obj) }) } pub fn enum_all_instances(&self) -> Result<EnumSetupInstances, i32> { let mut obj = null_mut(); let this = try!(self.0.cast::<ISetupConfiguration2>()); let err = unsafe { this.EnumAllInstances(&mut obj) }; if err < 0 { return Err(err); } Ok(unsafe { EnumSetupInstances::from_raw(obj) }) } } pub struct SetupInstance(ComPtr<ISetupInstance>); impl SetupInstance { pub unsafe fn from_raw(obj: *mut ISetupInstance) -> SetupInstance { SetupInstance(ComPtr::from_raw(obj)) } pub fn instance_id(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstanceId(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_name(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstallationName(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_path(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstallationPath(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } pub fn installation_version(&self) -> Result<OsString, i32> { let mut s = null_mut(); let err = unsafe { self.0.GetInstallationVersion(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return 
Err(err); } Ok(bstr.to_osstring()) } pub fn product_path(&self) -> Result<OsString, i32> { let mut s = null_mut(); let this = try!(self.0.cast::<ISetupInstance2>()); let err = unsafe { this.GetProductPath(&mut s) }; let bstr = unsafe { BStr::from_raw(s) }; if err < 0 { return Err(err); } Ok(bstr.to_osstring()) } } impl fmt::Debug for SetupInstance { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Setup Instance {{ instance_id: {:?}, installation_name: {:?}, installation_path: {:?}, installation_version: {:?}, product_path: {:?}}}", self.instance_id(), self.installation_name(), self.installation_path(), self.installation_version(), self.product_path(), ) } } fn displayRes(res: Result<OsString, i32>) -> String { match res { Ok(s) => match s.into_string() { Ok(s) => s, Err(_) => "Error".into(), }, Err(i) => format!("Error: {}", i), } } impl fmt::Display for SetupInstance { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Instance id: {}, Installation name: {}, Installation path: {}, Installation version: {}, Product path: {}", displayRes(self.instance_id()), displayRes(self.installation_name()), displayRes(self.installation_path()), displayRes(self.installation_version()), displayRes(self.product_path()) ) } } pub struct EnumSetupInstances(ComPtr<IEnumSetupInstances>); impl EnumSetupInstances { pub unsafe fn from_raw(obj: *mut IEnumSetupInstances) -> EnumSetupInstances { EnumSetupInstances(ComPtr::from_raw(obj)) } } impl Iterator for EnumSetupInstances { type Item = Result<SetupInstance, i32>; fn next(&mut self) -> Option<Result<SetupInstance, i32>> { let mut obj = null_mut(); let err = unsafe { self.0.Next(1, &mut obj, null_mut()) }; if err < 0 { return Some(Err(err)); } if err == S_FALSE { return None; } Some(Ok(unsafe { SetupInstance::from_raw(obj) })) } }
CoCreateInstance( &CLSID_SetupConfiguration, null_mut(), CLSCTX_ALL, &ISetupConfiguration::uuidof(), &mut obj, )
call_expression
[ { "content": "pub fn get_lasted_platform_toolset() -> Option<String> {\n\n get_toolchains()\n\n .iter()\n\n .next()\n\n .map(|v| v.platform_toolset.clone())\n\n}\n", "file_path": "src/toolchain.rs", "rank": 1, "score": 59086.79993836931 }, { "content": "fn os_to_res_string(res: Result<OsString, i32>) -> Result<String, Option<i32>> {\n\n match res {\n\n Ok(s) => match s.into_string() {\n\n Ok(s) => Ok(s),\n\n Err(_) => Err(None),\n\n },\n\n Err(i) => Err(Some(i)),\n\n }\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 2, "score": 59073.93136446357 }, { "content": "fn os_to_res_pathbuf(res: Result<OsString, i32>) -> Result<PathBuf, Option<i32>> {\n\n match res {\n\n Ok(s) => match s.into_string() {\n\n Ok(s) => Ok(PathBuf::from(s)),\n\n Err(_) => Err(None),\n\n },\n\n Err(i) => Err(Some(i)),\n\n }\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 3, "score": 58203.30645145073 }, { "content": "pub fn get_toolchains() -> Vec<VisualStudioInstallationInstance> {\n\n let config = SetupConfiguration::new().unwrap();\n\n let iter = config.enum_all_instances().unwrap();\n\n let mut toolchains: Vec<VisualStudioInstallationInstance> = Vec::new();\n\n for instance in iter {\n\n let mut instance: VisualStudioInstallationInstance = match instance {\n\n Ok(instance) => match transform(&instance) {\n\n Ok(instance) => instance,\n\n Err(_) => continue,\n\n },\n\n Err(_) => continue,\n\n };\n\n let installation_version = instance.installation_version.clone();\n\n let split: Vec<&str> = installation_version.split('.').collect();\n\n let major_version = split[0];\n\n let medium_version = split[1];\n\n let platform_toolset = match (major_version, medium_version) as (&str, &str) {\n\n // from http://marcofoco.com/microsoft-visual-c-version-map/\n\n (\"15\", _) => Some(\"v141\"),\n\n (\"14\", _) => Some(\"v140\"),\n", "file_path": "src/toolchain.rs", "rank": 4, "score": 57908.0456476947 }, { "content": "pub fn get_windows_sdk() -> Vec<WindowsSdk> {\n\n let mut windows_sdk = 
Vec::<WindowsSdk>::new();\n\n\n\n debug!(\"Looking for Windows 10 SDK\");\n\n if let Some(path) = get_windows_sdk_path_from_regsitry_logged(MS_REG_PATH, REG_VALUE_10) {\n\n windows_sdk = check_add_windows_10_sdk(&path, None, windows_sdk);\n\n }\n\n if let Some(path) = get_windows_sdk_path_from_regsitry_logged(WOW_REG_PATH, REG_VALUE_10) {\n\n windows_sdk = check_add_windows_10_sdk(&path, None, windows_sdk);\n\n }\n\n\n\n if let Some(program_files_path) =\n\n env::var_os(PROGRAM_FILES_ENV).and_then(|p| p.into_string().ok())\n\n {\n\n windows_sdk =\n\n check_add_windows_10_sdk(&program_files_path, Some(WIN_KITS_PATH_10), windows_sdk);\n\n }\n\n if let Some(program_files_x86_path) =\n\n env::var_os(PROGRAM_FILES_X86_ENV).and_then(|p| p.into_string().ok())\n\n {\n", "file_path": "src/windows_sdk.rs", "rank": 5, "score": 57908.0456476947 }, { "content": "pub fn get_latest_windows_sdk() -> Option<String> {\n\n get_windows_sdk()\n\n .iter()\n\n .next()\n\n .map(|v| v.windows_target_platform_version.clone())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate env_logger;\n\n\n\n use super::{get_windows_sdk, SdkVersion};\n\n use std::process::Command;\n\n\n\n fn get_powershell_windows_sdk_version(param_opt: Option<&str>) -> Option<String> {\n\n let mut args = Vec::new();\n\n args.push(\"powershell\\\\getWindowsSDK.ps1\");\n\n if let Some(param) = param_opt {\n\n args.push(param);\n\n }\n", "file_path": "src/windows_sdk.rs", "rank": 6, "score": 57908.0456476947 }, { "content": "fn transform(instance: &SetupInstance) -> Result<VisualStudioInstallationInstance, Option<i32>> {\n\n let instance_id = os_to_res_string(instance.instance_id())?;\n\n let installation_name = os_to_res_string(instance.installation_name())?;\n\n let installation_path = os_to_res_pathbuf(instance.installation_path())?;\n\n let installation_version = os_to_res_string(instance.installation_version())?;\n\n let product_path = os_to_res_pathbuf(instance.product_path())?;\n\n 
Ok(VisualStudioInstallationInstance {\n\n instance_id,\n\n installation_name,\n\n installation_path,\n\n installation_version,\n\n product_path,\n\n platform_toolset: \"\".into(),\n\n })\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 8, "score": 50869.55504177447 }, { "content": "pub trait FromWide\n\nwhere\n\n Self: Sized,\n\n{\n\n fn from_wide(wide: &[u16]) -> Self;\n\n fn from_wide_null(wide: &[u16]) -> Self {\n\n let len = wide.iter().take_while(|&&c| c != 0).count();\n\n Self::from_wide(&wide[..len])\n\n }\n\n}\n\nimpl FromWide for OsString {\n\n fn from_wide(wide: &[u16]) -> OsString {\n\n OsStringExt::from_wide(wide)\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 9, "score": 37133.99477081855 }, { "content": "pub trait ToWide {\n\n fn to_wide(&self) -> Vec<u16>;\n\n fn to_wide_null(&self) -> Vec<u16>;\n\n}\n\nimpl<T> ToWide for T\n\nwhere\n\n T: AsRef<OsStr>,\n\n{\n\n fn to_wide(&self) -> Vec<u16> {\n\n self.as_ref().encode_wide().collect()\n\n }\n\n fn to_wide_null(&self) -> Vec<u16> {\n\n self.as_ref().encode_wide().chain(Some(0)).collect()\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 10, "score": 37133.99477081855 }, { "content": "fn check_add_windows_10_sdk(\n\n path: &str,\n\n path_opt: Option<&str>,\n\n mut valid_instance: Vec<WindowsSdk>,\n\n) -> Vec<WindowsSdk> {\n\n let mut path_buf = PathBuf::new();\n\n path_buf.push(path);\n\n if let Some(some_path) = path_opt {\n\n path_buf.push(some_path);\n\n }\n\n debug!(\"Check {:?}\", path_buf);\n\n path_buf.push(\"Include\");\n\n if !path_buf.exists() {\n\n debug!(\"{:?} Not found\", path_buf);\n\n return valid_instance;\n\n }\n\n if let Ok(dir) = path_buf.read_dir() {\n\n let kit_paths: Vec<DirEntry> = dir.filter_map(|sub_res| sub_res.ok())\n\n .filter(|sub| {\n\n sub.file_name()\n", "file_path": "src/windows_sdk.rs", "rank": 11, "score": 32654.438070298478 }, { "content": "fn check_add_windows_81_sdk(\n\n path: &str,\n\n path_opt: Option<&str>,\n\n mut valid_instance: 
Vec<WindowsSdk>,\n\n) -> Vec<WindowsSdk> {\n\n let mut path_buf = PathBuf::new();\n\n path_buf.push(path);\n\n if let Some(some_path) = path_opt {\n\n path_buf.push(some_path);\n\n }\n\n debug!(\"Check {:?}\", path_buf);\n\n path_buf.push(\"Include\");\n\n if path_buf.exists() {\n\n if path_buf.read_dir().is_ok() {\n\n //TODO sort ?\n\n\n\n valid_instance.push(WindowsSdk {\n\n windows_version: SdkVersion::Win8,\n\n windows_target_platform_version: \"8.1\".into(),\n\n path: path_buf,\n\n });\n\n }\n\n } else {\n\n debug!(\"Can't get file name of {:?}\", path_buf);\n\n }\n\n valid_instance\n\n}\n\n\n", "file_path": "src/windows_sdk.rs", "rank": 12, "score": 32654.438070298478 }, { "content": "fn get_windows_sdk_path_from_regsitry_logged(reg_path: &str, reg_value: &str) -> Option<String> {\n\n match get_windows_sdk_path_from_regsitry(reg_path, reg_value) {\n\n Ok(s) => Some(s),\n\n Err(err) => {\n\n debug!(\n\n \"Can't find registry key {} and value {} : {:?}\",\n\n reg_path, reg_value, err\n\n );\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n\npub enum SdkVersion {\n\n Win8,\n\n Win10,\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct WindowsSdk {\n\n pub windows_version: SdkVersion,\n\n pub windows_target_platform_version: String,\n\n pub path: PathBuf,\n\n}\n\n\n", "file_path": "src/windows_sdk.rs", "rank": 18, "score": 22350.163440621054 }, { "content": "fn get_windows_sdk_path_from_regsitry(reg_path: &str, reg_value: &str) -> io::Result<String> {\n\n let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);\n\n let installed_roots_subkey = hklm.open_subkey(reg_path)?;\n\n let kit_root_path: String = installed_roots_subkey.get_value(reg_value)?;\n\n Ok(kit_root_path)\n\n}\n\n\n", "file_path": "src/windows_sdk.rs", "rank": 19, "score": 21789.73433318595 }, { "content": "// Copyright © 2017 winapi-rs developers\n\n// Licensed under the Apache License, Version 2.0\n\n// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT 
license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.\n\n// All files in the project carrying such notice may not be copied, modified, or distributed\n\n// except according to those terms.\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::os::windows::ffi::{OsStrExt, OsStringExt};\n\nuse std::slice::from_raw_parts;\n\nuse winapi::shared::wtypes::BSTR;\n\nuse winapi::um::oleauto::{SysFreeString, SysStringLen};\n\n\n\npub struct BStr(BSTR);\n\nimpl BStr {\n\n pub unsafe fn from_raw(s: BSTR) -> BStr {\n\n BStr(s)\n\n }\n\n pub fn to_osstring(&self) -> OsString {\n\n let len = unsafe { SysStringLen(self.0) };\n\n let slice = unsafe { from_raw_parts(self.0, len as usize) };\n\n OsStringExt::from_wide(slice)\n\n }\n\n}\n\nimpl Drop for BStr {\n\n fn drop(&mut self) {\n\n unsafe { SysFreeString(self.0) };\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 30, "score": 7.5227221666732875 }, { "content": "extern crate env_logger;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate winapi;\n\nextern crate winreg;\n\nextern crate wio;\n\n\n\npub mod windows_sdk;\n\npub mod setup_config;\n\npub mod utils;\n\npub mod toolchain;\n", "file_path": "src/lib.rs", "rank": 31, "score": 5.35202817241424 }, { "content": "use std::ffi::OsString;\n\nuse std::path::PathBuf;\n\nuse setup_config::{SetupConfiguration, SetupInstance};\n\nuse std::cmp::Ordering;\n\n\n\n#[derive(Debug)]\n\npub struct VisualStudioInstallationInstance {\n\n instance_id: String,\n\n installation_name: String,\n\n installation_path: PathBuf,\n\n installation_version: String,\n\n product_path: PathBuf,\n\n platform_toolset: String,\n\n}\n\n\n\nimpl VisualStudioInstallationInstance {\n\n fn set_platform_toolset(&mut self, platform_toolset: String) {\n\n self.platform_toolset = platform_toolset;\n\n }\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 32, "score": 4.701766799251471 }, { "content": "use 
std::fs::DirEntry;\n\nuse std::path::PathBuf;\n\nuse std::io;\n\nuse winreg::RegKey;\n\nuse winreg::enums::*;\n\nuse std::env;\n\nuse std::cmp::Ordering;\n\n\n\nstatic MS_REG_PATH: &str = r\"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots\\\";\n\nstatic WOW_REG_PATH: &str = r\"WOW6432Node\\Microsoft\\Windows Kits\\Installed Roots\\\";\n\nstatic REG_VALUE_10: &str = \"KitsRoot10\";\n\nstatic REG_VALUE_81: &str = \"KitsRoot81\";\n\nstatic PROGRAM_FILES_ENV: &str = \"%ProgramFiles%\";\n\nstatic PROGRAM_FILES_X86_ENV: &str = \"%ProgramFiles(x86)%\";\n\nstatic WIN_KITS_PATH_10: &str = r\"Windows Kits\\10\";\n\nstatic WIN_KITS_PATH_81: &str = r\"Windows Kits\\8.1\";\n\n\n", "file_path": "src/windows_sdk.rs", "rank": 33, "score": 4.403963413446838 }, { "content": " get_windows_sdk()\n\n .iter()\n\n .filter(|v| v.windows_version == SdkVersion::Win8)\n\n .next()\n\n .map(|v| v.windows_target_platform_version.clone()),\n\n powershell_windows_sdk\n\n );\n\n }\n\n}\n", "file_path": "src/windows_sdk.rs", "rank": 34, "score": 3.3052014229622593 }, { "content": " let powershell_windows_sdk_output = Command::new(\"powershell\")\n\n .args(&args)\n\n .output()\n\n .expect(\"failed to execute process\");\n\n if (powershell_windows_sdk_output.stderr.len() > 0) {\n\n return None;\n\n }\n\n let powershell_windows_sdk = String::from_utf8_lossy(&powershell_windows_sdk_output.stdout);\n\n Some(powershell_windows_sdk.replace(\"\\r\\n\", \"\"))\n\n }\n\n\n\n #[test]\n\n fn same_latest_version_than_powershell() {\n\n let powershell_windows_sdk = get_powershell_windows_sdk_version(None);\n\n assert_eq!(\n\n get_windows_sdk()\n\n .iter()\n\n .next()\n\n .map(|v| v.windows_target_platform_version.clone()),\n\n powershell_windows_sdk\n", "file_path": "src/windows_sdk.rs", "rank": 35, "score": 3.0238434190360044 }, { "content": " .to_str()\n\n .map(|f| f.starts_with(\"10\"))\n\n .unwrap_or(false)\n\n })\n\n .collect();\n\n //TODO sort ?\n\n for sdk_dir in kit_paths {\n\n let mut windows_header 
= sdk_dir.path().clone();\n\n windows_header.push(\"um\");\n\n windows_header.push(\"windows.h\");\n\n if !windows_header.exists() {\n\n debug!(\"{:?} Not found \", windows_header);\n\n continue;\n\n }\n\n debug!(\"{:?} found \", windows_header);\n\n\n\n let mut ddk_header = sdk_dir.path().clone();\n\n ddk_header.push(\"shared\");\n\n ddk_header.push(\"sdkddkver.h\");\n\n if !ddk_header.exists() {\n", "file_path": "src/windows_sdk.rs", "rank": 36, "score": 2.989208112149307 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn same_win10_version_than_powershell() {\n\n let powershell_windows_sdk = get_powershell_windows_sdk_version(Some(\"-DisableWin81SDK\"));\n\n assert_eq!(\n\n get_windows_sdk()\n\n .iter()\n\n .filter(|v| v.windows_version == SdkVersion::Win10)\n\n .next()\n\n .map(|v| v.windows_target_platform_version.clone()),\n\n powershell_windows_sdk\n\n );\n\n }\n\n\n\n #[test]\n\n fn same_win8_version_than_powershell() {\n\n let powershell_windows_sdk = get_powershell_windows_sdk_version(Some(\"-DisableWin10SDK\"));\n\n assert_eq!(\n", "file_path": "src/windows_sdk.rs", "rank": 37, "score": 2.1998553733158643 } ]
Rust
proto-compiler/src/cmd/compile.rs
livelybug/ibc-rs
e83a2d0963fe6fa675b4125a4e8c18b13b792d88
use std::fs::remove_dir_all; use std::fs::{copy, create_dir_all}; use std::path::{Path, PathBuf}; use git2::Repository; use tempdir::TempDir; use walkdir::WalkDir; use argh::FromArgs; #[derive(Debug, FromArgs)] #[argh(subcommand, name = "compile")] pub struct CompileCmd { #[argh(option, short = 's')] sdk: PathBuf, #[argh(option, short = 'o')] out: PathBuf, } impl CompileCmd { pub fn run(&self) { let tmp = TempDir::new("ibc-proto").unwrap(); Self::output_sdk_version(&self.sdk, tmp.as_ref()); Self::compile_protos(&self.sdk, tmp.as_ref()); Self::compile_proto_services(&self.sdk, &tmp.as_ref()); Self::copy_generated_files(tmp.as_ref(), &self.out); } fn output_sdk_version(sdk_dir: &Path, out_dir: &Path) { let repo = Repository::open(sdk_dir).unwrap(); let commit = repo.head().unwrap(); let rev = commit.shorthand().unwrap(); let path = out_dir.join("COSMOS_SDK_COMMIT"); std::fs::write(path, rev).unwrap(); } fn compile_protos(sdk_dir: &Path, out_dir: &Path) { println!( "[info ] Compiling .proto files to Rust into '{}'...", out_dir.display() ); let root = env!("CARGO_MANIFEST_DIR"); let proto_paths = [ format!("{}/../proto/definitions/mock", root), format!("{}/proto/ibc", sdk_dir.display()), format!("{}/proto/cosmos/tx", sdk_dir.display()), format!("{}/proto/cosmos/base", sdk_dir.display()), format!("{}/proto/cosmos/staking", sdk_dir.display()), ]; let proto_includes_paths = [ format!("{}/../proto", root), format!("{}/proto", sdk_dir.display()), format!("{}/third_party/proto", sdk_dir.display()), ]; let mut protos: Vec<PathBuf> = vec![]; for proto_path in &proto_paths { protos.append( &mut WalkDir::new(proto_path) .into_iter() .filter_map(|e| e.ok()) .filter(|e| { e.file_type().is_file() && e.path().extension().is_some() && e.path().extension().unwrap() == "proto" }) .map(|e| e.into_path()) .collect(), ); } let includes: Vec<PathBuf> = proto_includes_paths.iter().map(PathBuf::from).collect(); let mut config = prost_build::Config::default(); config.out_dir(out_dir); 
config.extern_path(".tendermint", "::tendermint_proto"); config.compile_protos(&protos, &includes).unwrap(); } fn compile_proto_services(sdk_dir: impl AsRef<Path>, out_dir: impl AsRef<Path>) { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let sdk_dir = sdk_dir.as_ref().to_owned(); let proto_includes_paths = [ root.join("../proto"), sdk_dir.join("proto"), sdk_dir.join("third_party/proto"), ]; let includes = proto_includes_paths.iter().map(|p| p.as_os_str().to_os_string()).collect::<Vec<_>>(); let proto_services_path = [ sdk_dir.join("proto/cosmos/auth/v1beta1/query.proto"), sdk_dir.join("proto/cosmos/staking/v1beta1/query.proto"), ]; let services = proto_services_path.iter().map(|p| p.as_os_str().to_os_string()).collect::<Vec<_>>(); println!("[info ] Compiling proto clients for GRPC services!"); tonic_build::configure() .build_client(true) .build_server(false) .format(false) .out_dir(out_dir) .compile(&services, &includes).unwrap(); println!("[info ] => Done!"); } fn copy_generated_files(from_dir: &Path, to_dir: &Path) { println!( "[info ] Copying generated files into '{}'...", to_dir.display() ); remove_dir_all(&to_dir).unwrap_or_default(); create_dir_all(&to_dir).unwrap(); let errors = WalkDir::new(from_dir) .into_iter() .filter_map(|e| e.ok()) .filter(|e| e.file_type().is_file()) .map(|e| { copy( e.path(), format!( "{}/{}", to_dir.display(), &e.file_name().to_os_string().to_str().unwrap() ), ) }) .filter_map(|e| e.err()) .collect::<Vec<_>>(); if !errors.is_empty() { for e in errors { println!("[error] Error while copying compiled file: {}", e); } panic!("[error] Aborted."); } } }
use std::fs::remove_dir_all; use std::fs::{copy, create_dir_all}; use std::path::{Path, PathBuf}; use git2::Repository; use tempdir::TempDir; use walkdir::WalkDir; use argh::FromArgs; #[derive(Debug, FromArgs)] #[argh(subcommand, name = "compile")] pub struct CompileCmd { #[argh(option, short = 's')] sdk: PathBuf, #[argh(option, short = 'o')] out: PathBuf, } impl CompileCmd { pub fn run(&self) { let tmp = TempDir::new("ibc-proto").unwrap(); Self::output_sdk_version(&self.sdk, tmp.as_ref()); Self::compile_protos(&self.sdk, tmp.as_ref()); Self::compile_proto_services(&self.sdk, &tmp.as_ref()); Self::copy_generated_files(tmp.as_ref(), &self.out); } fn output_sdk_version(sdk_dir: &Path, out_dir: &Path) { let repo = Repository::open(sdk_dir).unwrap(); let commit = repo.head().unwrap(); let rev = commit.shorthand().unwrap(); let path = out_dir.join("COSMOS_SDK_COMMIT"); std::fs::write(path, rev).unwrap(); } fn compile_protos(sdk_dir: &Path, out_dir: &Path) { println!( "[info ] Compiling .proto files to Rust into '{}'...", out_dir.display() ); let root = env!("CARGO_MANIFEST_DIR"); let proto_paths = [ format!("{}/../proto/definitions/mock", root), format!("{}/proto/ibc", sdk_dir.display()), format!("{}/proto/cosmos/tx", sdk_dir.display()), format!("{}/proto/cosmos/base", sdk_dir.display()), format!("{}/proto/cosmos/staking", sdk_dir.display()), ]; let proto_includes_paths = [ format!("{}/../proto", root), format!("{}/proto", sdk_dir.display()), format!("{}/third_party/proto", sdk_dir.display()), ]; let mut protos: Vec<PathBuf> = vec![]; for proto_path in &proto_paths { protos.append( &mut WalkDir::new(proto_path) .into_iter() .filter_map(|e| e.ok()) .filter(|e| { e.file_type().is_file() && e.path().extension().is_some() && e.path().extension().unwrap() == "proto" }) .map(|e| e.into_path()) .collect(), ); } let includes: Vec<PathBuf> = proto_includes_paths.iter().map(PathBuf::from).collect(); let mut config = prost_build::Config::default(); config.out_dir(out_dir); 
config.extern_path(".tendermint", "::tendermint_proto"); config.compile_protos(&protos, &includes).unwrap(); } fn compile_proto_services(sdk_dir: impl AsRef<Path>, out_dir: impl AsRef<Path>) { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let sdk_dir = sdk_dir.as_ref().to_owned(); let proto_includes_paths = [ root.join("../proto"), sdk_dir.join("proto"), sdk_dir.join("third_party/proto"), ]; let includes = proto_includes_paths.iter().map(|p| p.as_os_str().to_os_string()).collect::<Vec<_>>(); let proto_services_path = [ sdk_dir.join("proto/cosmos/auth/v1beta1/query.proto"), sdk_dir.join("proto/cosmos/staking/v1beta1/query.proto"), ]; let services = proto_services_path.iter().map(|p| p.as_os_str().to_os_string()).collect::<Vec<_>>(); println!("[info ] Compiling proto clients for GRPC services!"); tonic_build::configure() .build_client(true) .build_server(false) .format(false) .out_dir(out_dir) .compile(&services, &includes).unwrap(); println!("[info ] => Done!"); } fn copy_generated_files(from_dir: &Path, to_dir: &Path) { println!( "[info ] Copying generated files into '{}'...", to_dir.display() ); remove_dir_all(&to_dir).unwrap_or_default(); create_dir_all(&to_dir).unwrap(); let errors = WalkDir::new(from_dir) .into_iter() .filter_map(|e| e.ok()) .filter(|e| e.file_type().is_file()) .map(|e| {
}) .filter_map(|e| e.err()) .collect::<Vec<_>>(); if !errors.is_empty() { for e in errors { println!("[error] Error while copying compiled file: {}", e); } panic!("[error] Aborted."); } } }
copy( e.path(), format!( "{}/{}", to_dir.display(), &e.file_name().to_os_string().to_str().unwrap() ), )
call_expression
[ { "content": "/// Serialize the given `Config` as TOML to the given config file.\n\npub fn store(config: &Config, path: impl AsRef<Path>) -> Result<(), Error> {\n\n let mut file = if path.as_ref().exists() {\n\n fs::OpenOptions::new().write(true).truncate(true).open(path)\n\n } else {\n\n File::create(path)\n\n }\n\n .map_err(Error::io)?;\n\n\n\n store_writer(config, &mut file)\n\n}\n\n\n\n/// Serialize the given `Config` as TOML to the given writer.\n\npub(crate) fn store_writer(config: &Config, mut writer: impl Write) -> Result<(), Error> {\n\n let toml_config = toml::to_string_pretty(&config).map_err(Error::encode)?;\n\n\n\n writeln!(writer, \"{}\", toml_config).map_err(Error::io)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/relayer/src/config.rs", "rank": 0, "score": 478323.0279035492 }, { "content": "/// Attempt to load and parse the TOML config file as a `Config`.\n\npub fn load(path: impl AsRef<Path>) -> Result<Config, Error> {\n\n let config_toml = std::fs::read_to_string(&path).map_err(Error::io)?;\n\n\n\n let config = toml::from_str::<Config>(&config_toml[..]).map_err(Error::decode)?;\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "crates/relayer/src/config.rs", "rank": 1, "score": 472487.37160788744 }, { "content": "pub fn disable_grpc_web(config: &mut Value) -> Result<(), Error> {\n\n if let Some(field) = config.get_mut(\"grpc-web\") {\n\n field\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"enable\".to_string(), false.into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 2, "score": 427282.48634847626 }, { "content": "pub fn apply_prefix(prefix: &CommitmentPrefix, mut path: Vec<String>) -> MerklePath {\n\n let mut key_path: Vec<String> = vec![format!(\"{:?}\", prefix)];\n\n key_path.append(&mut path);\n\n MerklePath { key_path }\n\n}\n\n\n\nimpl From<CommitmentRoot> for MerkleRoot {\n\n fn from(root: CommitmentRoot) -> Self {\n\n Self {\n\n hash: 
root.into_vec(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct MerkleProof {\n\n pub proofs: Vec<CommitmentProof>,\n\n}\n\n\n\n/// Convert to ics23::CommitmentProof\n", "file_path": "crates/relayer-types/src/core/ics23_commitment/merkle.rs", "rank": 3, "score": 407938.1637832237 }, { "content": "pub fn set_grpc_port(config: &mut Value, port: u16) -> Result<(), Error> {\n\n config\n\n .get_mut(\"grpc\")\n\n .ok_or_else(|| eyre!(\"expect grpc section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"address\".to_string(), format!(\"0.0.0.0:{}\", port).into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 4, "score": 405219.7439838312 }, { "content": "/// Set the `consensus.timeout_commit` field in the full node config.\n\npub fn set_timeout_commit(config: &mut Value, duration: Duration) -> Result<(), Error> {\n\n config\n\n .get_mut(\"consensus\")\n\n .ok_or_else(|| eyre!(\"expect consensus section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\n\n \"timeout_commit\".to_string(),\n\n format!(\"{}ms\", duration.as_millis()).into(),\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 5, "score": 405162.1942920741 }, { "content": "pub fn save_relayer_config(config: &Config, config_path: &Path) -> Result<(), Error> {\n\n let config_str = toml::to_string_pretty(&config)?;\n\n\n\n fs::write(&config_path, &config_str)?;\n\n\n\n info!(\n\n \"written hermes config.toml to {}:\\n{}\",\n\n config_path.display(),\n\n config_str\n\n );\n\n\n\n Ok(())\n\n}\n\n\n\nimpl BootstrapClientOptions {\n\n /// Overrides options for the foreign client connecting chain A to chain B.\n\n pub fn client_options_a_to_b(mut self, options: ClientOptions) -> Self {\n\n self.client_options_a_to_b = options;\n\n self\n\n }\n", "file_path": "tools/test-framework/src/bootstrap/binary/chain.rs", "rank": 6, 
"score": 397928.9810670435 }, { "content": "pub fn disable_api(config: &mut Value) -> Result<(), Error> {\n\n if let Some(field) = config.get_mut(\"api\") {\n\n field\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"enable\".to_string(), false.into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 7, "score": 383904.6654846188 }, { "content": "pub fn write_env(path: impl AsRef<Path>, exporter: &impl ExportEnv) -> Result<(), Error> {\n\n write(path, format_env(exporter))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tools/test-framework/src/types/env.rs", "rank": 8, "score": 378063.18538837804 }, { "content": "pub fn add_chain_config(config: &mut Config, running_node: &FullNode) -> Result<(), Error> {\n\n let chain_config = running_node.generate_chain_config(&running_node.chain_driver.chain_type)?;\n\n\n\n config.chains.push(chain_config);\n\n Ok(())\n\n}\n\n\n\n/**\n\n Save a relayer's [`Config`] to the filesystem to make it accessible\n\n through external CLI.\n\n*/\n", "file_path": "tools/test-framework/src/bootstrap/binary/chain.rs", "rank": 9, "score": 367648.30505292816 }, { "content": "pub fn set_mode(config: &mut Value, mode: &str) -> Result<(), Error> {\n\n config\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"mode\".to_string(), mode.into());\n\n\n\n Ok(())\n\n}\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 10, "score": 363116.9032045953 }, { "content": "/// Set the `p2p` field in the full node config.\n\npub fn set_p2p_port(config: &mut Value, port: u16) -> Result<(), Error> {\n\n config\n\n .get_mut(\"p2p\")\n\n .ok_or_else(|| eyre!(\"expect p2p section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\n\n \"laddr\".to_string(),\n\n format!(\"tcp://0.0.0.0:{}\", port).into(),\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 11, "score": 
359576.9783965756 }, { "content": "/// Set the `rpc` field in the full node config.\n\npub fn set_rpc_port(config: &mut Value, port: u16) -> Result<(), Error> {\n\n config\n\n .get_mut(\"rpc\")\n\n .ok_or_else(|| eyre!(\"expect rpc section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\n\n \"laddr\".to_string(),\n\n format!(\"tcp://0.0.0.0:{}\", port).into(),\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 12, "score": 359576.9783965756 }, { "content": "/// Set the `consensus.timeout_propose` field in the full node config.\n\npub fn set_timeout_propose(config: &mut Value, duration: Duration) -> Result<(), Error> {\n\n config\n\n .get_mut(\"consensus\")\n\n .ok_or_else(|| eyre!(\"expect consensus section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\n\n \"timeout_propose\".to_string(),\n\n format!(\"{}ms\", duration.as_millis()).into(),\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 13, "score": 359576.87407289376 }, { "content": "pub fn set_mempool_version(config: &mut Value, version: &str) -> Result<(), Error> {\n\n config\n\n .get_mut(\"mempool\")\n\n .ok_or_else(|| eyre!(\"expect mempool section\"))?\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"version\".to_string(), version.into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 14, "score": 359570.9755860737 }, { "content": "/// Set the `log_level` field in the full node config.\n\npub fn set_log_level(config: &mut Value, log_level: &str) -> Result<(), Error> {\n\n config\n\n .as_table_mut()\n\n .ok_or_else(|| eyre!(\"expect object\"))?\n\n .insert(\"log_level\".to_string(), log_level.into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/config.rs", "rank": 15, "score": 356135.2514378173 }, { "content": "/// Spawns a chain 
runtime for the chain in the configuration identified by given a chain identifier.\n\n///\n\n/// This function will use the default [`ChainHandle`] implementation, ie. the [`BaseChainHandle`].\n\n///\n\n/// Returns the corresponding handle if successful.\n\npub fn spawn_chain_runtime(config: &Config, chain_id: &ChainId) -> Result<impl ChainHandle, Error> {\n\n spawn_chain_runtime_generic::<BaseChainHandle>(config, chain_id)\n\n}\n\n\n", "file_path": "crates/relayer-cli/src/cli_utils.rs", "rank": 16, "score": 351838.53978398844 }, { "content": "/// Default configuration file path\n\npub fn default_config_file() -> Option<PathBuf> {\n\n dirs_next::home_dir().map(|home| home.join(DEFAULT_CONFIG_PATH))\n\n}\n\n\n\n/// Cli Subcommands\n\n#[derive(Command, Parser, Debug, Runnable)]\n\npub enum CliCmd {\n\n /// Validate Hermes configuration file\n\n #[clap(subcommand)]\n\n Config(ConfigCmd),\n\n\n\n /// Manage keys in the relayer for each chain\n\n #[clap(subcommand)]\n\n Keys(KeysCmd),\n\n\n\n /// Create objects (client, connection, or channel) on chains\n\n #[clap(subcommand)]\n\n Create(CreateCmds),\n\n\n\n /// Update objects (clients) on chains\n", "file_path": "crates/relayer-cli/src/commands.rs", "rank": 17, "score": 339594.0188895563 }, { "content": "/// Method for syntactic validation of the input configuration file.\n\npub fn validate_config(config: &Config) -> Result<(), Diagnostic<Error>> {\n\n // Check for duplicate chain configuration and invalid trust thresholds\n\n let mut unique_chain_ids = BTreeSet::new();\n\n for c in config.chains.iter() {\n\n let already_present = !unique_chain_ids.insert(c.id.clone());\n\n if already_present {\n\n return Err(Diagnostic::Error(Error::duplicate_chains(c.id.clone())));\n\n }\n\n\n\n validate_trust_threshold(&c.id, c.trust_threshold)?;\n\n\n\n // Validate gas-related settings\n\n validate_gas_settings(&c.id, c)?;\n\n }\n\n\n\n // Check for invalid mode config\n\n validate_mode(&config.mode)?;\n\n\n\n Ok(())\n\n}\n\n\n", 
"file_path": "crates/relayer-cli/src/config.rs", "rank": 18, "score": 332046.6170983389 }, { "content": "pub fn handle_generic_error(e: impl Into<Report>) -> Error {\n\n Error::generic(e.into())\n\n}\n\n\n", "file_path": "tools/test-framework/src/error.rs", "rank": 19, "score": 320578.4483229698 }, { "content": "/// Get the path to configuration file\n\npub fn config_path() -> Option<PathBuf> {\n\n let app = app_reader();\n\n app.config_path().cloned()\n\n}\n\n\n\n// Specifies all the possible syntactic errors\n\n// that a Hermes configuration file could contain.\n\ndefine_error! {\n\n Error {\n\n ZeroChain\n\n |_| { \"config file does not specify any chain\" },\n\n\n\n InvalidLogDirective\n\n { directive: String, }\n\n [ TraceError<ParseError> ]\n\n |e| {\n\n format!(\"invalid log directive: {0:?}\", e.directive)\n\n },\n\n\n\n InvalidMode\n", "file_path": "crates/relayer-cli/src/config.rs", "rank": 20, "score": 319094.1680377942 }, { "content": "pub fn handle_exec_error(command: &str) -> impl FnOnce(IoError) -> Error + '_ {\n\n |e| match e.kind() {\n\n IoErrorKind::NotFound => Error::command_not_found(command.to_string(), e),\n\n _ => Error::io(e),\n\n }\n\n}\n\n\n\nimpl From<Report> for Error {\n\n fn from(e: Report) -> Self {\n\n Error::generic(e)\n\n }\n\n}\n\n\n\nimpl From<IoError> for Error {\n\n fn from(e: IoError) -> Self {\n\n Error::io(e)\n\n }\n\n}\n\n\n\nimpl From<RelayerError> for Error {\n", "file_path": "tools/test-framework/src/error.rs", "rank": 21, "score": 315636.5642301148 }, { "content": "// TODO move to ics23\n\nfn calculate_non_existence_root(proof: &NonExistenceProof) -> Result<Vec<u8>, Error> {\n\n if let Some(left) = &proof.left {\n\n calculate_existence_root::<ics23::HostFunctionsManager>(left)\n\n .map_err(|_| Error::invalid_merkle_proof())\n\n } else if let Some(right) = &proof.right {\n\n calculate_existence_root::<ics23::HostFunctionsManager>(right)\n\n .map_err(|_| Error::invalid_merkle_proof())\n\n } else {\n\n 
Err(Error::invalid_merkle_proof())\n\n }\n\n}\n\n\n\n// Merkle Proof serialization notes:\n\n// \"Proof\" id currently defined in a number of forms and included in a number of places\n\n// - TmProof: in tendermint-rs/src/merkle/proof.rs:Proof\n\n// - RawProofOps: in tendermint-proto/tendermint.cyrpto.rs:ProofOps\n\n// - RawMerkleProof: in ibc-proto/ibc.core.commitment.v1.rs:MerkleProof\n\n// - structure that includes a RawProofOps in its only `proof` field.\n\n// #[derive(Clone, PartialEq, ::prost::Message)]\n\n// pub struct MerkleProof {\n", "file_path": "crates/relayer-types/src/core/ics23_commitment/merkle.rs", "rank": 22, "score": 296032.16788666346 }, { "content": "pub fn collect_gen_txs(chain_id: &str, command_path: &str, home_path: &str) -> Result<(), Error> {\n\n simple_exec(\n\n chain_id,\n\n command_path,\n\n &[\"--home\", home_path, \"collect-gentxs\"],\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/test-framework/src/chain/cli/bootstrap.rs", "rank": 23, "score": 289832.19175391237 }, { "content": "pub fn assert_same_dimension<T>(size: usize, list: &Vec<Vec<T>>) -> Result<(), Error> {\n\n if list.len() != size {\n\n return Err(Error::generic(eyre!(\n\n \"expect nested vector to have the dimension {} x {}\",\n\n size,\n\n size\n\n )));\n\n }\n\n\n\n for list_b in list.iter() {\n\n if list_b.len() != size {\n\n return Err(Error::generic(eyre!(\n\n \"expect nested vector to have the dimension {} x {}\",\n\n size,\n\n size\n\n )));\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "tools/test-framework/src/util/array.rs", "rank": 24, "score": 288802.13554591616 }, { "content": "fn disk_store_path(folder_name: &str) -> Result<PathBuf, Error> {\n\n let home = dirs_next::home_dir().ok_or_else(Error::home_location_unavailable)?;\n\n\n\n let folder = Path::new(home.as_path())\n\n .join(KEYSTORE_DEFAULT_FOLDER)\n\n .join(folder_name)\n\n .join(KEYSTORE_DISK_BACKEND);\n\n\n\n Ok(folder)\n\n}\n\n\n", "file_path": "crates/relayer/src/keyring.rs", "rank": 25, 
"score": 287823.18195968354 }, { "content": "#[instrument(skip_all, level = \"error\", fields(chain = %config.id))]\n\npub fn listen(config: &ChainConfig, filters: &[EventFilter]) -> eyre::Result<()> {\n\n let rt = Arc::new(TokioRuntime::new()?);\n\n let (event_monitor, rx) = subscribe(config, rt)?;\n\n\n\n info!(\n\n \"listening for queries: {}\",\n\n event_monitor.queries().iter().format(\", \"),\n\n );\n\n\n\n thread::spawn(|| event_monitor.run());\n\n\n\n while let Ok(event_batch) = rx.recv() {\n\n match event_batch {\n\n Ok(batch) => {\n\n let _span =\n\n tracing::error_span!(\"event_batch\", batch_height = %batch.height).entered();\n\n\n\n let matching_events = batch\n\n .events\n\n .into_iter()\n", "file_path": "crates/relayer-cli/src/commands/listen.rs", "rank": 26, "score": 284668.5192043012 }, { "content": "pub fn list_keys(config: ChainConfig) -> eyre::Result<Vec<(String, KeyEntry)>> {\n\n let keyring = KeyRing::new(Store::Test, &config.account_prefix, &config.id)?;\n\n let keys = keyring.keys()?;\n\n Ok(keys)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::KeysListCmd;\n\n\n\n use abscissa_core::clap::Parser;\n\n use ibc_relayer_types::core::ics24_host::identifier::ChainId;\n\n\n\n #[test]\n\n fn test_keys_list() {\n\n assert_eq!(\n\n KeysListCmd {\n\n chain_id: ChainId::from_string(\"chain_id\")\n\n },\n\n KeysListCmd::parse_from(&[\"test\", \"--chain\", \"chain_id\"])\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_keys_list_no_chain() {\n\n assert!(KeysListCmd::try_parse_from(&[\"test\"]).is_err())\n\n }\n\n}\n", "file_path": "crates/relayer-cli/src/commands/keys/list.rs", "rank": 27, "score": 284267.93200589804 }, { "content": "/// Converts error codes originating from `broadcast_tx_sync` responses\n\n/// into IBC relayer domain-type errors.\n\n/// See [`tendermint_rpc::endpoint::broadcast::tx_sync::Response`].\n\n/// Cf: <https://github.com/cosmos/cosmos-sdk/blob/v0.42.10/types/errors/errors.go>\n\npub fn sdk_error_from_tx_sync_error_code(code: u32) 
-> SdkError {\n\n match code {\n\n // The primary reason (we know of) causing broadcast_tx_sync to fail\n\n // is due to \"out of gas\" errors. These are unrecoverable at the moment\n\n // on Hermes side. We'll inform the user to check for misconfiguration.\n\n 11 => SdkError::out_of_gas(code),\n\n 13 => SdkError::insufficient_fee(code),\n\n _ => SdkError::unknown_tx_sync(code),\n\n }\n\n}\n", "file_path": "crates/relayer/src/sdk_error.rs", "rank": 28, "score": 283797.8527209969 }, { "content": "// Converts the error in a TxResult into SdkError with the same\n\n// mapping as defined in ibc-go and cosmos-sdk. This assumes the\n\n// target chain we are interacting with are using cosmos-sdk and ibc-go.\n\n//\n\n// TODO: investigate ways to automatically generate the mapping by parsing\n\n// the errors.go source code directly\n\npub fn sdk_error_from_tx_result(result: &TxResult) -> SdkError {\n\n match result.code {\n\n Code::Ok => SdkError::unexpected_ok(),\n\n Code::Err(code) => {\n\n let codespace = result.codespace.to_string();\n\n if codespace == \"client\" {\n\n SdkError::client(client_error_from_code(code))\n\n } else {\n\n // TODO: Implement mapping for other codespaces in ibc-go\n\n SdkError::unknown_sdk(codespace, code)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/relayer/src/sdk_error.rs", "rank": 29, "score": 282551.59809277975 }, { "content": "pub fn init_test() -> Result<TestConfig, Error> {\n\n let no_color_log = env::var(\"NO_COLOR_LOG\")\n\n .ok()\n\n .map(|val| val == \"1\")\n\n .unwrap_or(false);\n\n\n\n INIT.call_once(|| {\n\n if enable_ansi() && !no_color_log {\n\n color_eyre::install().unwrap();\n\n }\n\n install_logger(!no_color_log);\n\n });\n\n\n\n let chain_command_path = env::var(\"CHAIN_COMMAND_PATH\").unwrap_or_else(|_| \"gaiad\".to_string());\n\n\n\n let base_chain_store_dir = env::var(\"CHAIN_STORE_DIR\").unwrap_or_else(|_| \"data\".to_string());\n\n\n\n let account_prefix = env::var(\"ACCOUNT_PREFIX\").unwrap_or_else(|_| 
\"cosmos\".to_string());\n\n\n\n let chain_store_dir = format!(\"{}/test-{}\", base_chain_store_dir, random_u32());\n", "file_path": "tools/test-framework/src/bootstrap/init.rs", "rank": 30, "score": 279835.98875203537 }, { "content": "/// Returns `true` if the relayer should filter based on\n\n/// client state attributes, e.g., trust threshold.\n\n/// Returns `false` otherwise.\n\nfn client_filter_enabled(_config: &Config) -> bool {\n\n // we currently always enable the client filter\n\n true\n\n}\n\n\n", "file_path": "crates/relayer/src/supervisor.rs", "rank": 31, "score": 278603.60130240914 }, { "content": "pub fn assemble_version_info(sender: &channel::Sender<Request>) -> Vec<VersionInfo> {\n\n // Fetch the relayer library version\n\n let lib_version = submit_request(sender, |reply_to| Request::Version { reply_to })\n\n .map_err(|e| {\n\n error!(\n\n \"[rest-server] failed while fetching relayer lib version info: {}\",\n\n e\n\n )\n\n })\n\n .unwrap_or(VersionInfo {\n\n name: \"[ibc relayer library]\".to_string(),\n\n version: \"[failed to fetch the version]\".to_string(),\n\n });\n\n // Append the REST API version info\n\n let rest_api_version = VersionInfo {\n\n name: NAME.to_string(),\n\n version: VER.to_string(),\n\n };\n\n\n\n vec![lib_version, rest_api_version]\n\n}\n", "file_path": "crates/relayer-rest/src/handle.rs", "rank": 32, "score": 271384.3238614669 }, { "content": "pub fn parse_itf_from_json(itf_path: &str) -> Vec<State> {\n\n let itf_json = std::fs::read_to_string(itf_path).expect(\"itf file does not exist. 
did you run `apalache check --inv=Invariant --run-dir=run main.tla` first?\");\n\n\n\n let trace: InformalTrace<State> =\n\n serde_json::from_str(&itf_json).expect(\"deserialization error\");\n\n\n\n trace.states\n\n}\n\n\n", "file_path": "tools/integration-test/src/mbt/utils.rs", "rank": 33, "score": 269258.0203834154 }, { "content": "/// Obtain a read-only (multi-reader) lock on the application configuration.\n\n///\n\n/// Panics if the application configuration has not been loaded.\n\npub fn app_config() -> config::Reader<Config> {\n\n APPLICATION.config.read()\n\n}\n\n\n\n/// Cli Application\n\n#[derive(Debug)]\n\npub struct CliApp {\n\n /// Application configuration.\n\n config: CfgCell<Config>,\n\n\n\n /// Application state.\n\n state: application::State<Self>,\n\n\n\n /// Toggle json output on/off. Changed with the global config option `-j` / `--json`.\n\n json_output: bool,\n\n\n\n /// Path to the config file.\n\n config_path: Option<PathBuf>,\n\n}\n\n\n", "file_path": "crates/relayer-cli/src/application.rs", "rank": 34, "score": 266867.04861239676 }, { "content": "pub fn delete_key(config: &ChainConfig, key_name: &str) -> eyre::Result<()> {\n\n let mut keyring = KeyRing::new(Store::Test, &config.account_prefix, &config.id)?;\n\n keyring.remove_key(key_name)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/relayer-cli/src/commands/keys/delete.rs", "rank": 35, "score": 265549.9850182455 }, { "content": "/// Default validator function for Client identifiers.\n\n///\n\n/// A valid identifier must be between 9-64 characters and only contain lowercase\n\n/// alphabetic characters,\n\npub fn validate_client_identifier(id: &str) -> Result<(), Error> {\n\n validate_identifier(id, 9, 64)\n\n}\n\n\n", "file_path": "crates/relayer-types/src/core/ics24_host/validate.rs", "rank": 36, "score": 264816.0283405822 }, { "content": "fn auth_info_and_bytes(signer_info: SignerInfo, fee: Fee) -> Result<(AuthInfo, Vec<u8>), Error> {\n\n let auth_info = AuthInfo {\n\n signer_infos: 
vec![signer_info],\n\n fee: Some(fee),\n\n\n\n // Since Cosmos SDK v0.46.0\n\n tip: None,\n\n };\n\n\n\n // A protobuf serialization of a AuthInfo\n\n let mut auth_buf = Vec::new();\n\n\n\n prost::Message::encode(&auth_info, &mut auth_buf)\n\n .map_err(|e| Error::protobuf_encode(String::from(\"AuthInfo\"), e))?;\n\n\n\n Ok((auth_info, auth_buf))\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/encode.rs", "rank": 37, "score": 263879.11910025764 }, { "content": "fn run_and_collect_events<F>(ev_list: &mut Vec<IbcEvent>, f: F)\n\nwhere\n\n F: FnOnce() -> Result<Vec<IbcEvent>, LinkError>,\n\n{\n\n match f() {\n\n Ok(mut ev) => ev_list.append(&mut ev),\n\n Err(e) => Output::error(Error::link(e)).exit(),\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::ClearPacketsCmd;\n\n\n\n use std::str::FromStr;\n\n\n\n use abscissa_core::clap::Parser;\n\n use ibc_relayer_types::core::ics24_host::identifier::{ChainId, ChannelId, PortId};\n\n\n\n #[test]\n", "file_path": "crates/relayer-cli/src/commands/clear.rs", "rank": 38, "score": 257522.47030885177 }, { "content": "// The error code mapping follows the Go code at\n\n// ibc-go/modules/core/02-client/types/errors.go\n\nfn client_error_from_code(code: u32) -> ClientError {\n\n match code {\n\n 2 => ClientError::light_client_already_exists(),\n\n 3 => ClientError::invalid_light_client(),\n\n 4 => ClientError::light_client_not_found(),\n\n 5 => ClientError::frozen_light_client(),\n\n 6 => ClientError::invalid_client_metadata(),\n\n 7 => ClientError::consensus_state_not_found(),\n\n 8 => ClientError::invalid_consensus_state(),\n\n 9 => ClientError::client_type_not_found(),\n\n 10 => ClientError::invalid_client_type(),\n\n 11 => ClientError::commitment_root_not_found(),\n\n 12 => ClientError::invalid_client_header(),\n\n 13 => ClientError::invalid_light_client_misbehavior(),\n\n 14 => ClientError::client_state_verification_failed(),\n\n 15 => ClientError::client_consensus_state_verification_failed(),\n\n 16 => 
ClientError::connection_state_verification_failed(),\n\n 17 => ClientError::client_state_verification_failed(),\n\n 18 => ClientError::packet_commitment_verification_failed(),\n\n 19 => ClientError::packet_acknowledgement_verification_failed(),\n", "file_path": "crates/relayer/src/sdk_error.rs", "rank": 39, "score": 257133.4530883813 }, { "content": "/// Generate packet filters from Vec<IBCPath> and load them in a Map(chain_name -> filter).\n\nfn construct_packet_filters(ibc_paths: Vec<IBCPath>) -> HashMap<String, PacketFilter> {\n\n let mut packet_filters = HashMap::new();\n\n\n\n for path in ibc_paths {\n\n for channel in path.channels {\n\n let chain_1 = path.chain_1.chain_name.to_owned();\n\n let chain_2 = path.chain_2.chain_name.to_owned();\n\n\n\n let filters_1 = packet_filters.entry(chain_1).or_insert(Vec::new());\n\n\n\n filters_1.push((\n\n FilterPattern::Exact(channel.chain_1.port_id.clone()),\n\n FilterPattern::Exact(channel.chain_1.channel_id.clone()),\n\n ));\n\n\n\n let filters_2 = packet_filters.entry(chain_2).or_insert(Vec::new());\n\n\n\n filters_2.push((\n\n FilterPattern::Exact(channel.chain_2.port_id.clone()),\n\n FilterPattern::Exact(channel.chain_2.channel_id.clone()),\n", "file_path": "crates/relayer-cli/src/chain_registry.rs", "rank": 40, "score": 255795.83336512157 }, { "content": "pub fn initialize(chain_id: &str, command_path: &str, home_path: &str) -> Result<(), Error> {\n\n simple_exec(\n\n chain_id,\n\n command_path,\n\n &[\n\n \"--home\",\n\n home_path,\n\n \"--chain-id\",\n\n chain_id,\n\n \"init\",\n\n chain_id,\n\n ],\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tools/test-framework/src/chain/cli/bootstrap.rs", "rank": 41, "score": 252487.0434895006 }, { "content": "pub fn extract_client_id(event: &IbcEvent) -> Result<&ClientId, ForeignClientError> {\n\n match event {\n\n IbcEvent::CreateClient(ev) => Ok(ev.client_id()),\n\n IbcEvent::UpdateClient(ev) => Ok(ev.client_id()),\n\n _ => 
Err(ForeignClientError::missing_client_id_from_event(\n\n event.clone(),\n\n )),\n\n }\n\n}\n", "file_path": "crates/relayer/src/foreign_client.rs", "rank": 42, "score": 251385.56578812157 }, { "content": "pub fn try_into_array<T, const SIZE: usize>(list: Vec<T>) -> Result<[T; SIZE], Error> {\n\n list.try_into()\n\n .map_err(|_| Error::generic(eyre!(\"vector is not of length {}\", SIZE)))\n\n}\n\n\n\n/**\n\n Converts a dynamic-sized nested vector `Vec<Vec<T>>` into a fixed-sized\n\n nested array `[[T; SIZE]; SIZE]`. Fails if the nested vector is not of\n\n `SIZE`x`SIZE` length.\n\n*/\n", "file_path": "tools/test-framework/src/util/array.rs", "rank": 43, "score": 251302.6604611411 }, { "content": "/// This method is the workhorse for deserializing\n\n/// the `key` field from a public key.\n\nfn deserialize_key<'de, D>(deser: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n // The key is a byte array that is base64-encoded\n\n // and then marshalled into a JSON String.\n\n let based64_encoded: Result<String, _> = Deserialize::deserialize(deser);\n\n let value = base64::decode(based64_encoded?)\n\n .map_err(|e| serde::de::Error::custom(format!(\"error in decoding: {}\", e)))?;\n\n\n\n Ok(value)\n\n}\n\n\n\nimpl FromStr for EncodedPubKey {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n // Try to deserialize into a JSON Value.\n\n let maybe_json: Result<ProtoAny, _> = serde_json::from_str(s);\n\n\n", "file_path": "crates/relayer/src/keyring/pub_key.rs", "rank": 44, "score": 249010.0442452476 }, { "content": "/// The default amount of gas the relayer is willing to pay for a transaction,\n\n/// when it cannot simulate the tx and therefore estimate the gas amount needed.\n\npub fn default_gas_from_config(config: &ChainConfig) -> u64 {\n\n config\n\n .default_gas\n\n .unwrap_or_else(|| max_gas_from_config(config))\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/types/gas.rs", "rank": 45, "score": 
247087.68599498548 }, { "content": "/// The maximum amount of gas the relayer is willing to pay for a transaction\n\npub fn max_gas_from_config(config: &ChainConfig) -> u64 {\n\n config.max_gas.unwrap_or(DEFAULT_MAX_GAS)\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/types/gas.rs", "rank": 46, "score": 247087.68599498548 }, { "content": "/// The gas multiplier\n\npub fn gas_multiplier_from_config(config: &ChainConfig) -> f64 {\n\n config.gas_multiplier.unwrap_or_default().to_f64()\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/types/gas.rs", "rank": 47, "score": 247087.68599498548 }, { "content": "pub fn decode_header<B: Buf>(buf: B) -> Result<Header, Error> {\n\n RawHeader::decode(buf).map_err(Error::decode)?.try_into()\n\n}\n\n\n\nimpl From<Header> for RawHeader {\n\n fn from(value: Header) -> Self {\n\n RawHeader {\n\n signed_header: Some(value.signed_header.into()),\n\n validator_set: Some(value.validator_set.into()),\n\n trusted_height: Some(value.trusted_height.into()),\n\n trusted_validators: Some(value.trusted_validator_set.into()),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"mocks\"))]\n\npub mod test_util {\n\n use alloc::vec;\n\n\n\n use subtle_encoding::hex;\n", "file_path": "crates/relayer-types/src/clients/ics07_tendermint/header.rs", "rank": 48, "score": 246770.51007384676 }, { "content": "fn parse_sdk_version(version_info: &VersionInfo) -> Result<semver::Version, Error> {\n\n let module = version_info\n\n .build_deps\n\n .iter()\n\n .find(|&m| m.path.contains(SDK_MODULE_NAME))\n\n .ok_or_else(|| {\n\n Error::sdk_module_not_found(SDK_MODULE_NAME.to_string(), AppInfo::from(version_info))\n\n })?;\n\n\n\n // The raw version number has a leading 'v', trim it out;\n\n let plain_version = module.version.trim_start_matches('v');\n\n\n\n // Parse the module version\n\n let mut version = semver::Version::parse(plain_version).map_err(|e| {\n\n Error::version_parsing_failed(\n\n module.path.clone(),\n\n 
module.version.clone(),\n\n e.to_string(),\n\n AppInfo::from(version_info),\n\n )\n\n })?;\n\n\n\n // Remove the pre-release version to ensure we treat pre-releases of the SDK\n\n // as their normal version, eg. 0.42.0-rc2 should satisfy >=0.41.3, <= 0.42.6.\n\n version.pre = semver::Prerelease::EMPTY;\n\n\n\n Ok(version)\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/version.rs", "rank": 49, "score": 246650.9613252448 }, { "content": "pub fn convert_tm_to_ics_merkle_proof(tm_proof: &TendermintProof) -> Result<MerkleProof, Error> {\n\n let mut proofs = Vec::new();\n\n\n\n for op in &tm_proof.ops {\n\n let mut parsed = ibc_proto::ics23::CommitmentProof { proof: None };\n\n prost::Message::merge(&mut parsed, op.data.as_slice())\n\n .map_err(Error::commitment_proof_decoding_failed)?;\n\n\n\n proofs.push(parsed);\n\n }\n\n\n\n Ok(MerkleProof::from(RawMerkleProof { proofs }))\n\n}\n", "file_path": "crates/relayer-types/src/core/ics23_commitment/merkle.rs", "rank": 50, "score": 244613.28293734652 }, { "content": "#[test]\n\npub fn example_test() -> Result<(), Error> {\n\n run_binary_channel_test(&ExampleTest)\n\n}\n\n\n\npub struct ExampleTest;\n\n\n\nimpl TestOverrides for ExampleTest {}\n\n\n\nimpl BinaryChannelTest for ExampleTest {\n\n fn run<ChainA: ChainHandle, ChainB: ChainHandle>(\n\n &self,\n\n _config: &TestConfig,\n\n _relayer: RelayerDriver,\n\n _chains: ConnectedChains<ChainA, ChainB>,\n\n _channel: ConnectedChannel<ChainA, ChainB>,\n\n ) -> Result<(), Error> {\n\n suspend()\n\n }\n\n}\n", "file_path": "tools/integration-test/src/tests/example.rs", "rank": 51, "score": 242197.50145021267 }, { "content": "pub fn all_chain_ids(sender: &channel::Sender<Request>) -> Result<Vec<ChainId>, RestApiError> {\n\n submit_request(sender, |reply_to| Request::GetChains { reply_to })\n\n}\n\n\n", "file_path": "crates/relayer-rest/src/handle.rs", "rank": 52, "score": 241848.68038917956 }, { "content": "pub fn extract_header_from_tx(event: &AbciEvent) -> 
Result<Box<dyn Header>, Error> {\n\n for tag in &event.attributes {\n\n let key = tag.key.as_ref();\n\n let value = tag.value.as_ref();\n\n if key == HEADER_ATTRIBUTE_KEY {\n\n return AnyHeader::decode_from_string(value).map(AnyHeader::into_box);\n\n }\n\n }\n\n Err(Error::missing_raw_header())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use ibc_relayer_types::core::ics02_client::client_type::ClientType;\n\n use ibc_relayer_types::mock::header::MockHeader;\n\n use ibc_relayer_types::Height;\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "crates/relayer/src/chain/cosmos/types/events/client.rs", "rank": 53, "score": 241376.49939765118 }, { "content": "pub fn get_dummy_proof() -> Vec<u8> {\n\n \"Y29uc2Vuc3VzU3RhdGUvaWJjb25lY2xpZW50LzIy\"\n\n .as_bytes()\n\n .to_vec()\n\n}\n\n\n", "file_path": "crates/relayer-types/src/test_utils.rs", "rank": 54, "score": 240658.09453442082 }, { "content": "pub fn new_registry(config: Config) -> SharedRegistry<CountingAndCachingChainHandle> {\n\n <SharedRegistry<CountingAndCachingChainHandle>>::new(config)\n\n}\n\n\n\n/**\n\n Generate [`ChainConfig`](ibc_relayer::config::ChainConfig) from a running\n\n [`FullNode`] and add it to the relayer's [`Config`].\n\n*/\n", "file_path": "tools/test-framework/src/bootstrap/binary/chain.rs", "rank": 55, "score": 240501.85671010643 }, { "content": "pub fn spawn(config: Config) -> (ServerHandle, channel::Receiver<Request>) {\n\n let (req_tx, req_rx) = channel::unbounded::<Request>();\n\n\n\n info!(\"starting REST API server listening at http://{}\", config);\n\n let handle = run(config, req_tx);\n\n\n\n (handle, req_rx)\n\n}\n\n\n", "file_path": "crates/relayer-rest/src/server.rs", "rank": 56, "score": 240300.41928709252 }, { "content": "/// Process incoming REST requests.\n\n///\n\n/// Non-blocking receiving of requests from\n\n/// the REST server, and tries to handle them locally.\n\n///\n\n/// Any request that cannot be handled locally here is propagated\n\n/// as a [`Command`] to the 
caller, which the supervisor itself should handle.\n\npub fn process_incoming_requests(config: &Config, channel: &Receiver) -> Option<Command> {\n\n match channel.try_recv() {\n\n Ok(request) => match request {\n\n Request::Version { reply_to } => {\n\n trace!(\"Version\");\n\n\n\n let v = VersionInfo {\n\n name: NAME.to_string(),\n\n version: VER.to_string(),\n\n };\n\n\n\n reply_to\n\n .send(Ok(v))\n\n .unwrap_or_else(|e| error!(\"error replying to a REST request {}\", e));\n\n }\n\n\n\n Request::GetChains { reply_to } => {\n\n trace!(\"GetChains\");\n\n\n\n reply_to\n", "file_path": "crates/relayer/src/rest.rs", "rank": 57, "score": 240300.41928709252 }, { "content": "/// Verifies that every template macro call in the file can be replaced by a valid Hermes command.\n\n/// Returns the number of invalid commands found.\n\nfn verify_file(path: &Path) -> i32 {\n\n let mut errors_found = 0;\n\n let file = File::open(path);\n\n let reader =\n\n BufReader::new(file.unwrap_or_else(|_| panic!(\"File not found: {}\", path.display())));\n\n let mut line_number = 1;\n\n\n\n for line in reader.lines() {\n\n let line = line\n\n .unwrap_or_else(|_| panic!(\"{} : Failed to read line {}\", path.display(), line_number));\n\n errors_found += verify_line(&line, path, line_number);\n\n line_number += 1;\n\n }\n\n errors_found\n\n}\n\n\n", "file_path": "tools/check-guide/src/main.rs", "rank": 58, "score": 239034.43590816815 }, { "content": "/// Perform a health check on all connected chains\n\nfn health_check<Chain: ChainHandle>(config: &Config, registry: &mut Registry<Chain>) {\n\n use HealthCheck::*;\n\n\n\n let chains = &config.chains;\n\n\n\n for config in chains {\n\n let id = &config.id;\n\n let _span = error_span!(\"health_check\", chain = %id).entered();\n\n\n\n let chain = registry.get_or_spawn(id);\n\n\n\n match chain {\n\n Ok(chain) => match chain.health_check() {\n\n Ok(Healthy) => info!(\"chain is healthy\"),\n\n Ok(Unhealthy(e)) => warn!(\"chain is not healthy: {}\", 
e),\n\n Err(e) => error!(\"failed to perform health check: {}\", e),\n\n },\n\n Err(e) => {\n\n error!(\n\n \"skipping health check, reason: failed to spawn chain runtime with error: {}\",\n\n e\n\n );\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Subscribe to the events emitted by the chains the supervisor is connected to.\n", "file_path": "crates/relayer/src/supervisor.rs", "rank": 59, "score": 236962.7331660592 }, { "content": "pub fn pipe_to_file(\n\n mut source: impl io::Read + Send + 'static,\n\n file_path: &str,\n\n) -> Result<(), Error> {\n\n let mut file = fs::OpenOptions::new()\n\n .append(true)\n\n .create(true)\n\n .open(file_path)?;\n\n\n\n thread::spawn(move || {\n\n std::io::copy(&mut source, &mut file).unwrap();\n\n });\n\n\n\n Ok(())\n\n}\n", "file_path": "tools/test-framework/src/util/file.rs", "rank": 60, "score": 236930.86186938442 }, { "content": "/// Returns the lists of supported versions\n\npub fn get_compatible_versions() -> Vec<Version> {\n\n vec![Version::default()]\n\n}\n\n\n", "file_path": "crates/relayer-types/src/core/ics03_connection/version.rs", "rank": 61, "score": 235727.0744346924 }, { "content": "pub fn delete_all_keys(config: &ChainConfig) -> eyre::Result<()> {\n\n let mut keyring = KeyRing::new(Store::Test, &config.account_prefix, &config.id)?;\n\n let keys = keyring.keys()?;\n\n for key in keys {\n\n keyring.remove_key(&key.0)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::KeysDeleteCmd;\n\n\n\n use abscissa_core::clap::Parser;\n\n use ibc_relayer_types::core::ics24_host::identifier::ChainId;\n\n\n\n #[test]\n\n fn test_keys_delete_key_name() {\n\n assert_eq!(\n\n KeysDeleteCmd {\n", "file_path": "crates/relayer-cli/src/commands/keys/delete.rs", "rank": 62, "score": 235540.99179990406 }, { "content": "pub fn simple_exec(desc: &str, command_path: &str, args: &[&str]) -> Result<ExecOutput, Error> {\n\n debug!(\n\n \"Executing command for {}: {} {}\",\n\n desc,\n\n command_path,\n\n itertools::join(args, \" 
\")\n\n );\n\n\n\n let output = Command::new(&command_path)\n\n .args(args)\n\n .output()\n\n .map_err(handle_exec_error(command_path))?;\n\n\n\n if output.status.success() {\n\n let stdout = str::from_utf8(&output.stdout)\n\n .map_err(handle_generic_error)?\n\n .to_string();\n\n\n\n let stderr = str::from_utf8(&output.stderr)\n\n .map_err(handle_generic_error)?\n", "file_path": "tools/test-framework/src/chain/exec.rs", "rank": 63, "score": 235503.8734402453 }, { "content": "fn extract_chains_and_keys(chain_names: &[String]) -> Vec<(String, Option<String>)> {\n\n let mut captured_names = chain_names\n\n .iter()\n\n .map(|chain_key| {\n\n chain_key\n\n .split_once(':')\n\n .map(|(name, key)| (name.to_string(), Some(key.to_string())))\n\n .unwrap_or_else(|| (chain_key.to_string(), None))\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n captured_names.sort_by(|a, b| a.0.cmp(&b.0));\n\n captured_names\n\n}\n\n\n\nimpl Runnable for AutoCmd {\n\n fn run(&self) {\n\n // Assert that for every chain, a key name is provided\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n", "file_path": "crates/relayer-cli/src/commands/config/auto.rs", "rank": 64, "score": 233646.43688859927 }, { "content": "#[cfg(not(feature = \"telemetry\"))]\n\nfn spawn_telemetry_server(config: &Config) -> Result<(), Box<dyn Error + Send + Sync>> {\n\n if config.telemetry.enabled {\n\n warn!(\n\n \"telemetry enabled in the config but Hermes was built without telemetry support, \\\n\n build Hermes with --features=telemetry to enable telemetry support.\"\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/relayer-cli/src/commands/start.rs", "rank": 65, "score": 231828.31381604625 }, { "content": "fn validate_gas_settings(id: &ChainId, config: &ChainConfig) -> Result<(), Diagnostic<Error>> {\n\n // Check that the gas_adjustment option is not set\n\n if let Some(gas_adjustment) = config.gas_adjustment {\n\n let gas_multiplier = gas_adjustment + 1.0;\n\n\n\n return 
Err(Diagnostic::Error(Error::deprecated_gas_adjustment(\n\n gas_adjustment,\n\n gas_multiplier,\n\n id.clone(),\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/relayer-cli/src/config.rs", "rank": 66, "score": 231007.50748457125 }, { "content": "pub fn collect_events(\n\n config: &Config,\n\n workers: &WorkerMap,\n\n src_chain: &impl ChainHandle,\n\n batch: &EventBatch,\n\n) -> CollectedEvents {\n\n let mut collected =\n\n CollectedEvents::new(batch.height, batch.chain_id.clone(), batch.tracking_id);\n\n\n\n let mode = config.mode;\n\n\n\n for event_with_height in &batch.events {\n\n match &event_with_height.event {\n\n IbcEvent::NewBlock(_) => {\n\n collected.new_block = Some(event_with_height.event.clone());\n\n }\n\n IbcEvent::UpdateClient(update) => {\n\n collect_event(\n\n &mut collected,\n\n event_with_height.clone(),\n", "file_path": "crates/relayer/src/supervisor.rs", "rank": 67, "score": 230359.0021774823 }, { "content": "struct OutputBuffer(Vec<UpgradeClientsForChainResult>);\n\n\n\nimpl Display for OutputBuffer {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> {\n\n fn sep<'a>(pos: usize, len: usize, other: &'a str, last: &'a str) -> &'a str {\n\n if pos != len - 1 {\n\n other\n\n } else {\n\n last\n\n }\n\n }\n\n\n\n let outer_results = &self.0;\n\n writeln!(f, \".\")?;\n\n for (o, outer_result) in outer_results.iter().enumerate() {\n\n write!(f, \"{}\", sep(o, outer_results.len(), \"├─\", \"└─\"))?;\n\n match outer_result {\n\n Ok(inner_results) => {\n\n writeln!(f, \".\")?;\n\n for (i, inner_result) in inner_results.iter().enumerate() {\n", "file_path": "crates/relayer-cli/src/commands/tx/client.rs", "rank": 68, "score": 228135.56824457337 }, { "content": "/// Returns `true` if the relayer should filter based on\n\n/// channel identifiers.\n\n/// Returns `false` otherwise.\n\nfn channel_filter_enabled(_config: &Config) -> bool {\n\n // we currently always enable the channel filter\n\n true\n\n}\n\n\n", "file_path": 
"crates/relayer/src/supervisor.rs", "rank": 69, "score": 228011.7500805881 }, { "content": "/// Returns the sequences of the packet commitments on a given chain and channel (port_id + channel_id).\n\n/// These are the sequences of the packets that were either:\n\n/// - not yet received by the counterparty chain, or\n\n/// - received on counterparty chain but not yet acknowledged by this chain,\n\npub fn commitments_on_chain(\n\n chain: &impl ChainHandle,\n\n port_id: &PortId,\n\n channel_id: &ChannelId,\n\n) -> Result<(Vec<Sequence>, Height), Error> {\n\n // get the packet commitments on the counterparty/ source chain\n\n let (mut commit_sequences, response_height) = chain\n\n .query_packet_commitments(QueryPacketCommitmentsRequest {\n\n port_id: port_id.clone(),\n\n channel_id: channel_id.clone(),\n\n pagination: Some(PageRequest::all()),\n\n })\n\n .map_err(Error::relayer)?;\n\n\n\n commit_sequences.sort_unstable();\n\n\n\n Ok((commit_sequences, response_height))\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/counterparty.rs", "rank": 70, "score": 227288.0365883138 }, { "content": "pub fn chain_config(\n\n sender: &channel::Sender<Request>,\n\n chain_id: &str,\n\n) -> Result<ChainConfig, RestApiError> {\n\n submit_request(sender, |reply_to| Request::GetChain {\n\n chain_id: ChainId::from_string(chain_id),\n\n reply_to,\n\n })\n\n}\n\n\n", "file_path": "crates/relayer-rest/src/handle.rs", "rank": 71, "score": 227155.11619735794 }, { "content": "fn decode_bech32(input: &str) -> Result<Vec<u8>, Error> {\n\n use bech32::FromBase32;\n\n\n\n let bytes = bech32::decode(input)\n\n .and_then(|(_, data, _)| Vec::from_base32(&data))\n\n .map_err(Error::bech32_account)?;\n\n\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "crates/relayer/src/keyring.rs", "rank": 72, "score": 226886.0401132399 }, { "content": "pub fn gas_amount_to_fee(config: &GasConfig, gas_amount: u64) -> Fee {\n\n let adjusted_gas_limit = adjust_estimated_gas(AdjustGas {\n\n gas_multiplier: 
config.gas_multiplier,\n\n max_gas: config.max_gas,\n\n gas_amount,\n\n });\n\n\n\n // The fee in coins based on gas amount\n\n let amount = calculate_fee(adjusted_gas_limit, &config.gas_price);\n\n\n\n Fee {\n\n amount: vec![amount],\n\n gas_limit: adjusted_gas_limit,\n\n payer: \"\".to_string(),\n\n granter: config.fee_granter.clone(),\n\n }\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/gas.rs", "rank": 73, "score": 226737.090892038 }, { "content": "#[test]\n\nfn test_client_options() -> Result<(), Error> {\n\n run_binary_chain_test(&ClientOptionsTest)\n\n}\n\n\n", "file_path": "tools/integration-test/src/tests/client_settings.rs", "rank": 74, "score": 225940.55957647972 }, { "content": "#[test]\n\nfn test_client_defaults() -> Result<(), Error> {\n\n run_binary_chain_test(&ClientDefaultsTest)\n\n}\n\n\n\n/// A test to exercise customization of foreign client settings.\n", "file_path": "tools/integration-test/src/tests/client_settings.rs", "rank": 75, "score": 225940.55957647972 }, { "content": "pub fn extract_header_from_tx(event: &AbciEvent) -> Result<Box<dyn Header>, ClientError> {\n\n for tag in &event.attributes {\n\n let key = tag.key.as_ref();\n\n let value = tag.value.as_ref();\n\n if key == HEADER_ATTRIBUTE_KEY {\n\n return AnyHeader::decode_from_string(value).map(AnyHeader::into_box);\n\n }\n\n }\n\n Err(ClientError::missing_raw_header())\n\n}\n\n\n", "file_path": "crates/relayer/src/event.rs", "rank": 76, "score": 225022.77611407635 }, { "content": "/// A basic worker retry strategy.\n\n///\n\n/// The backoff delay is initially 200ms and grows\n\n/// by 100ms at each step. 
The backoff delay is\n\n/// capped at 500ms.\n\n/// The overall amount of time spent backing off\n\n/// is capped to 2 seconds.\n\n/// See the `default_strategy` test below.\n\npub fn worker_default_strategy() -> impl Iterator<Item = Duration> {\n\n let strategy = ConstantGrowth::new(Duration::from_millis(200), Duration::from_millis(100));\n\n clamp_total(strategy, Duration::from_millis(500), Duration::from_secs(2))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::time::Duration;\n\n\n\n use crate::worker::retry_strategy::worker_default_strategy;\n\n\n\n #[test]\n\n fn default_strategy() {\n\n let strategy = worker_default_strategy();\n\n let delays = strategy.take(10).collect::<Vec<_>>();\n\n // This strategy has exactly 6 retry steps\n\n assert_eq!(\n\n delays,\n\n vec![\n\n Duration::from_millis(200),\n\n Duration::from_millis(300),\n\n Duration::from_millis(400),\n\n Duration::from_millis(500),\n\n Duration::from_millis(500),\n\n Duration::from_millis(100),\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "crates/relayer/src/worker/retry_strategy.rs", "rank": 77, "score": 224800.1532446991 }, { "content": "pub fn format_env(exporter: &impl ExportEnv) -> String {\n\n let mut envs = BTreeMap::new();\n\n exporter.export_env(&mut envs);\n\n\n\n envs.iter()\n\n .map(|(key, value)| format!(\"{}={}\", key, value))\n\n .join(\"\\n\")\n\n}\n\n\n\n/**\n\n Retrieve the environment variables exported by a type implementing\n\n `ExportEnv`, and save them as a `.env` file to the given file path.\n\n*/\n", "file_path": "tools/test-framework/src/types/env.rs", "rank": 78, "score": 224800.1532446991 }, { "content": "pub fn gas_config_for_test() -> GasConfig {\n\n let max_gas = 3000000;\n\n let gas_multiplier = 1.1;\n\n let gas_price = GasPrice::new(0.001, \"stake\".to_string());\n\n\n\n let default_gas = max_gas;\n\n let fee_granter = \"\".to_string();\n\n\n\n let max_fee = Fee {\n\n amount: vec![calculate_fee(max_gas, &gas_price)],\n\n gas_limit: max_gas,\n\n payer: 
\"\".to_string(),\n\n granter: fee_granter.clone(),\n\n };\n\n\n\n GasConfig {\n\n default_gas,\n\n max_gas,\n\n gas_multiplier,\n\n gas_price,\n\n max_fee,\n\n fee_granter,\n\n }\n\n}\n\n\n", "file_path": "tools/test-framework/src/relayer/tx.rs", "rank": 79, "score": 224243.63012330397 }, { "content": "/// Returns the [`ChannelConnectionClient`] associated with the\n\n/// provided port and channel id.\n\npub fn channel_connection_client(\n\n chain: &impl ChainHandle,\n\n port_id: &PortId,\n\n channel_id: &ChannelId,\n\n) -> Result<ChannelConnectionClient, Error> {\n\n let (channel_end, _) = chain\n\n .query_channel(\n\n QueryChannelRequest {\n\n port_id: port_id.clone(),\n\n channel_id: channel_id.clone(),\n\n height: QueryHeight::Latest,\n\n },\n\n IncludeProof::No,\n\n )\n\n .map_err(Error::relayer)?;\n\n\n\n if channel_end.state_matches(&State::Uninitialized) {\n\n return Err(Error::channel_uninitialized(\n\n port_id.clone(),\n\n channel_id.clone(),\n", "file_path": "crates/relayer/src/chain/counterparty.rs", "rank": 80, "score": 223689.23966414994 }, { "content": "pub fn wait_for_client() {\n\n let sleep_time = CLIENT_EXPIRY + Duration::from_secs(5);\n\n\n\n info!(\n\n \"Sleeping for {} seconds to wait for IBC client to expire\",\n\n sleep_time.as_secs()\n\n );\n\n\n\n sleep(sleep_time);\n\n}\n\n\n", "file_path": "tools/integration-test/src/mbt/utils.rs", "rank": 81, "score": 223683.59432149178 }, { "content": "#[test]\n\nfn test_client_default_refresh() -> Result<(), Error> {\n\n run_binary_chain_test(&ClientDefaultsTest)\n\n}\n\n\n", "file_path": "tools/integration-test/src/tests/client_refresh.rs", "rank": 82, "score": 223608.49723327905 }, { "content": "#[test]\n\nfn test_create_on_expired_client() -> Result<(), Error> {\n\n run_binary_chain_test(&CreateOnExpiredClientTest)\n\n}\n\n\n", "file_path": "tools/integration-test/src/tests/client_expiration.rs", "rank": 83, "score": 223608.49723327905 }, { "content": "#[test]\n\nfn test_client_fail_refresh() -> 
Result<(), Error> {\n\n run_binary_chain_test(&ClientFailsTest)\n\n}\n\n\n", "file_path": "tools/integration-test/src/tests/client_refresh.rs", "rank": 84, "score": 223608.49723327905 }, { "content": "pub fn update_client_try_from_abci_event(\n\n abci_event: &AbciEvent,\n\n) -> Result<client_events::UpdateClient, ClientError> {\n\n client_extract_attributes_from_tx(abci_event).map(|attributes| client_events::UpdateClient {\n\n common: attributes,\n\n header: extract_header_from_tx(abci_event).ok(),\n\n })\n\n}\n\n\n", "file_path": "crates/relayer/src/event.rs", "rank": 85, "score": 220911.04849634442 }, { "content": "pub fn create_client_try_from_abci_event(\n\n abci_event: &AbciEvent,\n\n) -> Result<client_events::CreateClient, ClientError> {\n\n client_extract_attributes_from_tx(abci_event).map(client_events::CreateClient)\n\n}\n\n\n", "file_path": "crates/relayer/src/event.rs", "rank": 86, "score": 220911.04849634442 }, { "content": "pub fn upgrade_client_try_from_abci_event(\n\n abci_event: &AbciEvent,\n\n) -> Result<client_events::UpgradeClient, ClientError> {\n\n client_extract_attributes_from_tx(abci_event).map(client_events::UpgradeClient)\n\n}\n\n\n", "file_path": "crates/relayer/src/event.rs", "rank": 87, "score": 220911.04849634442 }, { "content": "pub fn client_misbehaviour_try_from_abci_event(\n\n abci_event: &AbciEvent,\n\n) -> Result<client_events::ClientMisbehaviour, ClientError> {\n\n client_extract_attributes_from_tx(abci_event).map(client_events::ClientMisbehaviour)\n\n}\n\n\n", "file_path": "crates/relayer/src/event.rs", "rank": 88, "score": 220911.04849634442 }, { "content": "pub fn into_nested_vec<T, const SIZE: usize>(array: [[T; SIZE]; SIZE]) -> Vec<Vec<T>> {\n\n array.map(|array_b| array_b.into()).into()\n\n}\n\n\n\n/**\n\n Map the elements in the fixed-sized array `[[T; SIZE]; SIZE]`.\n\n*/\n", "file_path": "tools/test-framework/src/util/array.rs", "rank": 89, "score": 219415.54260209049 }, { "content": "/// Default validator function for 
Channel identifiers.\n\n///\n\n/// A valid identifier must be between 8-64 characters and only contain\n\n/// alphabetic characters,\n\npub fn validate_channel_identifier(id: &str) -> Result<(), Error> {\n\n validate_identifier(id, 8, 64)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::core::ics24_host::validate::{\n\n validate_channel_identifier, validate_client_identifier, validate_connection_identifier,\n\n validate_identifier, validate_port_identifier,\n\n };\n\n use test_log::test;\n\n\n\n #[test]\n\n fn parse_invalid_port_id_min() {\n\n // invalid min port id\n\n let id = validate_port_identifier(\"p\");\n\n assert!(id.is_err())\n\n }\n\n\n\n #[test]\n", "file_path": "crates/relayer-types/src/core/ics24_host/validate.rs", "rank": 90, "score": 219125.48040918453 }, { "content": "/// Default validator function for Port identifiers.\n\n///\n\n/// A valid Identifier must be between 2-128 characters and only contain lowercase\n\n/// alphabetic characters,\n\npub fn validate_port_identifier(id: &str) -> Result<(), Error> {\n\n validate_identifier(id, 2, 128)\n\n}\n\n\n", "file_path": "crates/relayer-types/src/core/ics24_host/validate.rs", "rank": 91, "score": 219125.48040918453 }, { "content": "/// Default validator function for Connection identifiers.\n\n///\n\n/// A valid Identifier must be between 10-64 characters and only contain lowercase\n\n/// alphabetic characters,\n\npub fn validate_connection_identifier(id: &str) -> Result<(), Error> {\n\n validate_identifier(id, 10, 64)\n\n}\n\n\n", "file_path": "crates/relayer-types/src/core/ics24_host/validate.rs", "rank": 92, "score": 219125.48040918453 }, { "content": "pub fn new_tx_config_for_test(\n\n chain_id: ChainId,\n\n raw_rpc_address: String,\n\n raw_grpc_address: String,\n\n address_type: AddressType,\n\n) -> Result<TxConfig, Error> {\n\n let rpc_address = Url::from_str(&raw_rpc_address).map_err(handle_generic_error)?;\n\n\n\n let rpc_client = 
HttpClient::new(rpc_address.clone()).map_err(handle_generic_error)?;\n\n\n\n let grpc_address = Uri::from_str(&raw_grpc_address).map_err(handle_generic_error)?;\n\n\n\n let gas_config = gas_config_for_test();\n\n\n\n let rpc_timeout = Duration::from_secs(30);\n\n\n\n let extension_options = Default::default();\n\n\n\n Ok(TxConfig {\n\n chain_id,\n", "file_path": "tools/test-framework/src/relayer/tx.rs", "rank": 93, "score": 218800.64163548307 }, { "content": "type UpgradeClientsForChainResult = Result<Vec<UpgradeClientResult>, Error>;\n\n\n", "file_path": "crates/relayer-cli/src/commands/tx/client.rs", "rank": 94, "score": 218148.66139425157 }, { "content": "fn parse_client_paths(components: &[&str]) -> Option<Path> {\n\n let first = match components.first() {\n\n Some(f) => *f,\n\n None => return None,\n\n };\n\n\n\n if first != \"clients\" {\n\n return None;\n\n }\n\n\n\n let client_id = match ClientId::from_str(components[1]) {\n\n Ok(s) => s,\n\n Err(_) => return None,\n\n };\n\n\n\n if components.len() == 3 {\n\n match components[2] {\n\n \"clientType\" => Some(ClientTypePath(client_id).into()),\n\n \"clientState\" => Some(ClientStatePath(client_id).into()),\n\n \"connections\" => Some(ClientConnectionsPath(client_id).into()),\n", "file_path": "crates/relayer-types/src/core/ics24_host/path.rs", "rank": 95, "score": 218116.7437585603 }, { "content": "fn encode_key_bytes(key: &KeyEntry) -> Result<Vec<u8>, Error> {\n\n let mut pk_buf = Vec::new();\n\n\n\n prost::Message::encode(&key.public_key.to_pub().to_bytes(), &mut pk_buf)\n\n .map_err(|e| Error::protobuf_encode(\"PublicKey\".into(), e))?;\n\n\n\n Ok(pk_buf)\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/encode.rs", "rank": 96, "score": 217341.0408171086 }, { "content": "type UpgradeClientResult = Result<Vec<IbcEvent>, Error>;\n", "file_path": "crates/relayer-cli/src/commands/tx/client.rs", "rank": 97, "score": 215837.35247431824 }, { "content": "fn encode_tx_raw(tx_raw: TxRaw) -> Result<Vec<u8>, 
Error> {\n\n let mut tx_bytes = Vec::new();\n\n prost::Message::encode(&tx_raw, &mut tx_bytes)\n\n .map_err(|e| Error::protobuf_encode(\"Transaction\".to_string(), e))?;\n\n\n\n Ok(tx_bytes)\n\n}\n\n\n", "file_path": "crates/relayer/src/chain/cosmos/encode.rs", "rank": 98, "score": 215144.40350754256 }, { "content": "### Initialization\n\n\n\nThe relayer performs initialization based on the content of the configuration file:\n\n- the file is parsed and semantically validated\n\n- the chains, connections, ports, channels for which relaying is enabled are stored in the Config structure\n\n\n\n```rust\n\npub struct Config {\n\n pub global: GlobalConfig,\n\n pub chains: Vec<ChainConfig>,\n\n pub connections: Option<Vec<Connection>>,\n\n}\n\n\n\npub struct GlobalConfig {\n\n /// All valid log levels, as defined in tracing:\n\n /// https://docs.rs/tracing-core/0.1.17/tracing_core/struct.Level.html\n\n pub log_level: String,\n\n}\n\n\n\npub struct ChainConfig {\n\n pub id: ChainId,\n\n pub rpc_addr: tendermint_rpc::Url,\n\n pub websocket_addr: tendermint_rpc::Url,\n\n pub grpc_addr: tendermint_rpc::Url,\n\n pub rpc_timeout: Duration,\n\n pub account_prefix: String,\n\n pub key_name: String,\n\n pub client_ids: Vec<String>,\n\n pub gas: u64,\n\n pub trusting_period: Duration,\n\n}\n\n\n\npub struct Connection {\n\n pub src: Option<ConnectionEnd>, // use any source\n\n pub dest: Option<ConnectionEnd>, // use any destination\n\n pub paths: Option<Vec<RelayPath>>, // use any port, direction bidirectional\n\n}\n\n\n\npub struct ConnectionEnd {\n\n pub client_id: String,\n\n pub connection_id: Option<String>, // use all connections to this client\n\n}\n\n\n\npub enum Direction {\n\n Unidirectional,\n\n Bidirectional,\n\n}\n\n\n\npub struct RelayPath {\n\n pub src_port: Option<String>, // default from any source port\n\n pub dest_port: Option<String>, // default from any dest port\n\n pub src_channel: Option<String>, // default from any source port\n\n pub dest_channel: 
Option<String>, // default from any dest port\n\n pub direction: Direction, // default bidirectional\n\n}\n\n```\n\nAll `Option` fields with `None` values mean \"any\" values. For `direction`, default is bidirectional.\n\nAll non-`Option` fields are mandatory and must appear in the configuration file.\n\nIf the relayer is started with an invalid configuration file, an error is displayed and the realyer process exits.\n\n\n\n### Relayer Commands\n\n\n", "file_path": "docs/architecture/adr-002-ibc-relayer.md", "rank": 99, "score": 48.57111694425606 } ]
Rust
src/http_client/client_impl.rs
ngi-nix/etebase-rs
33447aa22a2d8d2a93863312d6f61e341682ebfe
use serde::Deserialize; use crate::error::{Error, Result}; pub trait ClientImplementation { fn get(&self, url: &str, auth_token: Option<&str>) -> Response; fn post(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn put(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn patch(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn delete(&self, url: &str, auth_token: Option<&str>) -> Response; } #[derive(Clone)] pub struct Response { bytes: Vec<u8>, status: u16, err: Option<Error>, } impl Response { pub fn new(bytes: Vec<u8>, status: u16) -> Self { Self { bytes, status, err: None, } } pub fn new_err(err: Error) -> Self { Self { bytes: vec![], status: 0, err: Some(err), } } pub fn reset_ok(&mut self, bytes: Vec<u8>, status: u16) { self.bytes = bytes; self.status = status; self.err = None; } pub fn reset_err(&mut self, err: Error) { self.err = Some(err); } pub fn bytes(&self) -> &[u8] { &self.bytes } pub fn status(&self) -> u16 { self.status } pub fn error_for_status(&self) -> Result<()> { if self.status >= 200 && self.status < 300 { return Ok(()); } #[derive(Deserialize)] struct ErrorResponse<'a> { pub code: Option<&'a str>, pub detail: Option<&'a str>, } let content: ErrorResponse = rmp_serde::from_read_ref(self.bytes()).unwrap_or(ErrorResponse { code: None, detail: None, }); Err(match self.status { 300..=399 => Error::NotFound("Got a redirect - should never happen".to_string()), 401 => Error::Unauthorized(content.detail.unwrap_or("Unauthorized").to_string()), 403 => { Error::PermissionDenied(content.detail.unwrap_or("PermissionDenied").to_string()) } 404 => Error::NotFound(content.detail.unwrap_or("NotFound").to_string()), 409 => Error::Conflict(content.detail.unwrap_or("Conflict").to_string()), 502..=504 => Error::TemporaryServerError( content .detail .unwrap_or("Temporary server error") .to_string(), ), 500..=501 | 505..=599 => { Error::ServerError(content.detail.unwrap_or("Server 
error").to_string()) } status => Error::Http(format!( "HTTP error {}! Code: '{}'. Detail: '{}'", status, content.code.unwrap_or("null"), content.detail.unwrap_or("null") )), }) } #[deprecated(since = "0.5.1", note = "please use `into_result` instead")] #[allow(clippy::wrong_self_convention)] pub fn as_result(self) -> Result<Self> { self.into_result() } pub fn into_result(self) -> Result<Self> { match self.err { Some(err) => Err(err), None => Ok(self), } } }
use serde::Deserialize; use crate::error::{Error, Result}; pub trait ClientImplementation { fn get(&self, url: &str, auth_token: Option<&str>) -> Response; fn post(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn put(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn patch(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response; fn delete(&self, url: &str, auth_token: Option<&str>) -> Response; } #[derive(Clone)] pub struct Response { bytes: Vec<u8>, status: u16, err: Option<Error>, } impl Response { pub fn new(bytes: Vec<u8>, status: u16) -> Self { Self { bytes, status, err: None, } } pub fn new_err(err: Error) -> Self { Self { bytes: vec![], status: 0, err: Some(err), } } pub fn reset_ok(&mut self, bytes: Vec<u8>, status: u16) { self.bytes = bytes; self.status = status; self.err = None; } pub fn reset_err(&mut self, err: Error) { self.err = Some(err); } pub fn bytes(&self) -> &[u8] { &self.bytes } pub fn status(&self) -> u16 { self.status } pub fn error_for_status(&self) -> Result<()> { if self.status >= 200 && self.status < 300 { return Ok(()); } #[derive(Deserialize)] struct ErrorResponse<'a> { pub code: Option<&'a str>, pub detail: Option<&'a str>, } let con
.detail .unwrap_or("Temporary server error") .to_string(), ), 500..=501 | 505..=599 => { Error::ServerError(content.detail.unwrap_or("Server error").to_string()) } status => Error::Http(format!( "HTTP error {}! Code: '{}'. Detail: '{}'", status, content.code.unwrap_or("null"), content.detail.unwrap_or("null") )), }) } #[deprecated(since = "0.5.1", note = "please use `into_result` instead")] #[allow(clippy::wrong_self_convention)] pub fn as_result(self) -> Result<Self> { self.into_result() } pub fn into_result(self) -> Result<Self> { match self.err { Some(err) => Err(err), None => Ok(self), } } }
tent: ErrorResponse = rmp_serde::from_read_ref(self.bytes()).unwrap_or(ErrorResponse { code: None, detail: None, }); Err(match self.status { 300..=399 => Error::NotFound("Got a redirect - should never happen".to_string()), 401 => Error::Unauthorized(content.detail.unwrap_or("Unauthorized").to_string()), 403 => { Error::PermissionDenied(content.detail.unwrap_or("PermissionDenied").to_string()) } 404 => Error::NotFound(content.detail.unwrap_or("NotFound").to_string()), 409 => Error::Conflict(content.detail.unwrap_or("Conflict").to_string()), 502..=504 => Error::TemporaryServerError( content
random
[ { "content": "pub fn derive_key(salt: &[u8], password: &str) -> Result<Vec<u8>> {\n\n let mut key = vec![0; 32];\n\n let salt = &salt[..argon2id13::SALTBYTES];\n\n let salt: &[u8; argon2id13::SALTBYTES] =\n\n to_enc_error!(salt.try_into(), \"Expect salt to be at least 16 bytes\")?;\n\n let password = password.as_bytes();\n\n\n\n let ret = argon2id13::derive_key(\n\n &mut key,\n\n password,\n\n &argon2id13::Salt(*salt),\n\n argon2id13::OPSLIMIT_SENSITIVE,\n\n argon2id13::MEMLIMIT_MODERATE,\n\n );\n\n Ok(to_enc_error!(ret, \"pwhash failed\")?.as_ref().to_vec())\n\n}\n\n\n\npub struct CryptoManager {\n\n pub version: u8,\n\n cipher_key: [u8; 32],\n", "file_path": "src/crypto.rs", "rank": 0, "score": 198838.4914359434 }, { "content": "/// Convert a Base64 URL encoded string to a buffer\n\n///\n\n/// # Arguments:\n\n/// * `string` - the Base64 URL encoded string\n\npub fn from_base64(string: &StrBase64) -> Result<Vec<u8>> {\n\n match base64::decode(string, base64::Variant::UrlSafeNoPadding) {\n\n Ok(bytes) => Ok(bytes),\n\n Err(_) => Err(Error::Base64(\"Failed decoding base64 string\")),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 186978.2197330059 }, { "content": "pub fn init() -> error::Result<()> {\n\n crypto::init()\n\n}\n", "file_path": "src/lib.rs", "rank": 2, "score": 161776.85268845706 }, { "content": "/// Convert a buffer to a Base64 URL encoded string\n\n///\n\n/// # Arguments:\n\n/// * `bytes` - the buffer to convert\n\npub fn to_base64(bytes: &[u8]) -> Result<StringBase64> {\n\n Ok(base64::encode(bytes, base64::Variant::UrlSafeNoPadding))\n\n}\n\n\n\n// Fisher–Yates shuffle - an unbiased shuffler\n\n// The returend indices of where item is now.\n\n// So if the first item moved to position 3: ret[0] = 3\n\npub(crate) fn shuffle<T>(a: &mut Vec<T>) -> Vec<usize> {\n\n let len = a.len();\n\n let mut shuffled_indices: Vec<usize> = (0..len).collect();\n\n\n\n for i in 0..len {\n\n let j = i + 
sodiumoxide::randombytes::randombytes_uniform((len - i) as u32) as usize;\n\n a.swap(i, j);\n\n shuffled_indices.swap(i, j);\n\n }\n\n\n\n let mut ret = vec![0; len];\n\n for i in 0..len {\n\n ret[shuffled_indices[i]] = i;\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 159684.05877928648 }, { "content": "/// Return a buffer filled with cryptographically random bytes\n\n///\n\n/// # Arguments:\n\n/// * `size` - the size of the returned buffer (in bytes)\n\npub fn randombytes(size: usize) -> Vec<u8> {\n\n sodiumoxide::randombytes::randombytes(size)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 4, "score": 145686.08085385064 }, { "content": "fn generichash_quick(msg: &[u8], key: Option<&[u8]>) -> Result<Vec<u8>> {\n\n let mut state = to_enc_error!(generichash::State::new(32, key), \"Failed to init hash\")?;\n\n to_enc_error!(state.update(msg), \"Failed to update hash\")?;\n\n Ok(to_enc_error!(state.finalize(), \"Failed to finalize hash\")?\n\n .as_ref()\n\n .to_vec())\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 5, "score": 145118.572742657 }, { "content": "/// Return a buffer filled with deterministically cryptographically random bytes\n\n///\n\n/// This function is similar to [randombytes] but always returns the same data for the same seed.\n\n/// Useful for testing purposes.\n\n///\n\n/// # Arguments:\n\n/// * `seed` - the seed to generate the random data from\n\n/// * `size` - the size of the returned buffer (in bytes)\n\npub fn randombytes_deterministic(size: usize, seed: &[u8; 32]) -> Vec<u8> {\n\n // Not exactly like the sodium randombytes_deterministic but close enough\n\n let nonce =\n\n sodiumoxide::crypto::stream::xchacha20::Nonce(*b\"LibsodiumDRG\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\");\n\n let key = sodiumoxide::crypto::stream::xchacha20::Key(*seed);\n\n\n\n sodiumoxide::crypto::stream::xchacha20::stream(size, &nonce, &key)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 6, "score": 140627.3202634475 }, { "content": 
"pub fn test_reset(client: &Client, body_struct: SignupBody) -> Result<()> {\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n let url = client.api_base.join(\"api/v1/test/authentication/reset/\")?;\n\n\n\n let res = client.post(url.as_str(), body)?;\n\n\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n}\n\n\n\n/// A collection for which the user lost access\n\n///\n\n/// Deleted collections are marked using [crate::Collection::is_deleted]. However, when we just lose access\n\n/// to a collection and it hasn't been deleted, we get this object.\n\n#[derive(Deserialize, Clone)]\n\npub struct RemovedCollection {\n\n uid: StringBase64,\n\n}\n\n\n", "file_path": "src/online_managers.rs", "rank": 7, "score": 134262.6107604373 }, { "content": "pub fn init() -> Result<()> {\n\n to_enc_error!(sodiumoxide::init(), \"Failed initialising libsodium\")\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 8, "score": 125749.40264777982 }, { "content": "/// A constant-time comparison function\n\n///\n\n/// Use this when comparing secret data in order to prevent side-channel attacks.\n\n///\n\n/// # Arguments:\n\n/// * `x` - the first buffer\n\n/// * `y` - the second buffer\n\npub fn memcmp(x: &[u8], y: &[u8]) -> bool {\n\n sodiumoxide::utils::memcmp(x, y)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 9, "score": 124796.10524185825 }, { "content": "pub fn apply_fetch_options(url: Url, options: Option<&FetchOptions>) -> Url {\n\n let options = match options {\n\n Some(options) => options,\n\n None => return url,\n\n };\n\n\n\n let mut url = url;\n\n {\n\n let mut query = url.query_pairs_mut();\n\n if let Some(limit) = options.limit {\n\n query.append_pair(\"limit\", &limit.to_string());\n\n }\n\n if let Some(prefetch) = options.prefetch {\n\n let prefetch = match prefetch {\n\n PrefetchOption::Auto => \"auto\",\n\n PrefetchOption::Medium => \"medium\",\n\n };\n\n query.append_pair(\"prefetch\", prefetch);\n\n }\n\n if let Some(with_collection) = options.with_collection 
{\n", "file_path": "src/online_managers.rs", "rank": 10, "score": 113911.10762855585 }, { "content": "/// Return a pretty formatted fingerprint of the content\n\n///\n\n/// For example:\n\n/// ```shell\n\n/// 45680 71497 88570 93128\n\n/// 19189 84243 25687 20837\n\n/// 47924 46071 54113 18789\n\n/// ```\n\n///\n\n/// # Arguments:\n\n/// * `content` - the content to create a fingerprint for\n\npub fn pretty_fingerprint(content: &[u8]) -> String {\n\n let delimiter = \" \";\n\n let fingerprint = generichash_quick(content, None).unwrap();\n\n\n\n /* We use 3 bytes each time to generate a 5 digit number - this means 10 pairs for bytes 0-29\n\n * We then use bytes 29-31 for another number, and then the 3 most significant bits of each first byte for the last.\n\n */\n\n let mut last_num: u32 = 0;\n\n let parts = (0..10).into_iter().map(|i| {\n\n let suffix = if i % 4 == 3 { \"\\n\" } else { delimiter };\n\n\n\n last_num = (last_num << 3) | ((fingerprint[i] as u32) & 0xE0) >> 5;\n\n get_encoded_chunk(&fingerprint[i * 3..], suffix)\n\n });\n\n\n\n let last_num = (0..10).into_iter().fold(0, |accum, i| {\n\n (accum << 3) | ((fingerprint[i] as u32) & 0xE0) >> 5\n\n }) % 100000;\n\n let last_num = format!(\"{:0>5}\", last_num);\n\n let parts = parts\n\n .chain(std::iter::once(get_encoded_chunk(\n\n &fingerprint[29..],\n\n delimiter,\n\n )))\n\n .chain(std::iter::once(last_num));\n\n parts.collect::<String>()\n\n}\n", "file_path": "src/crypto.rs", "rank": 12, "score": 109859.53669122685 }, { "content": "/// The `unpad()` function computes the original, unpadded length of a message previously padded\n\n/// using [`pad()`]. 
The original length is returned upon success.\n\npub fn unpad(buf: &[u8], padded_buflen: usize, blocksize: usize) -> Result<usize, ()> {\n\n let mut unpadded_buflen_p: usize = 0;\n\n unsafe {\n\n if 0 == ffi::sodium_unpad(\n\n &mut unpadded_buflen_p,\n\n buf.as_ptr() as *const _,\n\n padded_buflen,\n\n blocksize,\n\n ) {\n\n Ok(unpadded_buflen_p)\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n", "file_path": "src/utils/sodium_padding.rs", "rank": 13, "score": 107093.79964595834 }, { "content": "#[test]\n\nfn get_dashboard_url() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n\n\n match etebase.fetch_dashboard_url() {\n\n Ok(url) => assert!(url.len() > 0),\n\n err => assert_err!(err, Error::Http(_)),\n\n };\n\n\n\n etebase.logout()\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 14, "score": 104954.80712330033 }, { "content": "/// The `pad()` function adds padding data to a buffer buf whose original size is `unpadded_buflen`\n\n/// in order to extend its total length to a multiple of blocksize.\n\n///\n\n/// The function returns `Err(())` if the padded buffer length would exceed `max_buflen`, or if the\n\n/// block size is 0. 
It returns a result containing the new padded length upon success.\n\npub fn pad(buf: &mut [u8], unpadded_buflen: usize, blocksize: usize) -> Result<usize, ()> {\n\n let mut padded_buflen_p: usize = 0;\n\n unsafe {\n\n if 0 == ffi::sodium_pad(\n\n &mut padded_buflen_p,\n\n buf.as_mut_ptr() as *mut _,\n\n unpadded_buflen,\n\n blocksize,\n\n buf.len(),\n\n ) {\n\n Ok(padded_buflen_p)\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils/sodium_padding.rs", "rank": 15, "score": 102575.59602832266 }, { "content": "fn get_encoded_chunk(content: &[u8], suffix: &str) -> String {\n\n let num =\n\n (((content[0] as u32) << 16) + ((content[1] as u32) << 8) + (content[2] as u32)) % 100000;\n\n return format!(\"{:0>5}{}\", num, suffix);\n\n}\n\n\n", "file_path": "src/crypto.rs", "rank": 16, "score": 101791.20601220075 }, { "content": "pub fn test_url() -> String {\n\n let server = env::var(\"ETEBASE_TEST_HOST\").expect(\"Set ETEBASE_TEST_HOST to run tests\");\n\n format!(\"http://{}\", server)\n\n}\n\n\n\n#[allow(non_snake_case)]\n\npub struct TestUser {\n\n pub username: &'static str,\n\n pub email: &'static str,\n\n pub password: &'static str,\n\n pub pubkey: &'static str,\n\n pub encryptedContent: &'static str,\n\n\n\n pub loginPubkey: &'static str,\n\n pub key: &'static str,\n\n pub salt: &'static str,\n\n\n\n pub storedSession: &'static str,\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 17, "score": 101551.38051164092 }, { "content": "/// A trait for serializing and deserializing to MsgPack\n\npub trait MsgPackSerilization {\n\n /// The type of the struct implementing this trait\n\n type Output;\n\n\n\n /// Convert self to a msgpack encoded buffer\n\n fn to_msgpack(&self) -> Result<Vec<u8>>;\n\n\n\n /// Create the struct from a MsgPack encoded buffer\n\n ///\n\n /// # Arguments:\n\n /// * `data` - the MsgPack buffer\n\n fn from_msgpack(data: &[u8]) -> Result<Self::Output>;\n\n}\n", "file_path": "src/utils.rs", "rank": 18, "score": 
94792.21193068725 }, { "content": "fn verify_collection(col: &Collection, meta: &ItemMetadata, content: &[u8]) -> Result<()> {\n\n col.verify()?;\n\n assert_eq!(&col.meta()?, meta);\n\n assert_eq!(col.content()?, content);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 19, "score": 88314.35202282511 }, { "content": "fn verify_item(item: &Item, meta: &ItemMetadata, content: &[u8]) -> Result<()> {\n\n item.verify()?;\n\n assert_eq!(&item.meta()?, meta);\n\n assert_eq!(item.content()?, content);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 20, "score": 88314.35202282511 }, { "content": "#[test]\n\nfn collection_as_item() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let mut col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n // Verify with_collection works\n\n {\n\n let items = it_mgr.list(None)?;\n\n assert_eq!(items.data().len(), 0);\n", "file_path": "tests/service.rs", "rank": 21, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn is_etebase_server() -> Result<()> {\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n assert!(Account::is_etebase_server(&client)?);\n\n\n\n let test_url = format!(\"{}/api/\", test_url());\n\n let client = Client::new(CLIENT_NAME, &test_url)?;\n\n assert!(!Account::is_etebase_server(&client)?);\n\n\n\n let client = Client::new(CLIENT_NAME, \"http://doesnotexist\")?;\n\n assert!(Account::is_etebase_server(&client).is_err());\n\n\n\n // Verify we also fail correctly for login\n\n let client = Client::new(CLIENT_NAME, &test_url)?;\n\n assert_err!(\n\n Account::login(client.clone(), USER2.username, USER2.password),\n\n 
Error::NotFound(_)\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 22, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn item_revisions() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n let meta = ItemMetadata::new().set_name(Some(\"Item Orig\")).clone();\n\n let content = b\"\";\n\n let mut item = it_mgr.create(&meta, content)?;\n\n\n\n for i in 0..5 {\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(&format!(\"Item {}\", i)))\n\n .clone();\n\n item.set_meta(&meta)?;\n", "file_path": "tests/service.rs", "rank": 23, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn collection_types() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n {\n\n let collections = col_mgr.list(\"some.coltype\", None)?;\n\n assert_eq!(collections.data().len(), 0);\n\n }\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n {\n", "file_path": "tests/service.rs", "rank": 24, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn deterministic_encrypt() -> Result<()> {\n\n etebase::init().unwrap();\n\n\n\n let key = from_base64(USER.key)?;\n\n\n\n let context = b\"Col \";\n\n let crypto_manager = crypto::CryptoManager::new(\n\n &key[0..32].try_into().unwrap(),\n\n context,\n\n etebase::CURRENT_VERSION,\n\n )\n\n .unwrap();\n\n\n\n // Deterministic encryption\n\n let clear_text = b\"This Is Some 
Test Cleartext.\";\n\n let cipher = crypto_manager\n\n .deterministic_encrypt(clear_text, None)\n\n .unwrap();\n\n let decrypted = crypto_manager.deterministic_decrypt(&cipher, None).unwrap();\n\n assert_eq!(clear_text, &decrypted[..]);\n", "file_path": "tests/crypto.rs", "rank": 25, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn iterating_invitations() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n\n\n let etebase2 = init_test(&USER2)?;\n\n let invite_mgr2 = etebase2.invitation_manager()?;\n\n\n\n let invite_mgr = etebase.invitation_manager()?;\n\n let user2_profile = invite_mgr.fetch_user_profile(USER2.username)?;\n\n\n\n for i in 0..3 {\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(&format!(\"Item {}\", i)))\n\n .clone();\n\n let content = b\"\";\n\n let col = col_mgr.create(\"some.coltype\", &meta, content).unwrap();\n\n col_mgr.upload(&col, None)?;\n\n invite_mgr.invite(\n\n &col,\n\n USER2.username,\n", "file_path": "tests/service.rs", "rank": 26, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn collection_invitations() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let items: Vec<Item> = (0..5)\n\n .into_iter()\n\n .map(|i| {\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(&format!(\"Item {}\", i)))\n\n .clone();\n\n let content = b\"\";\n\n it_mgr.create(&meta, content).unwrap()\n", "file_path": "tests/service.rs", "rank": 27, "score": 82385.72860755982 }, { "content": "fn main() -> Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() < 4 {\n\n println!(\"Help: ./etebase_test USERNAME PASSWORD SERVER_URL 
[COLLECTION_UID]\");\n\n std::process::exit(1);\n\n }\n\n\n\n let username = &args[1];\n\n let password = &args[2];\n\n let server_url = &args[3];\n\n\n\n let client = Client::new(CLIENT_NAME, server_url)?;\n\n let etebase = Account::login(client, username, password)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n if args.len() >= 5 {\n\n let col_uid = &args[4];\n\n let col = col_mgr.fetch(col_uid, None)?;\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n let items = it_mgr.list(None)?;\n", "file_path": "examples/etebase_test.rs", "rank": 28, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn item_transactions() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n let meta = ItemMetadata::new().set_name(Some(\"Item 1\")).clone();\n\n let content = b\"\";\n\n let mut item = it_mgr.create(&meta, content)?;\n\n\n\n let deps = vec![&item];\n\n it_mgr.transaction(deps.clone().into_iter(), None)?;\n\n\n\n let item_old = it_mgr.fetch(item.uid(), None)?;\n\n let mut item_old2 = it_mgr.fetch(item.uid(), None)?;\n", "file_path": "tests/service.rs", "rank": 29, "score": 82385.72860755982 }, { "content": "#[test]\n\n#[ignore]\n\nfn signup_with_key() -> Result<()> {\n\n // FIXME: Doesn't work at the moment, because we can't signup with the same user. 
We need reset\n\n // to support wiping.\n\n\n\n // Reset user manually because we want to signup\n\n etebase::init()?;\n\n user_reset(&USER)?;\n\n\n\n let main_key = from_base64(USER.key)?;\n\n\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let user = User::new(USER.username, USER.email);\n\n let etebase = Account::signup_key(client, &user, &main_key)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n", "file_path": "tests/service.rs", "rank": 30, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn empty_content() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n {\n\n let col2 = col_mgr.fetch(col.uid(), None)?;\n\n verify_collection(&col2, &col_meta, col_content)?;\n\n }\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item 1\")).clone();\n\n let content = b\"\";\n\n\n", "file_path": "tests/service.rs", "rank": 31, "score": 82385.72860755982 }, { "content": "#[test]\n\nfn cache_collections_and_items() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item\")).clone();\n\n let content = 
b\"SomeItemContent\";\n\n let item = it_mgr.create(&meta, content)?;\n\n\n", "file_path": "tests/service.rs", "rank": 32, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn simple_collection_handling() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let content = b\"SomeContent\";\n\n\n\n let mut col = col_mgr.create(\"some.coltype\", &meta, content)?;\n\n assert_eq!(col.collection_type()?, \"some.coltype\");\n\n verify_collection(&col, &meta, content)?;\n\n\n\n let meta2 = meta.clone().set_name(Some(\"Collection meta2\")).clone();\n\n col.set_meta(&meta2)?;\n\n verify_collection(&col, &meta2, content)?;\n\n\n\n assert!(!col.is_deleted());\n\n col.delete()?;\n\n assert!(col.is_deleted());\n\n verify_collection(&col, &meta2, content)?;\n\n\n\n etebase.logout()\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 33, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn simple_collection_sync() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let content = b\"SomeContent\";\n\n\n\n let mut col = col_mgr.create(\"some.coltype\", &meta, content)?;\n\n verify_collection(&col, &meta, content)?;\n\n\n\n let collections = col_mgr.list(\"some.coltype\", None)?;\n\n assert_eq!(collections.data().len(), 0);\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let collections = col_mgr.list(\"some.coltype\", None)?;\n\n assert_eq!(collections.data().len(), 1);\n", "file_path": "tests/service.rs", "rank": 34, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn simple_item_handling() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let 
col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item 1\")).clone();\n\n let content = b\"ItemContent\";\n\n let mut item = it_mgr.create(&meta, content)?;\n\n verify_item(&item, &meta, content)?;\n\n\n\n let meta2 = ItemMetadata::new().set_name(Some(\"Item 2\")).clone();\n\n item.set_meta(&meta2)?;\n\n verify_item(&item, &meta2, content)?;\n\n\n\n assert!(!item.is_deleted());\n\n item.delete()?;\n\n assert!(item.is_deleted());\n\n verify_item(&item, &meta2, content)?;\n\n\n\n etebase.logout()\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 35, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn collection_and_item_deletion() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let mut col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item 1\")).clone();\n\n let content = b\"Content 1\";\n\n\n\n let mut item = it_mgr.create(&meta, content)?;\n\n\n\n it_mgr.batch(iter::once(&item), None)?;\n\n\n\n let items = it_mgr.list(None)?;\n", "file_path": "tests/service.rs", "rank": 36, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn simple_item_sync() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = 
b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item 1\")).clone();\n\n let content = b\"Content 1\";\n\n\n\n let mut item = it_mgr.create(&meta, content)?;\n", "file_path": "tests/service.rs", "rank": 37, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn collection_access_level() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let items: Vec<Item> = (0..5)\n\n .into_iter()\n\n .map(|i| {\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(&format!(\"Item {}\", i)))\n\n .clone();\n\n let content = b\"\";\n\n it_mgr.create(&meta, content).unwrap()\n\n })\n", "file_path": "tests/service.rs", "rank": 38, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn item_batch_stoken() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n let meta = ItemMetadata::new().set_name(Some(\"Item Orig\")).clone();\n\n let content = b\"\";\n\n let mut item = it_mgr.create(&meta, content)?;\n\n\n\n it_mgr.batch(iter::once(&item), None)?;\n\n\n\n let mut item2 = it_mgr.fetch(item.uid(), None)?;\n\n\n\n let items: Vec<Item> = (0..5)\n", "file_path": "tests/service.rs", "rank": 39, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn 
session_save_and_restore() -> Result<()> {\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let etebase = init_test(&USER)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n col_mgr.upload(&col, None)?;\n\n\n\n // Verify we can store and restore without an encryption key\n\n {\n\n let saved = etebase.save(None)?;\n\n let etebase2 = Account::restore(client.clone(), &saved, None)?;\n\n\n", "file_path": "tests/service.rs", "rank": 40, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn chunk_preupload_and_download() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().set_name(Some(\"Item\")).clone();\n\n let content = b\"SomeItemContent\";\n\n let item = it_mgr.create(&meta, content)?;\n\n assert!(!item.is_missing_content());\n", "file_path": "tests/service.rs", "rank": 41, "score": 79468.28479011814 }, { "content": "#[test]\n\n#[ignore]\n\nfn login_and_password_change() -> Result<()> {\n\n // Reset both users\n\n let etebase = init_test(&USER)?;\n\n etebase.logout()?;\n\n let etebase2 = init_test(&USER2)?;\n\n etebase2.logout()?;\n\n\n\n let another_password = \"AnotherPassword\";\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let mut etebase2 = Account::login(client.clone(), USER2.username, 
USER2.password)?;\n\n\n\n let col_mgr2 = etebase2.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr2.create(\"some.coltype\", &col_meta, col_content)?;\n", "file_path": "tests/service.rs", "rank": 42, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn item_fetch_updates() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n let meta = ItemMetadata::new().set_name(Some(\"Item Orig\")).clone();\n\n let content = b\"\";\n\n let item = it_mgr.create(&meta, content)?;\n\n\n\n it_mgr.batch(iter::once(&item), None)?;\n\n\n\n let items: Vec<Item> = (0..5)\n\n .into_iter()\n\n .map(|i| {\n", "file_path": "tests/service.rs", "rank": 43, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn list_response_correctness() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new().set_name(Some(\"Collection\")).clone();\n\n let col_content = b\"\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n {\n\n let col2 = col_mgr.fetch(col.uid(), None)?;\n\n verify_collection(&col2, &col_meta, col_content)?;\n\n }\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let items: Vec<Item> = (0..5)\n\n .into_iter()\n\n .map(|i| {\n", "file_path": "tests/service.rs", "rank": 44, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn chunking_large_data() -> Result<()> {\n\n let etebase = 
init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col_content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, col_content)?;\n\n\n\n col_mgr.upload(&col, None)?;\n\n\n\n let it_mgr = col_mgr.item_manager(&col)?;\n\n\n\n let meta = ItemMetadata::new().clone();\n\n let content = randombytes_deterministic(120 * 1024, &[0; 32]); // 120kb of pseuedorandom data\n\n\n\n let mut item = it_mgr.create(&meta, &content)?;\n", "file_path": "tests/service.rs", "rank": 45, "score": 79468.28479011814 }, { "content": "#[test]\n\nfn login_and_password_change_with_key() -> Result<()> {\n\n // Reset user\n\n let etebase = init_test(&USER)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col_meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let col = col_mgr.create(\"some.coltype\", &col_meta, b\"\")?;\n\n col_mgr.upload(&col, None)?;\n\n\n\n etebase.logout()?;\n\n\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let main_key = from_base64(USER.key)?;\n\n let etebase = Account::login_key(client, USER.username, &main_key)?;\n\n\n\n let col_mgr = etebase.collection_manager()?;\n\n let col2 = col_mgr.fetch(col.uid(), None)?;\n\n verify_collection(&col2, &col_meta, b\"\")?;\n\n\n\n etebase.logout()\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 46, "score": 76856.99630225747 }, { "content": "#[test]\n\nfn simple_cache_handling() -> Result<()> {\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let etebase = init_test_local(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let meta = ItemMetadata::new()\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n 
.clone();\n\n let content = b\"SomeContent\";\n\n\n\n let col = col_mgr.create(\"some.coltype\", &meta, content)?;\n\n\n\n let temp_dir = TempDir::new()?;\n\n let fs_cache = FileSystemCache::new(temp_dir.path(), USER.username)?;\n\n\n\n assert!(fs_cache.load_account(&client, None).is_err());\n\n fs_cache.save_account(&etebase, None)?;\n\n fs_cache.load_account(&client, None)?;\n\n\n", "file_path": "tests/fs_cache.rs", "rank": 47, "score": 76856.99630225747 }, { "content": "#[derive(Serialize)]\n\nstruct LoginBody<'a> {\n\n #[serde(with = \"serde_bytes\")]\n\n response: &'a [u8],\n\n #[serde(with = \"serde_bytes\")]\n\n signature: &'a [u8],\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct LoginBodyResponse<'a> {\n\n pub username: &'a str,\n\n #[serde(with = \"serde_bytes\")]\n\n pub challenge: &'a [u8],\n\n pub host: &'a str,\n\n pub action: &'a str,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct LoginResponseUser {\n\n pub username: String,\n", "file_path": "src/online_managers.rs", "rank": 48, "score": 76357.292181752 }, { "content": "#[test]\n\nfn loading_cache_without_collection_type() -> Result<()> {\n\n let etebase = init_test(&USER)?;\n\n let col_mgr = etebase.collection_manager()?;\n\n let meta = ItemMetadata::new()\n\n .set_item_type(Some(\"type\"))\n\n .set_name(Some(\"Collection\"))\n\n .set_description(Some(\"Mine\"))\n\n .set_color(Some(\"#aabbcc\"))\n\n .clone();\n\n let content = b\"SomeContent\";\n\n\n\n let col = 
col_mgr.cache_load(&etebase::utils::from_base64(\"kgHcBTvMlMyVzNkgTl9vMjNITDFsVlQ1VG5Dd1QzVkx0VDN5TUtBbDZ0UDYBzMDMlMy2QktLMk50R2NCY1pDRDBTcllKeHBzQczEUzXMyVxJSQptzPDMyMzNPMzhAVbMrsyjzPfMjU0pzODMq2lmzMAUEBLM_My-zOhGMsyREmHM6Mz3zO7MwVF8F2tnzOXM3TnM9MypzOQ9CczHfMzGCywPzOPMlsyTzOE-zIRAKczQJzVxRMz9NMz3MczbzMHM5zY_MDrMwsyRzJLM2StoaDhPTTFiQmR5bWZUTlZlanhwcWtPOExWLUpIUDFsUGRST2lCWXhnaEFnzMUEKMy4KGNOJ8z3zN1pzNFGzNLM_8yiCczpzLIxzJQSzNjM58yACS0sP8yCzKDM-GLM0MyCzJMPL8z-Hsz-QMzRT1BCRcyjWszRzLPMkxjMzFYmzMJ8IE1_zMMnCczyzIlUzIbMhxDM3XHMskHM9x9uaWhDzM3Myk1pzI7M78yWzOMrOFHMw04WzK7M-iXMm3tdzKAazMVpzOXMzczqR38KWMyPGEvMzkoyWmodzOvMwczSzLXMplkGzPIDzKvMxGEfQ8zjMcyIdAB4JczLcczfd8z5Vsz4ccyRdMzXzKU1zKcRzP_MycynzLhQzOUvJ8z3FMyHzOpMzLDMm8y6HAw1zKQgJl8CzNrM6XQFRsySFH7Mrn81zPPM4W0HzIPMh1PMpMyUzPNdzNsSzNE6ZQPM9cyGC04eDw7MrR9CaAnM73ZNzJYWOUzMr8zkzK_MxczAzKDMysz5zOt8LlN9zK3MynNizKDM0czXzNhBzNEdNhU8zNTMzTFIb8zmVszHzIfMiALMlkzM68zbzINAzNbMl8z8zMh_T8zHc8zJci7MtkDMl8ywcWIoe1zMuczCRFbMpcy3zJhvzK5PCz4EzMvMlszcAMyWzKIEAMzozPrMlh4JK8z3zMDM8XHMnWLMiMybAijMzczzzOtVaSPM-GbMvXrMksyyzK8_TQ7M9xzMw1PM-lvM68yrXHXM1y5AC0LM6iROcMzmzPzMlBHMtwrM50bMtlrMzX11zIvM4DM4zNIpDC1TEsybQkcuecynQMzCzK_M0sz-zOvMrhdNzJpuzKbM6j7Mjn7M6MzSYszrMzjM5szszIEJYWtfdVs7JVIvS0AQQcyBennMjMzgY8z6zORezOnM5CAZY8z3WWAKPcykT8yVzJDM4BjMz8zNzONtJglpzKRNQQHMxRPM7MymzPnMpy5RzNNIIsyfzKXM9hvMlUbMuczfzK8xzLTM08zlMcyczLLMlGzMrsyOAz7MvMzZzJjMwMyHHsygJMzczMnM1cybzLDMyszqDXrMmGZ3zJTM4nhqZhRhC8yTzMTMtHjM41UTIxvMtMyFzOZ-zIZyHszlzJM9WszhEUbM9VXMmxoNzKfMzUoKKUg9PjYOFMzgzO8jPQzM02TM8BcXzP7Mz8zUfkTMnVRJzIFCzK1ZH8yyzIrMvcy-zP5PzKJjFMygzKvM415LYszXQB_MnCfMzkJmbl_MnmDM42HM88zezOQibhHMuczczMfM2ixwzMvMpczDYRtWzLLM9cydIkjMrxDMj8yYzKkKzLtRzMM1zPbMyS5JYGnM08ylDmdUzL0ANMzgYSfM4jTMm8zvPE0dzL3Mw8zmzOxyzP3M93PM78z1NnoZVxfM7loNfMzxzPHM4j7MpB4NPD0rIczKzN4zKXnMww3M3czlADnMqczxeszuzMDMx8y_B8zyzPYTEjpZPD3M5cz6OALMxH3MgMznzPIXWkMZRzHMuHXMxMzrHEDM9szLaWYSWx4LdMz0zIjM5szMzN3M9syAzKUmWMzbf08QaEc9bMypzOA5bMzjzMQQzJNtVcyCzKJqNmLM0sy1NDMDRcyxzLrMyAzM8hPMv8y5zKoizOzMwEByGszRzIs4HcztNszTzP
7M-syyzL_Ms8zFzK5EOk0mYwBGC8yTzO_MynPMrsyxzNMeHszvzI4uzKZ9FTvMzQM2zO0yzNNkzIrM58zfzP7My3rMnMz0zLt9zMgIW8z3zL0ucMy0zK5OZsypT8zOzJnMmMzgGA_M6xLMn8zXzOZ8UnogzL7MtcygzKfMtszUzLXMjUcwbAUQWyzMv8yfLczGcUrM68zdSFlVzJhFzP_M0m3MohzMsljMz8yQGMz-HnBgzJbM3syUzPo7zM3MjknMriHM7QF7YicvBkltX8zeLMyBG3vMq8zNzPPMzcymzIvM4czmEMzTEcyrzK7MtMzszJXMj8z3W8ywzI_MtjHM68yTzKNBEGjMt8y-dTxcbMzATnjMyMzqXHRHHMznzKrMqMy4zKgozLxZDGlZzN7Ml8yAzP99zMdzDwdYTsy2zKYczMcgzKlmasyQzPlDzLRJzMMVzOvM2k3MswbMpjnM53XM-sykzN8QY2YQzMPMjELMwAHMxEjMo8zfFw7MzQEBbcybzJ7M2czrzMTM9x3Mmsy1zILM42xqzIo-MQ_M5czXzLrMxcyozNhHzNsNzLFHzI1vzL3MoyTM3AlKFxzM0WZ0GjnMs8yXURjMm2xhQ8yONGfM28zVzKPMtMzlzIHM6VjM2BLMwA\")?)?;\n\n assert_eq!(col.collection_type()?, \"type\");\n\n verify_collection(&col, &meta, content)?;\n\n\n\n etebase.logout()\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 49, "score": 74506.07397443125 }, { "content": "#[derive(Serialize)]\n\nstruct ItemBatchBody<'a> {\n\n items: &'a Vec<&'a EncryptedItem>,\n\n deps: Option<Vec<ItemBatchBodyDep<'a>>>,\n\n}\n\n\n\npub struct ItemManagerOnline {\n\n api_base: Url,\n\n client: Arc<Client>,\n\n}\n\n\n\nimpl ItemManagerOnline {\n\n pub fn new(client: Arc<Client>, col: &EncryptedCollection) -> Self {\n\n Self {\n\n api_base: client\n\n .api_base\n\n .join(&format!(\"api/v1/collection/{}/item/\", col.uid()))\n\n .unwrap(),\n\n client,\n\n }\n\n }\n", "file_path": "src/online_managers.rs", "rank": 50, "score": 73371.87075067524 }, { "content": "pub fn gen_uid_base64() -> StringBase64 {\n\n to_base64(&randombytes(24)).unwrap()\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct CachedContent {\n\n version: u8,\n\n data: Vec<u8>,\n\n}\n\n\n\npub struct AccountCryptoManager(pub CryptoManager);\n\n\n\nimpl AccountCryptoManager {\n\n const COLTYPE_PAD_SIZE: usize = 32;\n\n\n\n pub fn new(key: &[u8; 32], version: u8) -> Result<Self> {\n\n let context = b\"Acct \";\n\n\n\n Ok(Self {\n\n 0: CryptoManager::new(key, &context, version)?,\n", "file_path": 
"src/encrypted_models.rs", "rank": 51, "score": 72896.41923895209 }, { "content": "#[derive(Serialize)]\n\nstruct ItemBatchBodyDep<'a> {\n\n uid: &'a str,\n\n #[serde(skip_serializing_if = \"std::option::Option::is_none\")]\n\n etag: Option<String>,\n\n}\n\n\n", "file_path": "src/online_managers.rs", "rank": 52, "score": 70699.7381914202 }, { "content": "/// Return the recommended padding length for a buffer of specific length\n\n///\n\n/// Padding data before encrypting it is important for preventing fingerprint analysis attacks.\n\n/// This function aims to return the optimal balance between space efficiently and fingerprint\n\n/// resistance. The returned values may change between versions.\n\n///\n\n/// # Arguments:\n\n/// * `length` - the length of the buffer to pad\n\npub fn get_padding(length: u32) -> u32 {\n\n // Use the padme padding scheme for efficiently\n\n // https://www.petsymposium.org/2019/files/papers/issue4/popets-2019-0056.pdf\n\n\n\n // We want a minimum pad size of 4k\n\n if length < (1 << 14) {\n\n let size = (1 << 10) - 1;\n\n // We add 1 so we always have some padding\n\n return (length | size) + 1;\n\n }\n\n\n\n let e = (length as f64).log2().floor();\n\n let s = (e.log2().floor() as u32) + 1;\n\n let last_bits = (e as u32) - s;\n\n let bit_mask = (1 << last_bits) - 1;\n\n\n\n (length + bit_mask) & !bit_mask\n\n}\n\n\n\n// FIXME: we should properly pad the meta and probably change these functions\n", "file_path": "src/utils.rs", "rank": 53, "score": 70358.08216729706 }, { "content": "fn user_reset(user: &TestUser) -> Result<()> {\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let acct_user = etebase::User::new(user.username, user.email);\n\n // sign-up the account if necessary, ignoring errors as we're about to reset it anyway\n\n let _ = Account::signup_key(\n\n client.clone(),\n\n &acct_user,\n\n b\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\",\n\n );\n\n let body_struct = etebase::test_helpers::SignupBody {\n\n user: &acct_user,\n\n 
salt: &from_base64(user.salt)?,\n\n pubkey: &from_base64(user.pubkey)?,\n\n login_pubkey: &from_base64(user.loginPubkey)?,\n\n encrypted_content: &from_base64(user.encryptedContent)?,\n\n };\n\n test_reset(&client, body_struct)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 54, "score": 69469.89955373919 }, { "content": "fn init_test(user: &TestUser) -> Result<Account> {\n\n etebase::init()?;\n\n user_reset(&user)?;\n\n\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let session_key = from_base64(sessionStorageKey)?;\n\n\n\n let mut ret = Account::restore(client, user.storedSession, Some(&session_key))?;\n\n ret.force_server_url(&test_url())?;\n\n ret.fetch_token()?;\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "tests/service.rs", "rank": 55, "score": 65414.57216721926 }, { "content": "fn init_test_local(user: &TestUser) -> Result<Account> {\n\n etebase::init()?;\n\n\n\n let client = Client::new(CLIENT_NAME, &test_url())?;\n\n let session_key = from_base64(sessionStorageKey)?;\n\n\n\n let ret = Account::restore(client, user.storedSession, Some(&session_key))?;\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "tests/fs_cache.rs", "rank": 56, "score": 61712.90481966741 }, { "content": "struct StorageCryptoManager(CryptoManager);\n\n\n\nimpl StorageCryptoManager {\n\n pub fn new(key: &[u8; 32], version: u8) -> Result<Self> {\n\n let context = b\"Stor \";\n\n\n\n Ok(Self {\n\n 0: CryptoManager::new(key, &context, version)?,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct AccountData<'a> {\n\n pub version: u8,\n\n #[serde(with = \"serde_bytes\")]\n\n pub key: &'a [u8],\n\n pub user: LoginResponseUser,\n\n pub server_url: &'a str,\n", "file_path": "src/service.rs", "rank": 57, "score": 46561.43546854775 }, { "content": "struct MainCryptoManager(CryptoManager);\n\n\n\nimpl MainCryptoManager {\n\n pub fn new(key: &[u8; 32], version: u8) -> Result<MainCryptoManager> {\n\n let context 
= b\"Main \";\n\n\n\n Ok(MainCryptoManager {\n\n 0: CryptoManager::new(key, &context, version)?,\n\n })\n\n }\n\n\n\n pub fn login_crypto_manager(&self) -> Result<LoginCryptoManager> {\n\n LoginCryptoManager::keygen(&self.0.asym_key_seed)\n\n }\n\n\n\n pub fn account_crypto_manager(&self, key: &[u8; 32]) -> Result<AccountCryptoManager> {\n\n AccountCryptoManager::new(key, self.0.version)\n\n }\n\n\n\n pub fn identity_crypto_manager(&self, privkey: &[u8; 32]) -> Result<BoxCryptoManager> {\n\n BoxCryptoManager::from_privkey(privkey)\n\n }\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 58, "score": 46561.43546854775 }, { "content": "#[test]\n\nfn padding() {\n\n etebase::init().unwrap();\n\n\n\n // Because of how we use padding (unpadding) we need to make sure padding is always larger than the content\n\n // Otherwise we risk the unpadder to fail thinking it should unpad when it shouldn't.\n\n\n\n for i in 1..(1 << 14) {\n\n if get_padding(i) <= i {\n\n println!(\"Yo\");\n\n assert_eq!(format!(\"Failed for {}\", i), \"\");\n\n }\n\n }\n\n\n\n assert_eq!(get_padding(2343242), 2359296);\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 59, "score": 42750.18358371115 }, { "content": "#[test]\n\nfn pretty_fingerprint() {\n\n etebase::init().unwrap();\n\n\n\n let pubkey = from_base64(USER.pubkey).unwrap();\n\n\n\n let fingerprint = crypto::pretty_fingerprint(&pubkey);\n\n assert_eq!(fingerprint, \"45680 71497 88570 93128\\n19189 84243 25687 20837\\n47924 46071 54113 18789\");\n\n}\n\n\n", "file_path": "tests/crypto.rs", "rank": 60, "score": 40901.53090662994 }, { "content": "#[test]\n\nfn derive_key() {\n\n etebase::init().unwrap();\n\n\n\n let derived = crypto::derive_key(&from_base64(USER.salt).unwrap(), USER.password).unwrap();\n\n let expected = from_base64(USER.key).unwrap();\n\n assert_eq!(&derived[..], &expected[..]);\n\n}\n\n\n", "file_path": "tests/crypto.rs", "rank": 61, "score": 40901.53090662994 }, { "content": "#[test]\n\nfn crypto_mac() {\n\n 
etebase::init().unwrap();\n\n\n\n let key = from_base64(USER.key).unwrap();\n\n\n\n let mut crypto_mac = crypto::CryptoMac::new(None).unwrap();\n\n crypto_mac.update(&[0; 4]).unwrap();\n\n crypto_mac.update_with_len_prefix(&[0; 8]).unwrap();\n\n assert_eq!(\n\n crypto_mac.finalize().unwrap(),\n\n from_base64(\"P-Hpzo86RG6Ps4R1gGXmQrzmdJC2OotqqreKmB8G45A\").unwrap()\n\n );\n\n\n\n let mut crypto_mac = crypto::CryptoMac::new(Some(&key)).unwrap();\n\n crypto_mac.update(&[0; 4]).unwrap();\n\n crypto_mac.update_with_len_prefix(&[0; 8]).unwrap();\n\n assert_eq!(\n\n crypto_mac.finalize().unwrap(),\n\n from_base64(\"rgL6d_XDiBfbzevFdtktc61XB5-PkS1uQ1cj5DgfFc8\").unwrap()\n\n );\n\n}\n\n\n", "file_path": "tests/crypto.rs", "rank": 62, "score": 40901.53090662994 }, { "content": "#[test]\n\nfn pad_unpad() {\n\n etebase::init().unwrap();\n\n\n\n let buf = [0; 1076];\n\n let padded = test_buffer_pad(&buf).unwrap();\n\n let unpadded = test_buffer_unpad(&padded[..]).unwrap();\n\n assert_eq!(unpadded, &buf[..]);\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 63, "score": 40901.53090662994 }, { "content": "#[test]\n\nfn crypto_manager() {\n\n etebase::init().unwrap();\n\n\n\n let key = from_base64(USER.key).unwrap();\n\n let context = b\"Col \";\n\n let crypto_manager = crypto::CryptoManager::new(\n\n &key[0..32].try_into().unwrap(),\n\n context,\n\n etebase::CURRENT_VERSION,\n\n )\n\n .unwrap();\n\n let subkey = crypto_manager.derive_subkey(&[0; 32]).unwrap();\n\n\n\n assert_eq!(\n\n subkey,\n\n from_base64(\"4w-VCSTETv26JjVlVlD2VaACcb6aQSD2JbF-e89xnaA\").unwrap()\n\n );\n\n\n\n let hash = crypto_manager.calculate_mac(&[0; 32]).unwrap();\n\n assert_eq!(\n", "file_path": "tests/crypto.rs", "rank": 64, "score": 40901.53090662994 }, { "content": "#[test]\n\nfn login_crypto_manager() {\n\n etebase::init().unwrap();\n\n\n\n let login_crypto_manager = crypto::LoginCryptoManager::keygen(&[0; 32]).unwrap();\n\n\n\n let msg = b\"This Is Some Test Cleartext.\";\n\n let signature = 
login_crypto_manager.sign_detached(msg).unwrap();\n\n let pubkey = login_crypto_manager.pubkey();\n\n assert!(login_crypto_manager\n\n .verify_detached(msg, &signature, (&pubkey[..]).try_into().unwrap())\n\n .unwrap());\n\n}\n\n\n", "file_path": "tests/crypto.rs", "rank": 65, "score": 39269.619839012295 }, { "content": "#[test]\n\nfn box_crypto_manager() {\n\n etebase::init().unwrap();\n\n\n\n let box_crypto_manager = crypto::BoxCryptoManager::keygen(None).unwrap();\n\n let box_crypto_manager2 = crypto::BoxCryptoManager::keygen(None).unwrap();\n\n\n\n let msg = b\"This Is Some Test Cleartext.\";\n\n let cipher = box_crypto_manager\n\n .encrypt(msg, (&box_crypto_manager2.pubkey()[..]).try_into().unwrap())\n\n .unwrap();\n\n let decrypted = box_crypto_manager2\n\n .decrypt(\n\n &cipher[..],\n\n (&box_crypto_manager.pubkey()[..]).try_into().unwrap(),\n\n )\n\n .unwrap();\n\n assert_eq!(decrypted, msg);\n\n}\n\n\n", "file_path": "tests/crypto.rs", "rank": 66, "score": 39269.619839012295 }, { "content": "#[test]\n\nfn pad_unpad_fixed() {\n\n etebase::init().unwrap();\n\n\n\n let blocksize = 32;\n\n for i in 0..(blocksize * 2) {\n\n let buf = vec![60; i];\n\n let padded = test_buffer_pad_fixed(&buf, blocksize).unwrap();\n\n let unpadded = test_buffer_unpad_fixed(&padded[..], blocksize).unwrap();\n\n assert_eq!(unpadded, &buf[..]);\n\n }\n\n}\n", "file_path": "tests/utils.rs", "rank": 67, "score": 39269.619839012295 }, { "content": "fn print_item(item: &Item) {\n\n println!(\"UID: {}\", &item.uid());\n\n println!(\"Meta: {:?}\", &item.meta().unwrap());\n\n println!();\n\n}\n\n\n", "file_path": "examples/etebase_test.rs", "rank": 68, "score": 34099.27685259732 }, { "content": "fn print_collection(collection: &Collection) {\n\n println!(\"UID: {}\", &collection.uid());\n\n println!(\"Meta: {:?}\", &collection.meta().unwrap());\n\n println!();\n\n}\n\n\n", "file_path": "examples/etebase_test.rs", "rank": 69, "score": 34099.27685259732 }, { "content": "// 
SPDX-FileCopyrightText: © 2020 Etebase Authors\n\n// SPDX-License-Identifier: LGPL-2.1-only\n\n\n\nuse std::error;\n\nuse std::fmt;\n\n\n\n/// A short-hand version of a [std::result::Result] that always returns an Etebase [Error].\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// The error type returned from the Etebase API\n\n#[derive(Debug, Clone)]\n\npub enum Error {\n\n /// A generic error\n\n Generic(String),\n\n /// An error with parsing the a URL (e.g. from the server URL)\n\n UrlParse(String),\n\n /// An error related to msgpack serialization and de-serialization\n\n MsgPack(String),\n\n /// A programming error that indicates the developers are using the API wrong\n\n ProgrammingError(&'static str),\n", "file_path": "src/error.rs", "rank": 70, "score": 32698.48264797126 }, { "content": " }\n\n}\n\n\n\nimpl From<String> for Error {\n\n fn from(err: String) -> Error {\n\n Error::Generic(err)\n\n }\n\n}\n\n\n\nimpl From<url::ParseError> for Error {\n\n fn from(err: url::ParseError) -> Error {\n\n Error::UrlParse(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<std::ffi::NulError> for Error {\n\n fn from(err: std::ffi::NulError) -> Error {\n\n Error::Generic(err.to_string())\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 71, "score": 32697.110363503303 }, { "content": "\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Error {\n\n Error::UrlParse(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<rmp_serde::encode::Error> for Error {\n\n fn from(err: rmp_serde::encode::Error) -> Error {\n\n Error::MsgPack(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<rmp_serde::decode::Error> for Error {\n\n fn from(err: rmp_serde::decode::Error) -> Error {\n\n Error::MsgPack(err.to_string())\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 72, "score": 32695.03787342224 }, { "content": " Error::Unauthorized(s) => s.fmt(f),\n\n Error::Conflict(s) => s.fmt(f),\n\n\n\n Error::Connection(s) => s.fmt(f),\n\n Error::TemporaryServerError(s) => 
s.fmt(f),\n\n Error::ServerError(s) => s.fmt(f),\n\n Error::Http(s) => s.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Error> for String {\n\n fn from(err: Error) -> String {\n\n err.to_string()\n\n }\n\n}\n\n\n\nimpl error::Error for Error {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n None\n", "file_path": "src/error.rs", "rank": 73, "score": 32693.836661848658 }, { "content": " TemporaryServerError(String),\n\n /// There was a server error when processing the request (usually a bug in the server)\n\n ServerError(String),\n\n /// A generic error with the server request\n\n Http(String),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::Generic(s) => s.fmt(f),\n\n Error::UrlParse(s) => s.fmt(f),\n\n Error::MsgPack(s) => s.fmt(f),\n\n Error::ProgrammingError(s) => s.fmt(f),\n\n Error::MissingContent(s) => s.fmt(f),\n\n Error::Padding(s) => s.fmt(f),\n\n Error::Base64(s) => s.fmt(f),\n\n Error::Encryption(s) => s.fmt(f),\n\n Error::PermissionDenied(s) => s.fmt(f),\n\n Error::NotFound(s) => s.fmt(f),\n", "file_path": "src/error.rs", "rank": 74, "score": 32689.927211697766 }, { "content": " /// An attempt to fetch the content of an item that doesn't have the content yet\n\n MissingContent(&'static str),\n\n /// An issue with the padding of the encrypted content\n\n Padding(&'static str),\n\n /// An issue with the Base64 decoding\n\n Base64(&'static str),\n\n /// An issue with the encryption\n\n Encryption(&'static str),\n\n /// An authorization issue from the server\n\n Unauthorized(String),\n\n /// A conflict issue returned from the server, e.g. if a transaction failed\n\n Conflict(String),\n\n /// The operation was not allowed due to permissions\n\n PermissionDenied(String),\n\n /// The requested resource was not found\n\n NotFound(String),\n\n\n\n /// There was an issue with the connection (e.g. 
DNS lookup)\n\n Connection(String),\n\n /// There was an temporary server error (e.g. maintenance, or gateway issues)\n", "file_path": "src/error.rs", "rank": 75, "score": 32684.842462875647 }, { "content": "\n\n let ret: LoginResponse = rmp_serde::from_read_ref(&res)?;\n\n\n\n Ok(ret)\n\n }\n\n\n\n pub fn logout(&self) -> Result<()> {\n\n let url = self.api_base.join(\"logout/\")?;\n\n let res = self.client.post(url.as_str(), vec![])?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn change_password(&self, response: &[u8], signature: &[u8]) -> Result<()> {\n\n let body_struct = LoginBody {\n\n response,\n\n signature,\n\n };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n", "file_path": "src/online_managers.rs", "rank": 83, "score": 30.69590863125075 }, { "content": " let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n let res = res.bytes();\n\n\n\n let ret: LoginResponse = rmp_serde::from_read_ref(&res)?;\n\n\n\n Ok(ret)\n\n }\n\n\n\n pub fn login(&self, response: &[u8], signature: &[u8]) -> Result<LoginResponse> {\n\n let body_struct = LoginBody {\n\n response,\n\n signature,\n\n };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let url = self.api_base.join(\"login/\")?;\n\n let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n let res = res.bytes();\n", "file_path": "src/online_managers.rs", "rank": 84, "score": 30.583609840142223 }, { "content": " let status = resp.status().as_u16();\n\n let ret = Response::new(resp.bytes()?.to_vec(), status);\n\n Ok(ret)\n\n }\n\n}\n\n\n\nimpl ClientImplementation for Client {\n\n fn get(&self, url: &str, auth_token: Option<&str>) -> Response {\n\n match self.get_inner(url, auth_token) {\n\n Ok(resp) => resp,\n\n Err(err) => Response::new_err(err),\n\n }\n\n }\n\n\n\n fn post(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response {\n\n match self.post_inner(url, auth_token, body) {\n\n Ok(resp) => resp,\n\n 
Err(err) => Response::new_err(err),\n\n }\n\n }\n", "file_path": "src/http_client/reqwest_client.rs", "rank": 85, "score": 29.076393294815784 }, { "content": " let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn reject(&self, invitation: &SignedInvitation) -> Result<()> {\n\n let url = self\n\n .api_base\n\n .join(&format!(\"incoming/{}/\", invitation.uid()))?;\n\n\n\n let res = self.client.delete(url.as_str())?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn fetch_user_profile(&self, username: &str) -> Result<UserProfile> {\n", "file_path": "src/online_managers.rs", "rank": 86, "score": 28.200641053908722 }, { "content": " Ok(true)\n\n }\n\n\n\n pub fn get_login_challenge(&self, username: &str) -> Result<LoginChallange> {\n\n #[derive(Serialize)]\n\n struct Body<'a> {\n\n username: &'a str,\n\n }\n\n\n\n let body_struct = Body { username };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let url = self.api_base.join(\"login_challenge/\")?;\n\n let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n let res = res.bytes();\n\n\n\n let ret: LoginChallange = rmp_serde::from_read_ref(&res)?;\n\n\n\n Ok(ret)\n", "file_path": "src/online_managers.rs", "rank": 87, "score": 27.75268946409738 }, { "content": " /// Return the server url associated with this client\n\n pub fn server_url(&self) -> &Url {\n\n &self.api_base\n\n }\n\n\n\n pub(crate) fn get(&self, url: &str) -> Result<Response> {\n\n self.imp.get(url, self.auth_token.as_deref()).into_result()\n\n }\n\n\n\n pub(crate) fn post(&self, url: &str, body: Vec<u8>) -> Result<Response> {\n\n self.imp\n\n .post(url, self.auth_token.as_deref(), body)\n\n .into_result()\n\n }\n\n\n\n pub(crate) fn put(&self, url: &str, body: Vec<u8>) -> Result<Response> {\n\n self.imp\n\n .put(url, self.auth_token.as_deref(), body)\n\n .into_result()\n\n }\n", 
"file_path": "src/http_client/mod.rs", "rank": 88, "score": 27.72710096294723 }, { "content": " let res = self.client.post(url.as_str(), vec![])?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn modify_access_level(\n\n &self,\n\n username: &str,\n\n access_level: CollectionAccessLevel,\n\n ) -> Result<()> {\n\n let url = self.api_base.join(&format!(\"{}/\", username))?;\n\n\n\n #[derive(Serialize)]\n\n #[serde(rename_all = \"camelCase\")]\n\n struct Body {\n\n access_level: CollectionAccessLevel,\n\n }\n\n\n\n let body_struct = Body { access_level };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let res = self.client.patch(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/online_managers.rs", "rank": 89, "score": 26.910647017869177 }, { "content": " let req = self.prep_client(self.req_client.get(url), auth_token);\n\n let resp = req.send()?;\n\n let status = resp.status().as_u16();\n\n let ret = Response::new(resp.bytes()?.to_vec(), status);\n\n Ok(ret)\n\n }\n\n\n\n fn post_inner(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Result<Response> {\n\n let req = self\n\n .prep_client(self.req_client.post(url), auth_token)\n\n .body(body);\n\n let resp = req.send()?;\n\n let status = resp.status().as_u16();\n\n let ret = Response::new(resp.bytes()?.to_vec(), status);\n\n Ok(ret)\n\n }\n\n\n\n fn put_inner(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Result<Response> {\n\n let req = self\n\n .prep_client(self.req_client.put(url), auth_token)\n", "file_path": "src/http_client/reqwest_client.rs", "rank": 90, "score": 26.899589473168387 }, { "content": " .body(body);\n\n let resp = req.send()?;\n\n let status = resp.status().as_u16();\n\n let ret = Response::new(resp.bytes()?.to_vec(), status);\n\n Ok(ret)\n\n }\n\n\n\n fn patch_inner(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Result<Response> {\n\n let req = self\n\n 
.prep_client(self.req_client.patch(url), auth_token)\n\n .body(body);\n\n let resp = req.send()?;\n\n let status = resp.status().as_u16();\n\n let ret = Response::new(resp.bytes()?.to_vec(), status);\n\n Ok(ret)\n\n }\n\n\n\n fn delete_inner(&self, url: &str, auth_token: Option<&str>) -> Result<Response> {\n\n let req = self.prep_client(self.req_client.delete(url), auth_token);\n\n let resp = req.send()?;\n", "file_path": "src/http_client/reqwest_client.rs", "rank": 91, "score": 26.504021757327692 }, { "content": "\n\n fn put(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response {\n\n match self.put_inner(url, auth_token, body) {\n\n Ok(resp) => resp,\n\n Err(err) => Response::new_err(err),\n\n }\n\n }\n\n\n\n fn patch(&self, url: &str, auth_token: Option<&str>, body: Vec<u8>) -> Response {\n\n match self.patch_inner(url, auth_token, body) {\n\n Ok(resp) => resp,\n\n Err(err) => Response::new_err(err),\n\n }\n\n }\n\n\n\n fn delete(&self, url: &str, auth_token: Option<&str>) -> Response {\n\n match self.delete_inner(url, auth_token) {\n\n Ok(resp) => resp,\n\n Err(err) => Response::new_err(err),\n\n }\n\n }\n\n}\n", "file_path": "src/http_client/reqwest_client.rs", "rank": 92, "score": 25.367002161038926 }, { "content": "\n\n pub(crate) fn patch(&self, url: &str, body: Vec<u8>) -> Result<Response> {\n\n self.imp\n\n .patch(url, self.auth_token.as_deref(), body)\n\n .into_result()\n\n }\n\n\n\n pub(crate) fn delete(&self, url: &str) -> Result<Response> {\n\n self.imp\n\n .delete(url, self.auth_token.as_deref())\n\n .into_result()\n\n }\n\n}\n", "file_path": "src/http_client/mod.rs", "rank": 93, "score": 24.61915181284955 }, { "content": "\n\n let url = self.api_base.join(\"change_password/\")?;\n\n let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn fetch_dashboard_url(&self) -> Result<String> {\n\n #[derive(Deserialize)]\n\n struct Ret {\n\n pub url: String,\n\n }\n\n\n\n let url = 
self.api_base.join(\"dashboard_url/\")?;\n\n let res = self.client.post(url.as_str(), vec![])?;\n\n res.error_for_status()?;\n\n let res = res.bytes();\n\n\n\n let ret: Ret = rmp_serde::from_read_ref(&res)?;\n", "file_path": "src/online_managers.rs", "rank": 94, "score": 24.386466754972155 }, { "content": " let body_struct = Body { collection_types };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let res = self.client.post(url.as_str(), body)?;\n\n res.error_for_status()?;\n\n let res = res.bytes();\n\n\n\n let serialized: CollectionListResponse<EncryptedCollection> =\n\n rmp_serde::from_read_ref(&res)?;\n\n serialized.data.iter().for_each(|x| x.mark_saved());\n\n\n\n Ok(serialized)\n\n }\n\n\n\n pub fn create(\n\n &self,\n\n collection: &EncryptedCollection,\n\n options: Option<&FetchOptions>,\n\n ) -> Result<()> {\n\n let url = apply_fetch_options(self.api_base.clone(), options);\n", "file_path": "src/online_managers.rs", "rank": 95, "score": 24.2003767112605 }, { "content": " ///\n\n /// The client object manages the connection to the Etebase server\n\n ///\n\n /// # Arguments:\n\n /// * `client_name` - a string identifier for the client\n\n /// * `server_url` - the Etebase server URL\n\n #[cfg(feature = \"networking\")]\n\n pub fn new(client_name: &str, server_url: &str) -> Result<Self> {\n\n let imp = ReqwestImpl::new(client_name)?;\n\n Ok(Self {\n\n api_base: Self::normalize_url(server_url)?,\n\n auth_token: None,\n\n imp: Arc::new(imp),\n\n })\n\n }\n\n\n\n #[cfg(not(feature = \"networking\"))]\n\n pub fn new_with_impl(server_url: &str, imp: Box<dyn ClientImplementation>) -> Result<Self> {\n\n Ok(Self {\n\n api_base: Self::normalize_url(server_url)?,\n", "file_path": "src/http_client/mod.rs", "rank": 96, "score": 24.02885789781851 }, { "content": " let body_struct = ItemBatchBody {\n\n items: &items,\n\n deps,\n\n };\n\n let body = rmp_serde::to_vec_named(&body_struct)?;\n\n\n\n let res = self.client.post(url.as_str(), body)?;\n\n 
res.error_for_status()?;\n\n\n\n for item in items {\n\n item.mark_saved();\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn transaction<'a, I, J>(\n\n &self,\n\n items: I,\n\n deps: J,\n", "file_path": "src/online_managers.rs", "rank": 97, "score": 23.507630687105387 }, { "content": " options: Option<&FetchOptions>,\n\n ) -> Result<Vec<u8>> {\n\n let url = apply_fetch_options(\n\n self.api_base\n\n .join(&format!(\"{}/chunk/{}/download/\", item_uid, chunk_uid))?,\n\n options,\n\n );\n\n let res = self.client.get(url.as_str())?;\n\n res.error_for_status()?;\n\n\n\n Ok(res.bytes().to_vec())\n\n }\n\n}\n\n\n\n/// A member of a collection\n\n///\n\n/// Obtained using [CollectionManager::list](crate::managers::CollectionManager::list)\n\n#[derive(Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CollectionMember {\n", "file_path": "src/online_managers.rs", "rank": 98, "score": 22.387845661363055 }, { "content": " /// Unlike [Self::login], this uses a strong key instead of a password\n\n ///\n\n /// # Arguments:\n\n /// * `username` - the user's username. This is not the same as the user's email.\n\n /// * `client` - the already setup [Client] object\n\n /// * `main_key` - the key to signup with\n\n pub fn login_key(client: Client, username: &str, main_key: &[u8]) -> Result<Self> {\n\n super::init()?;\n\n\n\n if main_key.len() < SYMMETRIC_KEY_SIZE {\n\n return Err(Error::ProgrammingError(\n\n \"Key should be at least 32 bytes long.\",\n\n ));\n\n }\n\n\n\n let authenticator = Authenticator::new(&client);\n\n let login_challenge = match authenticator.get_login_challenge(username) {\n\n Err(Error::Unauthorized(s)) => {\n\n // FIXME: fragile, we should have a proper error value or actually use codes\n\n if s == \"User not properly init\" {\n", "file_path": "src/service.rs", "rank": 99, "score": 22.30806762221205 } ]
Rust
src/http.rs
mdheller/monolith
625c529cf1409848aa3ce42a74991d0d69c75d88
use regex::Regex; use reqwest::header::{CONTENT_TYPE, USER_AGENT}; use reqwest::{Client, RedirectPolicy}; use std::time::Duration; use url::{ParseError, Url}; use utils::data_to_dataurl; lazy_static! { static ref REGEX_URL: Regex = Regex::new(r"^https?://").unwrap(); } pub fn is_data_url(url: &str) -> Result<bool, String> { match Url::parse(url) { Ok(parsed_url) => Ok(parsed_url.scheme() == "data"), Err(err) => Err(format!("{}", err)), } } pub fn is_valid_url(path: &str) -> bool { REGEX_URL.is_match(path) } pub fn resolve_url(from: &str, to: &str) -> Result<String, ParseError> { let result = if is_valid_url(to) { to.to_string() } else { Url::parse(from)?.join(to)?.to_string() }; Ok(result) } pub fn retrieve_asset( url: &str, as_dataurl: bool, as_mime: &str, opt_user_agent: &str, ) -> Result<String, reqwest::Error> { if is_data_url(&url).unwrap() { Ok(url.to_string()) } else { let client = Client::builder() .redirect(RedirectPolicy::limited(3)) .timeout(Duration::from_secs(10)) .build() .unwrap(); let mut response = client .get(url) .header(USER_AGENT, opt_user_agent) .send() .unwrap(); if as_dataurl { let mut data: Vec<u8> = vec![]; response.copy_to(&mut data)?; let mimetype = if as_mime == "" { response .headers() .get(CONTENT_TYPE) .and_then(|header| header.to_str().ok()) .unwrap_or(&as_mime) } else { as_mime }; Ok(data_to_dataurl(&mimetype, &data)) } else { Ok(response.text().unwrap()) } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_is_url() { assert!(is_valid_url("https://www.rust-lang.org/")); assert!(is_valid_url("http://kernel.org")); assert!(!is_valid_url("./index.html")); assert!(!is_valid_url("some-local-page.htm")); assert!(!is_valid_url("ftp://1.2.3.4/www/index.html")); assert!(!is_valid_url( "data:text/html;base64,V2VsY29tZSBUbyBUaGUgUGFydHksIDxiPlBhbDwvYj4h" )); } #[test] fn test_resolve_url() -> Result<(), ParseError> { let resolved_url = resolve_url( "https://www.kernel.org", "../category/signatures.html", )?; assert_eq!( 
resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" ); let resolved_url = resolve_url( "https://www.kernel.org", "category/signatures.html", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" ); let resolved_url = resolve_url( "saved_page.htm", "https://www.kernel.org/category/signatures.html", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" ); let resolved_url = resolve_url( "https://www.kernel.org", "//www.kernel.org/theme/images/logos/tux.png", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.kernel.org", "//another-host.org/theme/images/logos/tux.png", )?; assert_eq!( resolved_url.as_str(), "https://another-host.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.kernel.org/category/signatures.html", "/theme/images/logos/tux.png", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.w3schools.com/html/html_iframe.asp", "default.asp", )?; assert_eq!( resolved_url.as_str(), "https://www.w3schools.com/html/default.asp" ); Ok(()) } #[test] fn test_is_data_url() { assert!( is_data_url("data:text/html;base64,V2VsY29tZSBUbyBUaGUgUGFydHksIDxiPlBhbDwvYj4h") .unwrap_or(false) ); assert!(!is_data_url("https://kernel.org").unwrap_or(false)); assert!(!is_data_url("//kernel.org").unwrap_or(false)); } }
use regex::Regex; use reqwest::header::{CONTENT_TYPE, USER_AGENT}; use reqwest::{Client, RedirectPolicy}; use std::time::Duration; use url::{ParseError, Url}; use utils::data_to_dataurl; lazy_static! { static ref REGEX_URL: Regex = Regex::new(r"^https?://").unwrap(); } pub fn is_data_url(url: &str) -> Result<bool, String> { match Url::parse(url) { Ok(parsed_url) => Ok(parsed_url.scheme() == "data"), Err(err) => Err(format!("{}", err)), } } pub fn is_valid_url(path: &str) -> bool { REGEX_URL.is_match(path) } pub fn resolve_url(from: &str, to: &str) -> Result<String, ParseError> { let result = if is_valid_url(to) { to.to_string() } else { Url::parse(from)?.join(to)?.to_string() }; Ok(result) } pub fn retrieve_asset( url: &str, as_dataurl: bool, as_mime: &str, opt_user_agent: &str, ) -> Result<String, reqwest::Error> { if is_data_url(&url).unwrap() { Ok(url.to_string()) } else { let client = Client::builder() .redirect(RedirectPolicy::limited(3)) .timeout(Duration::from_secs(10)) .build() .unwrap(); let mut response = client .get(url) .header(USER_AGENT, opt_user_agent) .send() .unwrap(); if as_dataurl { let mut data: Vec<u8> = vec![]; response.copy_to(&mut data)?; let mimetype = if as_mime == "" { response .headers() .get(CONTENT_TYPE) .and_then(|header| header.to_str().ok()) .unwrap_or(&as_mime) } else { as_mime }; Ok(data_to_dataurl(&mimetype, &data)) } else { Ok(response.text().unwrap()) } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_is_url() { assert!(is_valid_url("https://www.rust-lang.org/")); assert!(is_valid_url("http://kernel.org")); assert!(!is_valid_url("./index.html")); assert!(!is_valid_url("some-local-page.htm")); assert!(!is_valid_url("ftp://1.2.3.4/www/index.html")); assert!(!is_valid_url( "data:text/html;base64,V2VsY29tZSBUbyBUaGUgUGFydHksIDxiPlBhbDwvYj4h" )); } #[test] fn test_resolve_url() -> Result<(), ParseError> { let resolved_url = resolve_url( "https://www.kernel.org", "../category/signatures.html", )?; assert_eq!( 
resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" ); let resolved_url = resolve_url( "https://www.kernel.org", "category/signatures.html", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" ); let resolved_url = resolve_url( "saved_page.htm", "https://www.kernel.org/category/signatures.html", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/category/signatures.html" );
assert_eq!( resolved_url.as_str(), "https://www.kernel.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.kernel.org", "//another-host.org/theme/images/logos/tux.png", )?; assert_eq!( resolved_url.as_str(), "https://another-host.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.kernel.org/category/signatures.html", "/theme/images/logos/tux.png", )?; assert_eq!( resolved_url.as_str(), "https://www.kernel.org/theme/images/logos/tux.png" ); let resolved_url = resolve_url( "https://www.w3schools.com/html/html_iframe.asp", "default.asp", )?; assert_eq!( resolved_url.as_str(), "https://www.w3schools.com/html/default.asp" ); Ok(()) } #[test] fn test_is_data_url() { assert!( is_data_url("data:text/html;base64,V2VsY29tZSBUbyBUaGUgUGFydHksIDxiPlBhbDwvYj4h") .unwrap_or(false) ); assert!(!is_data_url("https://kernel.org").unwrap_or(false)); assert!(!is_data_url("//kernel.org").unwrap_or(false)); } }
let resolved_url = resolve_url( "https://www.kernel.org", "//www.kernel.org/theme/images/logos/tux.png", )?;
assignment_statement
[ { "content": "pub fn data_to_dataurl(mime: &str, data: &[u8]) -> String {\n\n let mimetype = if mime == \"\" {\n\n detect_mimetype(data)\n\n } else {\n\n mime.to_string()\n\n };\n\n format!(\"data:{};base64,{}\", mimetype, encode(data))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 2, "score": 117320.25826834035 }, { "content": "fn detect_mimetype(data: &[u8]) -> String {\n\n let mut re = String::new();\n\n\n\n for item in MAGIC.iter() {\n\n if data.starts_with(item[0]) {\n\n re = String::from_utf8(item[1].to_vec()).unwrap();\n\n break;\n\n }\n\n }\n\n\n\n re\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_data_to_dataurl() {\n\n let mime = \"application/javascript\";\n", "file_path": "src/utils.rs", "rank": 4, "score": 94466.96544421851 }, { "content": "pub fn html_to_dom(data: &str) -> html5ever::rcdom::RcDom {\n\n parse_document(RcDom::default(), Default::default())\n\n .from_utf8()\n\n .read_from(&mut data.as_bytes())\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/html.rs", "rank": 5, "score": 84954.00394761766 }, { "content": "fn is_icon(attr_value: &str) -> bool {\n\n attr_value == \"icon\"\n\n || attr_value == \"shortcut icon\"\n\n || attr_value == \"mask-icon\"\n\n || attr_value == \"apple-touch-icon\"\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_is_icon() {\n\n assert_eq!(is_icon(\"icon\"), true);\n\n assert_eq!(is_icon(\"stylesheet\"), false);\n\n }\n\n}\n", "file_path": "src/html.rs", "rank": 6, "score": 76326.94195056424 }, { "content": "pub fn walk_and_embed_assets(\n\n url: &str,\n\n node: &Handle,\n\n opt_no_js: bool,\n\n opt_no_images: bool,\n\n opt_user_agent: &str,\n\n) {\n\n match node.data {\n\n NodeData::Document => {\n\n // Dig deeper\n\n for child in node.children.borrow().iter() {\n\n walk_and_embed_assets(\n\n &url, child,\n\n opt_no_js,\n\n opt_no_images,\n\n opt_user_agent,\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/html.rs", "rank": 8, "score": 
49817.74889738714 }, { "content": "pub fn print_dom(handle: &Handle) {\n\n serialize(&mut io::stdout(), handle, SerializeOpts::default()).unwrap();\n\n}\n\n\n", "file_path": "src/html.rs", "rank": 9, "score": 45968.12204553149 }, { "content": "fn main() {\n\n let command = App::new(\"monolith\")\n\n .version(crate_version!())\n\n .author(crate_authors!(\"\\n\"))\n\n .about(crate_description!())\n\n .arg(\n\n Arg::with_name(\"url\")\n\n .required(true)\n\n .takes_value(true)\n\n .index(1)\n\n .help(\"URL to download\"),\n\n )\n\n .args_from_usage(\"-j, --no-js 'Excludes JavaScript'\")\n\n .args_from_usage(\"-i, --no-images 'Removes images'\")\n\n .args_from_usage(\"-u, --user-agent=[Iceweasel] 'Custom User-Agent string'\")\n\n .get_matches();\n\n\n\n // Process the command\n\n let arg_target = command.value_of(\"url\").unwrap();\n\n let opt_no_js = command.is_present(\"no-js\");\n", "file_path": "src/main.rs", "rank": 10, "score": 25896.37476531885 }, { "content": "enum NodeMatch {\n\n Icon,\n\n Image,\n\n StyleSheet,\n\n Anchor,\n\n Script,\n\n Form,\n\n IFrame,\n\n Other,\n\n}\n\n\n\nconst TRANSPARENT_PIXEL: &str = \"data:image/png;base64,\\\n\niVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=\";\n\n\n\nconst JS_DOM_EVENT_ATTRS: [&str; 21] = [\n\n // Input\n\n \"onfocus\",\n\n \"onblur\",\n\n \"onselect\",\n\n \"onchange\",\n", "file_path": "src/html.rs", "rank": 11, "score": 19497.251260816553 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\nextern crate html5ever;\n\nextern crate regex;\n\nextern crate reqwest;\n\nextern crate url;\n\n\n\npub mod html;\n\npub mod http;\n\npub mod utils;\n", "file_path": "src/lib.rs", "rank": 14, "score": 11.144748735820778 }, { "content": " attr.value.clear();\n\n attr.value.push_slice(href_full_url.unwrap().as_str());\n\n }\n\n }\n\n }\n\n NodeMatch::IFrame => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"src\" {\n\n let src_full_url = 
resolve_url(&url, &attr.value.to_string()).unwrap();\n\n let iframe_data = retrieve_asset(\n\n &src_full_url,\n\n false,\n\n \"text/html\",\n\n opt_user_agent,\n\n );\n\n let dom = html_to_dom(&iframe_data.unwrap());\n\n walk_and_embed_assets(\n\n &src_full_url,\n\n &dom.document,\n\n opt_no_js,\n", "file_path": "src/html.rs", "rank": 15, "score": 9.82606280621344 }, { "content": "\n\n match found {\n\n NodeMatch::Icon => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"href\" {\n\n let href_full_url = resolve_url(&url, &attr.value.to_string());\n\n let favicon_datauri = retrieve_asset(\n\n &href_full_url.unwrap(),\n\n true,\n\n \"\",\n\n opt_user_agent,\n\n );\n\n attr.value.clear();\n\n attr.value.push_slice(favicon_datauri.unwrap().as_str());\n\n }\n\n }\n\n }\n\n NodeMatch::Image => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"src\" {\n", "file_path": "src/html.rs", "rank": 16, "score": 9.299705652419327 }, { "content": " &src_full_url.unwrap(),\n\n true,\n\n \"application/javascript\",\n\n opt_user_agent,\n\n );\n\n attr.value.clear();\n\n attr.value.push_slice(js_datauri.unwrap().as_str());\n\n }\n\n }\n\n }\n\n }\n\n NodeMatch::Form => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"action\" {\n\n // Do not touch action props which are set to a URL\n\n if is_valid_url(&attr.value) {\n\n continue;\n\n }\n\n\n\n let href_full_url = resolve_url(&url, &attr.value.to_string());\n", "file_path": "src/html.rs", "rank": 17, "score": 8.574032713642858 }, { "content": " if opt_no_images {\n\n attr.value.clear();\n\n attr.value.push_slice(TRANSPARENT_PIXEL);\n\n } else {\n\n let src_full_url = resolve_url(&url, &attr.value.to_string());\n\n let img_datauri = retrieve_asset(\n\n &src_full_url.unwrap(),\n\n true,\n\n \"\",\n\n opt_user_agent,\n\n );\n\n attr.value.clear();\n\n attr.value.push_slice(img_datauri.unwrap().as_str());\n\n }\n\n }\n\n }\n\n }\n\n NodeMatch::Anchor => {\n\n for attr in 
attrs_mut.iter_mut() {\n\n if &attr.name.local == \"href\" {\n", "file_path": "src/html.rs", "rank": 20, "score": 8.216938145506797 }, { "content": " opt_no_images,\n\n opt_user_agent,\n\n );\n\n let mut buf: Vec<u8> = Vec::new();\n\n serialize(&mut buf, &dom.document, SerializeOpts::default()).unwrap();\n\n let iframe_datauri = data_to_dataurl(\"text/html\", &buf);\n\n attr.value.clear();\n\n attr.value.push_slice(iframe_datauri.as_str());\n\n }\n\n }\n\n }\n\n NodeMatch::Other => {}\n\n }\n\n\n\n if opt_no_js {\n\n // Get rid of JS event attributes\n\n for attr in attrs_mut.iter_mut() {\n\n if JS_DOM_EVENT_ATTRS.contains(&attr.name.local.to_lowercase().as_str()) {\n\n attr.value.clear();\n\n }\n", "file_path": "src/html.rs", "rank": 21, "score": 8.191712624100267 }, { "content": " // Don't touch email links or hrefs which begin with a hash sign\n\n if attr.value.starts_with('#') || attr.value.starts_with(\"mailto:\") {\n\n continue;\n\n }\n\n\n\n let href_full_url = resolve_url(&url, &attr.value.to_string());\n\n attr.value.clear();\n\n attr.value.push_slice(href_full_url.unwrap().as_str());\n\n }\n\n }\n\n }\n\n NodeMatch::StyleSheet => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"href\" {\n\n let href_full_url = resolve_url(&url, &attr.value.to_string());\n\n let css_datauri = retrieve_asset(\n\n &href_full_url.unwrap(),\n\n true,\n\n \"text/css\",\n\n opt_user_agent,\n", "file_path": "src/html.rs", "rank": 22, "score": 8.117074001211437 }, { "content": " );\n\n attr.value.clear();\n\n attr.value.push_slice(css_datauri.unwrap().as_str());\n\n }\n\n }\n\n }\n\n NodeMatch::Script => {\n\n if opt_no_js {\n\n // Get rid of src and inner content of SCRIPT tags\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"src\" {\n\n attr.value.clear();\n\n }\n\n }\n\n node.children.borrow_mut().clear();\n\n } else {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"src\" {\n\n let src_full_url = resolve_url(&url, 
&attr.value.to_string());\n\n let js_datauri = retrieve_asset(\n", "file_path": "src/html.rs", "rank": 23, "score": 7.794746785675814 }, { "content": " NodeData::Doctype { .. } => {}\n\n\n\n NodeData::Text { .. } => {}\n\n\n\n NodeData::Comment { .. } => {\n\n // Note: in case of opt_no_js being set to true, there's no need to worry about\n\n // getting rid of comments that may contain scripts, e.g. <!--[if IE]><script>...\n\n // since that's not part of W3C standard and therefore gets ignored\n\n // by browsers other than IE [5, 9]\n\n }\n\n\n\n NodeData::Element {\n\n ref name,\n\n ref attrs,\n\n ..\n\n } => {\n\n let attrs_mut = &mut attrs.borrow_mut();\n\n let mut found = NodeMatch::Other;\n\n\n\n match name.local.as_ref() {\n", "file_path": "src/html.rs", "rank": 24, "score": 7.758136805005591 }, { "content": "#[macro_use]\n\nextern crate clap;\n\nextern crate monolith;\n\n\n\nuse clap::{App, Arg};\n\nuse monolith::html::{html_to_dom, print_dom, walk_and_embed_assets};\n\nuse monolith::http::{is_valid_url, retrieve_asset};\n\n\n\nstatic DEFAULT_USER_AGENT: &str =\n\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:66.0) Gecko/20100101 Firefox/66.0\";\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 5.890883376889726 }, { "content": "use http::{is_valid_url, resolve_url, retrieve_asset};\n\nuse std::default::Default;\n\nuse std::io;\n\nuse utils::data_to_dataurl;\n\n\n\nuse html5ever::parse_document;\n\nuse html5ever::rcdom::{Handle, NodeData, RcDom};\n\nuse html5ever::serialize::{serialize, SerializeOpts};\n\nuse html5ever::tendril::TendrilSink;\n\n\n", "file_path": "src/html.rs", "rank": 27, "score": 5.796734695517056 }, { "content": " \"link\" => {\n\n for attr in attrs_mut.iter_mut() {\n\n if &attr.name.local == \"rel\" {\n\n if is_icon(&attr.value.to_string()) {\n\n found = NodeMatch::Icon;\n\n break;\n\n } else if attr.value.to_string() == \"stylesheet\" {\n\n found = NodeMatch::StyleSheet;\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n \"img\" => { 
found = NodeMatch::Image; }\n\n \"a\" => { found = NodeMatch::Anchor; }\n\n \"script\" => { found = NodeMatch::Script; }\n\n \"form\" => { found = NodeMatch::Form; }\n\n \"iframe\" => { found = NodeMatch::IFrame; }\n\n _ => {}\n\n }\n", "file_path": "src/html.rs", "rank": 28, "score": 5.77202462202498 }, { "content": " let data = \"var word = 'hello';\\nalert(word);\\n\";\n\n let datauri = data_to_dataurl(mime, data.as_bytes());\n\n assert_eq!(\n\n &datauri,\n\n \"data:application/javascript;base64,dmFyIHdvcmQgPSAnaGVsbG8nOwphbGVydCh3b3JkKTsK\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_detect_mimetype() {\n\n // Image\n\n assert_eq!(detect_mimetype(b\"GIF87a\"), \"image/gif\");\n\n assert_eq!(detect_mimetype(b\"GIF89a\"), \"image/gif\");\n\n assert_eq!(detect_mimetype(b\"\\xFF\\xD8\\xFF\"), \"image/jpeg\");\n\n assert_eq!(detect_mimetype(b\"\\x89PNG\\x0D\\x0A\\x1A\\x0A\"), \"image/png\");\n\n assert_eq!(detect_mimetype(b\"<?xml \"), \"image/svg+xml\");\n\n assert_eq!(detect_mimetype(b\"<svg \"), \"image/svg+xml\");\n\n assert_eq!(detect_mimetype(b\"RIFF....WEBPVP8 \"), \"image/webp\");\n\n assert_eq!(detect_mimetype(b\"\\x00\\x00\\x01\\x00\"), \"image/x-icon\");\n\n // Audio\n", "file_path": "src/utils.rs", "rank": 29, "score": 5.313723918935047 }, { "content": " let opt_no_images = command.is_present(\"no-images\");\n\n let opt_user_agent = command.value_of(\"user-agent\").unwrap_or(DEFAULT_USER_AGENT);\n\n\n\n if is_valid_url(arg_target) {\n\n let data = retrieve_asset(&arg_target, false, \"\", opt_user_agent).unwrap();\n\n let dom = html_to_dom(&data);\n\n\n\n walk_and_embed_assets(\n\n &arg_target,\n\n &dom.document,\n\n opt_no_js,\n\n opt_no_images,\n\n opt_user_agent,\n\n );\n\n\n\n print_dom(&dom.document);\n\n println!(); // Ensure newline at end of output\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 30, "score": 4.084997371713607 }, { "content": " }\n\n }\n\n\n\n // Dig deeper\n\n for child in node.children.borrow().iter() {\n\n 
walk_and_embed_assets(\n\n &url,\n\n child,\n\n opt_no_js,\n\n opt_no_images,\n\n opt_user_agent,\n\n );\n\n }\n\n }\n\n\n\n NodeData::ProcessingInstruction { .. } => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/html.rs", "rank": 33, "score": 2.764249087327243 }, { "content": " assert_eq!(detect_mimetype(b\"ID3\"), \"audio/mpeg\");\n\n assert_eq!(detect_mimetype(b\"\\xFF\\x0E\"), \"audio/mpeg\");\n\n assert_eq!(detect_mimetype(b\"\\xFF\\x0F\"), \"audio/mpeg\");\n\n assert_eq!(detect_mimetype(b\"OggS\"), \"audio/ogg\");\n\n assert_eq!(detect_mimetype(b\"RIFF....WAVEfmt \"), \"audio/wav\");\n\n assert_eq!(detect_mimetype(b\"fLaC\"), \"audio/x-flac\");\n\n // Video\n\n assert_eq!(detect_mimetype(b\"RIFF....AVI LIST\"), \"video/avi\");\n\n assert_eq!(detect_mimetype(b\"....ftyp\"), \"video/mp4\");\n\n assert_eq!(detect_mimetype(b\"\\x00\\x00\\x01\\x0B\"), \"video/mpeg\");\n\n assert_eq!(detect_mimetype(b\"....moov\"), \"video/quicktime\");\n\n assert_eq!(detect_mimetype(b\"\\x1A\\x45\\xDF\\xA3\"), \"video/webm\");\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 34, "score": 2.6147526071836937 }, { "content": "extern crate base64;\n\n\n\nuse self::base64::encode;\n\n\n\nstatic MAGIC: [[&[u8]; 2]; 19] = [\n\n // Image\n\n [b\"GIF87a\", b\"image/gif\"],\n\n [b\"GIF89a\", b\"image/gif\"],\n\n [b\"\\xFF\\xD8\\xFF\", b\"image/jpeg\"],\n\n [b\"\\x89PNG\\x0D\\x0A\\x1A\\x0A\", b\"image/png\"],\n\n [b\"<?xml \", b\"image/svg+xml\"],\n\n [b\"<svg \", b\"image/svg+xml\"],\n\n [b\"RIFF....WEBPVP8 \", b\"image/webp\"],\n\n [b\"\\x00\\x00\\x01\\x00\", b\"image/x-icon\"],\n\n // Audio\n\n [b\"ID3\", b\"audio/mpeg\"],\n\n [b\"\\xFF\\x0E\", b\"audio/mpeg\"],\n\n [b\"\\xFF\\x0F\", b\"audio/mpeg\"],\n\n [b\"OggS\", b\"audio/ogg\"],\n\n [b\"RIFF....WAVEfmt \", b\"audio/wav\"],\n\n [b\"fLaC\", b\"audio/x-flac\"],\n\n // Video\n\n [b\"RIFF....AVI LIST\", b\"video/avi\"],\n\n [b\"....ftyp\", b\"video/mp4\"],\n\n [b\"\\x00\\x00\\x01\\x0B\", b\"video/mpeg\"],\n\n [b\"....moov\", 
b\"video/quicktime\"],\n\n [b\"\\x1A\\x45\\xDF\\xA3\", b\"video/webm\"],\n\n];\n\n\n", "file_path": "src/utils.rs", "rank": 35, "score": 1.7035908438910878 }, { "content": "# monolith\n\n\n\nA data hoarder's dream come true: bundle any web page into a single HTML file. \n\nYou can finally replace that gazillion of open tabs with a gazillion of .html files stored somewhere on your precious little drive.\n\n\n\nUnlike conventional \"Save page as…\", `monolith` not only saves the target document,\n\nit embeds CSS, image, and JavaScript assets **all at once**, producing a single HTML5 document that is a joy to store and share.\n\n\n\nIf compared to saving websites with `wget -mpk`, `monolith` embeds all assets as data URLs and therefore displays the saved page exactly the same, being completely separated from the Internet.\n\n\n\n<!-- `This program works both on remote and local targets. -->\n\n\n\n### Installation\n\n $ git clone https://github.com/Y2Z/monolith.git\n\n $ cd monolith\n\n $ cargo install --path .\n\n\n\n### Usage\n\n $ monolith https://lyrics.github.io/db/p/portishead/dummy/roads/ > portishead-roads-lyrics.html\n\n\n\n### Options\n\n - `-i`: Remove images\n\n - `-j`: Exclude JavaScript\n\n - `-u`: Specify custom User-Agent\n\n\n\n### License\n\nThe Unlicense\n\n\n\n<!-- Microtext -->\n\n<sub>Keep in mind that `monolith` is not aware of your browser's session</sub>\n", "file_path": "README.md", "rank": 36, "score": 1.070330139623277 } ]
Rust
src/board/builder.rs
veeso/chess-engine-harmon
463474a593e10695281309293677f2d96a06ddd6
use super::{Board, Color, Piece, Square, BLACK, WHITE}; pub struct BoardBuilder { board: Board, } impl From<Board> for BoardBuilder { fn from(board: Board) -> Self { Self { board } } } impl Default for BoardBuilder { fn default() -> Self { let mut board = Board::empty(); board.white_castling_rights.disable_all(); board.black_castling_rights.disable_all(); Self { board } } } impl BoardBuilder { pub fn row(mut self, piece: Piece) -> Self { let mut pos = piece.get_pos(); while pos.get_col() > 0 { pos = pos.next_left() } for _ in 0..8 { *self.board.get_square(pos) = Square::from(piece.move_to(pos)); pos = pos.next_right(); } self } pub fn column(mut self, piece: Piece) -> Self { let mut pos = piece.get_pos(); while pos.get_row() > 0 { pos = pos.next_below() } for _ in 0..8 { *self.board.get_square(pos) = Square::from(piece.move_to(pos)); pos = pos.next_above(); } self } pub fn piece(mut self, piece: Piece) -> Self { let pos = piece.get_pos(); *self.board.get_square(pos) = Square::from(piece); self } pub fn enable_castling(mut self) -> Self { self.board.black_castling_rights.enable_all(); self.board.white_castling_rights.enable_all(); self } pub fn disable_castling(mut self) -> Self { self.board.black_castling_rights.disable_all(); self.board.white_castling_rights.disable_all(); self } pub fn enable_queenside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.enable_queenside(), BLACK => self.board.black_castling_rights.enable_queenside(), } self } pub fn disable_queenside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.disable_queenside(), BLACK => self.board.black_castling_rights.disable_queenside(), } self } pub fn enable_kingside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.enable_kingside(), BLACK => self.board.black_castling_rights.enable_kingside(), } self } pub fn disable_kingside_castle(mut self, color: Color) -> 
Self { match color { WHITE => self.board.white_castling_rights.disable_kingside(), BLACK => self.board.black_castling_rights.disable_kingside(), } self } pub fn player_moving(mut self, color: Color) -> Self { self.board.turn = color; self } pub fn build(self) -> Board { self.board } } #[cfg(test)] mod test { use super::*; use crate::position::*; use crate::{BLACK, WHITE}; use pretty_assertions::assert_eq; #[test] fn default() { let builder: BoardBuilder = BoardBuilder::default(); assert_eq!(builder.board.get_legal_moves(WHITE).is_empty(), true); assert_eq!(builder.board.get_legal_moves(BLACK).is_empty(), true); assert_eq!( builder.board.black_castling_rights.can_kingside_castle(), false ); assert_eq!( builder.board.black_castling_rights.can_queenside_castle(), false ); assert_eq!( builder.board.white_castling_rights.can_kingside_castle(), false ); assert_eq!( builder.board.white_castling_rights.can_queenside_castle(), false ); } #[test] fn from() { let builder: BoardBuilder = BoardBuilder::from(Board::default()); assert_eq!(builder.board.get_legal_moves(WHITE).len(), 20); assert_eq!(builder.board.get_legal_moves(BLACK).len(), 20); assert_eq!( builder.board.black_castling_rights.can_kingside_castle(), true ); assert_eq!( builder.board.black_castling_rights.can_queenside_castle(), true ); assert_eq!( builder.board.white_castling_rights.can_kingside_castle(), true ); assert_eq!( builder.board.white_castling_rights.can_queenside_castle(), true ); } #[test] fn row() { let board: Board = BoardBuilder::default().row(Piece::Queen(WHITE, A1)).build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Queen(WHITE, A1)); assert_eq!(board.get_piece(B1).unwrap(), Piece::Queen(WHITE, B1)); assert_eq!(board.get_piece(C1).unwrap(), Piece::Queen(WHITE, C1)); assert_eq!(board.get_piece(D1).unwrap(), Piece::Queen(WHITE, D1)); assert_eq!(board.get_piece(E1).unwrap(), Piece::Queen(WHITE, E1)); assert_eq!(board.get_piece(F1).unwrap(), Piece::Queen(WHITE, F1)); 
assert_eq!(board.get_piece(G1).unwrap(), Piece::Queen(WHITE, G1)); assert_eq!(board.get_piece(H1).unwrap(), Piece::Queen(WHITE, H1)); } #[test] fn col() { let board: Board = BoardBuilder::default() .column(Piece::Queen(WHITE, A1)) .build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Queen(WHITE, A1)); assert_eq!(board.get_piece(A2).unwrap(), Piece::Queen(WHITE, A2)); assert_eq!(board.get_piece(A3).unwrap(), Piece::Queen(WHITE, A3)); assert_eq!(board.get_piece(A4).unwrap(), Piece::Queen(WHITE, A4)); assert_eq!(board.get_piece(A5).unwrap(), Piece::Queen(WHITE, A5)); assert_eq!(board.get_piece(A6).unwrap(), Piece::Queen(WHITE, A6)); assert_eq!(board.get_piece(A7).unwrap(), Piece::Queen(WHITE, A7)); assert_eq!(board.get_piece(A8).unwrap(), Piece::Queen(WHITE, A8)); } #[test] fn piece() { let board: Board = BoardBuilder::default() .piece(Piece::Rook(WHITE, A1)) .piece(Piece::Rook(BLACK, H8)) .build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Rook(WHITE, A1)); assert_eq!(board.get_piece(H8).unwrap(), Piece::Rook(BLACK, H8)); } #[test] fn player_moving() { let board: Board = BoardBuilder::default().player_moving(BLACK).build(); assert_eq!(board.get_turn(), BLACK); } #[test] fn castling_rights() { let board: Board = BoardBuilder::default().enable_castling().build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_kingside_castle(WHITE) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() 
.enable_kingside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() .enable_queenside_castle(WHITE) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_queenside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default().disable_castling().build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() .enable_castling() .disable_queenside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_castling() .disable_kingside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); 
assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); } }
use super::{Board, Color, Piece, Square, BLACK, WHITE}; pub struct BoardBuilder { board: Board, } impl From<Board> for BoardBuilder { fn from(board: Board) -> Self { Self { board } } } impl Default for BoardBuilder {
} impl BoardBuilder { pub fn row(mut self, piece: Piece) -> Self { let mut pos = piece.get_pos(); while pos.get_col() > 0 { pos = pos.next_left() } for _ in 0..8 { *self.board.get_square(pos) = Square::from(piece.move_to(pos)); pos = pos.next_right(); } self } pub fn column(mut self, piece: Piece) -> Self { let mut pos = piece.get_pos(); while pos.get_row() > 0 { pos = pos.next_below() } for _ in 0..8 { *self.board.get_square(pos) = Square::from(piece.move_to(pos)); pos = pos.next_above(); } self } pub fn piece(mut self, piece: Piece) -> Self { let pos = piece.get_pos(); *self.board.get_square(pos) = Square::from(piece); self } pub fn enable_castling(mut self) -> Self { self.board.black_castling_rights.enable_all(); self.board.white_castling_rights.enable_all(); self } pub fn disable_castling(mut self) -> Self { self.board.black_castling_rights.disable_all(); self.board.white_castling_rights.disable_all(); self } pub fn enable_queenside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.enable_queenside(), BLACK => self.board.black_castling_rights.enable_queenside(), } self } pub fn disable_queenside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.disable_queenside(), BLACK => self.board.black_castling_rights.disable_queenside(), } self } pub fn enable_kingside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.enable_kingside(), BLACK => self.board.black_castling_rights.enable_kingside(), } self } pub fn disable_kingside_castle(mut self, color: Color) -> Self { match color { WHITE => self.board.white_castling_rights.disable_kingside(), BLACK => self.board.black_castling_rights.disable_kingside(), } self } pub fn player_moving(mut self, color: Color) -> Self { self.board.turn = color; self } pub fn build(self) -> Board { self.board } } #[cfg(test)] mod test { use super::*; use crate::position::*; use crate::{BLACK, WHITE}; use 
pretty_assertions::assert_eq; #[test] fn default() { let builder: BoardBuilder = BoardBuilder::default(); assert_eq!(builder.board.get_legal_moves(WHITE).is_empty(), true); assert_eq!(builder.board.get_legal_moves(BLACK).is_empty(), true); assert_eq!( builder.board.black_castling_rights.can_kingside_castle(), false ); assert_eq!( builder.board.black_castling_rights.can_queenside_castle(), false ); assert_eq!( builder.board.white_castling_rights.can_kingside_castle(), false ); assert_eq!( builder.board.white_castling_rights.can_queenside_castle(), false ); } #[test] fn from() { let builder: BoardBuilder = BoardBuilder::from(Board::default()); assert_eq!(builder.board.get_legal_moves(WHITE).len(), 20); assert_eq!(builder.board.get_legal_moves(BLACK).len(), 20); assert_eq!( builder.board.black_castling_rights.can_kingside_castle(), true ); assert_eq!( builder.board.black_castling_rights.can_queenside_castle(), true ); assert_eq!( builder.board.white_castling_rights.can_kingside_castle(), true ); assert_eq!( builder.board.white_castling_rights.can_queenside_castle(), true ); } #[test] fn row() { let board: Board = BoardBuilder::default().row(Piece::Queen(WHITE, A1)).build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Queen(WHITE, A1)); assert_eq!(board.get_piece(B1).unwrap(), Piece::Queen(WHITE, B1)); assert_eq!(board.get_piece(C1).unwrap(), Piece::Queen(WHITE, C1)); assert_eq!(board.get_piece(D1).unwrap(), Piece::Queen(WHITE, D1)); assert_eq!(board.get_piece(E1).unwrap(), Piece::Queen(WHITE, E1)); assert_eq!(board.get_piece(F1).unwrap(), Piece::Queen(WHITE, F1)); assert_eq!(board.get_piece(G1).unwrap(), Piece::Queen(WHITE, G1)); assert_eq!(board.get_piece(H1).unwrap(), Piece::Queen(WHITE, H1)); } #[test] fn col() { let board: Board = BoardBuilder::default() .column(Piece::Queen(WHITE, A1)) .build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Queen(WHITE, A1)); assert_eq!(board.get_piece(A2).unwrap(), Piece::Queen(WHITE, A2)); 
assert_eq!(board.get_piece(A3).unwrap(), Piece::Queen(WHITE, A3)); assert_eq!(board.get_piece(A4).unwrap(), Piece::Queen(WHITE, A4)); assert_eq!(board.get_piece(A5).unwrap(), Piece::Queen(WHITE, A5)); assert_eq!(board.get_piece(A6).unwrap(), Piece::Queen(WHITE, A6)); assert_eq!(board.get_piece(A7).unwrap(), Piece::Queen(WHITE, A7)); assert_eq!(board.get_piece(A8).unwrap(), Piece::Queen(WHITE, A8)); } #[test] fn piece() { let board: Board = BoardBuilder::default() .piece(Piece::Rook(WHITE, A1)) .piece(Piece::Rook(BLACK, H8)) .build(); assert_eq!(board.get_piece(A1).unwrap(), Piece::Rook(WHITE, A1)); assert_eq!(board.get_piece(H8).unwrap(), Piece::Rook(BLACK, H8)); } #[test] fn player_moving() { let board: Board = BoardBuilder::default().player_moving(BLACK).build(); assert_eq!(board.get_turn(), BLACK); } #[test] fn castling_rights() { let board: Board = BoardBuilder::default().enable_castling().build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_kingside_castle(WHITE) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() .enable_kingside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() .enable_queenside_castle(WHITE) 
.build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_queenside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default().disable_castling().build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), false); assert_eq!(board.white_castling_rights.can_queenside_castle(), false); let board: Board = BoardBuilder::default() .enable_castling() .disable_queenside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), true); assert_eq!(board.black_castling_rights.can_queenside_castle(), false); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); let board: Board = BoardBuilder::default() .enable_castling() .disable_kingside_castle(BLACK) .build(); assert_eq!(board.black_castling_rights.can_kingside_castle(), false); assert_eq!(board.black_castling_rights.can_queenside_castle(), true); assert_eq!(board.white_castling_rights.can_kingside_castle(), true); assert_eq!(board.white_castling_rights.can_queenside_castle(), true); } }
fn default() -> Self { let mut board = Board::empty(); board.white_castling_rights.disable_all(); board.black_castling_rights.disable_all(); Self { board } }
function_block-function_prefix_line
[ { "content": "/// ### was_illegal_move\n\n///\n\n/// Returns whether game result was an illegal move\n\npub fn was_illegal_move(res: &GameResult) -> bool {\n\n matches!(res, Err(GameError::IllegalMove(_)))\n\n}\n\n\n", "file_path": "src/game/result.rs", "rank": 0, "score": 43371.68225817752 }, { "content": "/// ### set_result_event\n\n///\n\n/// Set `GameEvent` to `GameResult`\n\npub fn set_result_event(res: GameResult, ev: GameEvent) -> GameResult {\n\n match res {\n\n Ok((state, mut event)) => {\n\n event.insert(ev);\n\n Ok((state, event))\n\n }\n\n Err(err) => Err(err),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n use alloc::string::ToString;\n\n\n\n #[test]\n\n fn fmt_game_error() {\n\n assert_eq!(\n\n GameError::CantPromote.to_string().as_str(),\n", "file_path": "src/game/result.rs", "rank": 1, "score": 35170.90637594846 }, { "content": "//! # Square\n\n//!\n\n//! This module exposes the type to define an individual square of the chess board\n\n\n\nuse super::Piece;\n\n\n\n/// ## Square\n\n///\n\n/// Essentially a container for a single piece on a board.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Square {\n\n piece: Option<Piece>,\n\n}\n\n\n\nimpl From<Piece> for Square {\n\n fn from(piece: Piece) -> Self {\n\n Self { piece: Some(piece) }\n\n }\n\n}\n\n\n", "file_path": "src/square.rs", "rank": 2, "score": 30591.698544494986 }, { "content": "impl Square {\n\n /// ### empty\n\n ///\n\n /// A constructor for an empty Square\n\n pub fn empty() -> Self {\n\n Square { piece: None }\n\n }\n\n\n\n /// ### is_empty\n\n ///\n\n /// Does this square contain a piece?\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self.piece == None\n\n }\n\n\n\n /// ### get_piece\n\n ///\n\n /// Get the piece contained in this square.\n\n #[inline]\n", "file_path": "src/square.rs", "rank": 3, "score": 30587.85574223603 }, { "content": " pub fn get_piece(&self) -> Option<Piece> {\n\n self.piece\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n use crate::position::C8;\n\n use crate::WHITE;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn square() {\n\n // empty\n\n let square: Square = Square::empty();\n\n assert_eq!(square.is_empty(), true);\n\n assert_eq!(square.get_piece(), None);\n\n // from\n\n let square: Square = Square::from(Piece::Queen(WHITE, C8));\n\n assert_eq!(square.is_empty(), false);\n\n assert_eq!(square.get_piece().unwrap().is_queen(), true);\n\n }\n\n}\n", "file_path": "src/square.rs", "rank": 4, "score": 30586.80877255293 }, { "content": " pub fn get_weighted_value(&self) -> f64 {\n\n let weights = match self {\n\n Self::King(c, _) => match c {\n\n Color::White => WHITE_KING_POSITION_WEIGHTS,\n\n Color::Black => BLACK_KING_POSITION_WEIGHTS,\n\n },\n\n Self::Queen(c, _) => match c {\n\n Color::White => WHITE_QUEEN_POSITION_WEIGHTS,\n\n Color::Black => BLACK_QUEEN_POSITION_WEIGHTS,\n\n },\n\n Self::Rook(c, _) => match c {\n\n Color::White => WHITE_ROOK_POSITION_WEIGHTS,\n\n Color::Black => BLACK_ROOK_POSITION_WEIGHTS,\n\n },\n\n Self::Bishop(c, _) => match c {\n\n Color::White => WHITE_BISHOP_POSITION_WEIGHTS,\n\n Color::Black => BLACK_BISHOP_POSITION_WEIGHTS,\n\n },\n\n Self::Knight(c, _) => match c {\n\n Color::White => WHITE_KNIGHT_POSITION_WEIGHTS,\n", "file_path": "src/piece.rs", "rank": 5, "score": 29232.04324664953 }, { "content": "//! # Piece\n\n//!\n\n//! Exposes the piece type and its related functions\n\n\n\nuse super::{Board, Color, Move, Position, BLACK, WHITE};\n\nuse alloc::vec::Vec;\n\n\n\n/// ## Piece\n\n///\n\n/// A piece on a board.\n\n///\n\n/// Every piece has both a color and a position.\n\n/// These, combined with the type of piece it is,\n\n/// determine things like\n\n/// 1. The validity of legal moves\n\n/// 2. The validity of legal attacks\n\n/// 3. Move generation\n\n/// 4. 
Material and positional value\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum Piece {\n", "file_path": "src/piece.rs", "rank": 6, "score": 29232.00597225396 }, { "content": " /// Given a piece and a color, create a copy of the given piece, with the provided color\n\n #[inline]\n\n pub fn with_color(&self, color: Color) -> Self {\n\n match *self {\n\n Self::King(_, pos) => Self::King(color, pos),\n\n Self::Queen(_, pos) => Self::Queen(color, pos),\n\n Self::Rook(_, pos) => Self::Rook(color, pos),\n\n Self::Bishop(_, pos) => Self::Bishop(color, pos),\n\n Self::Knight(_, pos) => Self::Knight(color, pos),\n\n Self::Pawn(_, pos) => Self::Pawn(color, pos),\n\n }\n\n }\n\n\n\n /// ### get_legal_moves\n\n ///\n\n /// Get the exhaustive list of legal moves for a given piece.\n\n ///\n\n /// This is used for move generation.\n\n #[inline]\n\n pub(crate) fn get_legal_moves(&self, board: &Board) -> Vec<Move> {\n", "file_path": "src/piece.rs", "rank": 7, "score": 29231.844448157575 }, { "content": " Color::Black => BLACK_KNIGHT_POSITION_WEIGHTS,\n\n },\n\n Self::Pawn(c, _) => match c {\n\n Color::White => WHITE_PAWN_POSITION_WEIGHTS,\n\n Color::Black => BLACK_PAWN_POSITION_WEIGHTS,\n\n },\n\n };\n\n weights[(7 - self.get_pos().get_row()) as usize][self.get_pos().get_col() as usize]\n\n + (self.get_material_value() * 10) as f64\n\n }\n\n\n\n /// ### is_king\n\n ///\n\n /// Is this piece a king?\n\n #[inline]\n\n pub fn is_king(&self) -> bool {\n\n matches!(self, Self::King(_, _))\n\n }\n\n\n\n /// ### is_queen\n", "file_path": "src/piece.rs", "rank": 8, "score": 29230.996077429772 }, { "content": " assert_eq!(\n\n Piece::Knight(WHITE, H1).with_color(BLACK),\n\n Piece::Knight(BLACK, H1)\n\n );\n\n assert_eq!(\n\n Piece::Bishop(WHITE, H1).with_color(BLACK),\n\n Piece::Bishop(BLACK, H1)\n\n );\n\n assert_eq!(\n\n Piece::Pawn(WHITE, H1).with_color(BLACK),\n\n Piece::Pawn(BLACK, H1)\n\n );\n\n assert_eq!(\n\n Piece::Queen(WHITE, H1).with_color(BLACK),\n\n 
Piece::Queen(BLACK, H1)\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_starting_position() {\n", "file_path": "src/piece.rs", "rank": 9, "score": 29230.230584776175 }, { "content": " }\n\n\n\n #[test]\n\n fn get_color() {\n\n assert_eq!(Piece::Bishop(WHITE, A1).get_color(), WHITE);\n\n assert_eq!(Piece::King(BLACK, A1).get_color(), BLACK);\n\n assert_eq!(Piece::Knight(WHITE, A1).get_color(), WHITE);\n\n assert_eq!(Piece::Queen(BLACK, A1).get_color(), BLACK);\n\n assert_eq!(Piece::Pawn(WHITE, A1).get_color(), WHITE);\n\n assert_eq!(Piece::Rook(BLACK, A1).get_color(), BLACK);\n\n }\n\n\n\n #[test]\n\n fn get_pos() {\n\n assert_eq!(Piece::Bishop(WHITE, D4).get_pos(), D4);\n\n assert_eq!(Piece::King(BLACK, D4).get_pos(), D4);\n\n assert_eq!(Piece::Knight(WHITE, D4).get_pos(), D4);\n\n assert_eq!(Piece::Queen(BLACK, D4).get_pos(), D4);\n\n assert_eq!(Piece::Pawn(WHITE, D4).get_pos(), D4);\n\n assert_eq!(Piece::Rook(BLACK, D4).get_pos(), D4);\n", "file_path": "src/piece.rs", "rank": 10, "score": 29229.45308145773 }, { "content": " // Can take pawn (white)\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, E4))\n\n .piece(Piece::Pawn(BLACK, D5))\n\n .piece(Piece::Pawn(BLACK, F5))\n\n .build();\n\n assert_eq!(\n\n Piece::Pawn(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, D5),\n\n Move::Piece(E4, F5)\n\n ]\n\n );\n\n // Can take pawn (black)\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(BLACK, E5))\n\n .piece(Piece::Pawn(WHITE, D4))\n", "file_path": "src/piece.rs", "rank": 11, "score": 29229.35049064208 }, { "content": " assert_eq!(Piece::Rook(BLACK, H8).is_kingside_rook(), true);\n\n assert_eq!(Piece::Rook(BLACK, H1).is_kingside_rook(), false);\n\n assert_eq!(Piece::Rook(WHITE, H8).is_kingside_rook(), false);\n\n }\n\n\n\n #[test]\n\n fn move_to() {\n\n assert_eq!(Piece::Rook(WHITE, H1).move_to(H8), Piece::Rook(WHITE, 
H8));\n\n }\n\n\n\n #[test]\n\n fn with_color() {\n\n assert_eq!(\n\n Piece::Rook(WHITE, H1).with_color(BLACK),\n\n Piece::Rook(BLACK, H1)\n\n );\n\n assert_eq!(\n\n Piece::King(WHITE, H1).with_color(BLACK),\n\n Piece::King(BLACK, H1)\n\n );\n", "file_path": "src/piece.rs", "rank": 12, "score": 29229.1263746419 }, { "content": " Self::Queen(_, pos) => Self::is_legal_queen_move(pos, new_pos, board),\n\n Self::Rook(_, pos) => Self::is_legal_rook_move(pos, new_pos, board),\n\n Self::Bishop(_, pos) => Self::is_legal_bishop_move(pos, new_pos, board),\n\n Self::Knight(_, pos) => pos.is_knight_move(new_pos),\n\n }\n\n }\n\n\n\n /// ### is_legal_attack\n\n ///\n\n /// Verify that attacking a given square is a legal move.\n\n #[inline]\n\n pub(crate) fn is_legal_attack(&self, new_pos: Position, board: &Board) -> bool {\n\n if board.has_ally_piece(new_pos, self.get_color()) || new_pos.is_off_board() {\n\n return false;\n\n }\n\n\n\n match *self {\n\n Self::Pawn(ally_color, pos) => {\n\n Self::is_legal_pawn_attack(ally_color, pos, new_pos, board)\n\n }\n", "file_path": "src/piece.rs", "rank": 13, "score": 29229.092341221556 }, { "content": " .piece(Piece::Rook(BLACK, E8))\n\n .piece(Piece::King(WHITE, E1))\n\n .build();\n\n assert_eq!(\n\n Piece::Pawn(WHITE, E4).get_legal_moves(&board),\n\n vec![Move::Piece(E4, E5)]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_starting_position() {\n\n // King without free squares\n\n assert_eq!(\n\n Piece::King(WHITE, E1).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n assert_eq!(\n\n Piece::King(BLACK, E8).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n", "file_path": "src/piece.rs", "rank": 14, "score": 29228.812941504242 }, { "content": " }\n\n _ => board.is_legal_move(*x, color),\n\n })\n\n .collect::<Vec<Move>>()\n\n }\n\n\n\n /// ### is_legal_move\n\n ///\n\n /// Verify that moving to a new position is a legal move.\n\n #[inline]\n\n pub(crate) fn is_legal_move(&self, new_pos: Position, board: &Board) -> 
bool {\n\n if board.has_ally_piece(new_pos, self.get_color()) || new_pos.is_off_board() {\n\n return false;\n\n }\n\n\n\n match *self {\n\n Self::Pawn(ally_color, pos) => {\n\n Self::is_legal_pawn_move(ally_color, pos, new_pos, board)\n\n }\n\n Self::King(_, pos) => pos.is_adjacent_to(new_pos),\n", "file_path": "src/piece.rs", "rank": 15, "score": 29228.38723040898 }, { "content": " let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, E4))\n\n .piece(Piece::Pawn(WHITE, E5))\n\n .build();\n\n assert_eq!(Piece::Pawn(WHITE, E4).is_legal_move(E5, &board), false);\n\n }\n\n\n\n #[test]\n\n fn fmt() {\n\n assert_eq!(Piece::Bishop(WHITE, A1).to_string(), \"♗\");\n\n assert_eq!(Piece::Bishop(BLACK, A1).to_string(), \"♝\");\n\n assert_eq!(Piece::King(WHITE, A1).to_string(), \"♔\");\n\n assert_eq!(Piece::King(BLACK, A1).to_string(), \"♚\");\n\n assert_eq!(Piece::Knight(WHITE, A1).to_string(), \"♘\");\n\n assert_eq!(Piece::Knight(BLACK, A1).to_string(), \"♞\");\n\n assert_eq!(Piece::Queen(WHITE, A1).to_string(), \"♕\");\n\n assert_eq!(Piece::Queen(BLACK, A1).to_string(), \"♛\");\n\n assert_eq!(Piece::Pawn(WHITE, A1).to_string(), \"♙\");\n\n assert_eq!(Piece::Pawn(BLACK, A1).to_string(), \"♟\");\n\n assert_eq!(Piece::Rook(WHITE, A1).to_string(), \"♖\");\n\n assert_eq!(Piece::Rook(BLACK, A1).to_string(), \"♜\");\n\n }\n\n}\n", "file_path": "src/piece.rs", "rank": 16, "score": 29228.31367121109 }, { "content": " let result: Vec<Move> = match *self {\n\n Self::Pawn(ally_color, pos) => Self::get_pawn_legal_moves(ally_color, pos, board),\n\n Self::King(ally_color, pos) => Self::get_king_legal_moves(ally_color, pos, board),\n\n Self::Queen(ally_color, pos) => Self::get_queen_legal_moves(ally_color, pos, board),\n\n Self::Rook(ally_color, pos) => Self::get_rook_legal_moves(ally_color, pos, board),\n\n Self::Bishop(ally_color, pos) => Self::get_bishop_legal_moves(ally_color, pos, board),\n\n Self::Knight(ally_color, pos) => 
Self::get_knight_legal_moves(ally_color, pos, board),\n\n };\n\n\n\n let color = self.get_color();\n\n // Filter illegal moves and off-boards from result\n\n result\n\n .into_iter()\n\n .filter(|x| match x {\n\n Move::Piece(from, to) => {\n\n if from.is_on_board() && to.is_on_board() {\n\n board.is_legal_move(*x, color)\n\n } else {\n\n false\n\n }\n", "file_path": "src/piece.rs", "rank": 17, "score": 29228.276685560228 }, { "content": " pub fn get_color(&self) -> Color {\n\n match self {\n\n Self::King(c, _)\n\n | Self::Queen(c, _)\n\n | Self::Rook(c, _)\n\n | Self::Bishop(c, _)\n\n | Self::Knight(c, _)\n\n | Self::Pawn(c, _) => *c,\n\n }\n\n }\n\n\n\n /// ### get_pos\n\n ///\n\n /// Get the position of a piece.\n\n #[inline]\n\n pub fn get_pos(&self) -> Position {\n\n match self {\n\n Self::King(_, p)\n\n | Self::Queen(_, p)\n\n | Self::Rook(_, p)\n", "file_path": "src/piece.rs", "rank": 18, "score": 29228.214004019792 }, { "content": " // Starting pawn (white)\n\n assert_eq!(\n\n Piece::Pawn(WHITE, E2).get_legal_moves(&Board::default()),\n\n vec![Move::Piece(E2, E4), Move::Piece(E2, E3)]\n\n );\n\n // Starting pawn (black)\n\n assert_eq!(\n\n Piece::Pawn(BLACK, F7).get_legal_moves(&Board::default()),\n\n vec![Move::Piece(F7, F5), Move::Piece(F7, F6)]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_last_position() {\n\n // Starting pawn (white)\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, H8))\n\n .piece(Piece::Pawn(BLACK, A1))\n\n .build();\n", "file_path": "src/piece.rs", "rank": 19, "score": 29228.118624956565 }, { "content": " #[test]\n\n fn get_rook_legal_moves_blocked() {\n\n assert_eq!(\n\n Piece::Rook(WHITE, A1).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n assert_eq!(\n\n Piece::Rook(BLACK, H8).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_rook_legal_moves_piece_on_col() {\n\n let board: Board = BoardBuilder::default()\n\n 
.enable_castling()\n\n .piece(Piece::Rook(WHITE, E4))\n\n .piece(Piece::Knight(BLACK, E6))\n\n .piece(Piece::Knight(WHITE, E1))\n\n .build();\n", "file_path": "src/piece.rs", "rank": 20, "score": 29227.992424167347 }, { "content": " .piece(Piece::King(WHITE, E1))\n\n .piece(Piece::Queen(BLACK, E2))\n\n .piece(Piece::Rook(BLACK, E8))\n\n .build();\n\n assert_eq!(Piece::King(WHITE, E1).get_legal_moves(&board), vec![]);\n\n }\n\n\n\n #[test]\n\n fn get_queen_legal_moves_free() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Queen(WHITE, E4))\n\n .build();\n\n assert_eq!(\n\n Piece::Queen(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, B1),\n\n Move::Piece(E4, H1),\n\n Move::Piece(E4, C2),\n\n Move::Piece(E4, G2),\n", "file_path": "src/piece.rs", "rank": 21, "score": 29227.92181914536 }, { "content": " .piece(Piece::Pawn(WHITE, F4))\n\n .build();\n\n assert_eq!(\n\n Piece::Pawn(BLACK, E5).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E5, E4),\n\n Move::Piece(E5, D4),\n\n Move::Piece(E5, F4)\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_opposite() {\n\n // Opposite pawn\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, E4))\n\n .piece(Piece::Pawn(BLACK, E5))\n\n .build();\n", "file_path": "src/piece.rs", "rank": 22, "score": 29227.634315770316 }, { "content": " /// Change the position of this piece to a new position.\n\n ///\n\n /// For example, `Pawn(WHITE, E4).move_to(E5)` will result in\n\n /// `Pawn(WHITE, E5)`. 
This does not check for move legality,\n\n /// it merely creates a new piece with the same color and type, but\n\n /// with a new position.\n\n #[inline]\n\n pub fn move_to(&self, new_pos: Position) -> Self {\n\n match *self {\n\n Self::King(c, _) => Self::King(c, new_pos),\n\n Self::Queen(c, _) => Self::Queen(c, new_pos),\n\n Self::Rook(c, _) => Self::Rook(c, new_pos),\n\n Self::Bishop(c, _) => Self::Bishop(c, new_pos),\n\n Self::Knight(c, _) => Self::Knight(c, new_pos),\n\n Self::Pawn(c, _) => Self::Pawn(c, new_pos),\n\n }\n\n }\n\n\n\n /// ### with_color\n\n ///\n", "file_path": "src/piece.rs", "rank": 23, "score": 29227.631911746543 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n use crate::board::BoardBuilder;\n\n use crate::position::*;\n\n use crate::MoveResult;\n\n\n\n use alloc::string::ToString;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn get_name() {\n\n assert_eq!(Piece::Bishop(WHITE, A1).get_name(), \"bishop\");\n\n assert_eq!(Piece::King(WHITE, A1).get_name(), \"king\");\n\n assert_eq!(Piece::Knight(WHITE, A1).get_name(), \"knight\");\n\n assert_eq!(Piece::Queen(WHITE, A1).get_name(), \"queen\");\n\n assert_eq!(Piece::Pawn(WHITE, A1).get_name(), \"pawn\");\n\n assert_eq!(Piece::Rook(WHITE, A1).get_name(), \"rook\");\n", "file_path": "src/piece.rs", "rank": 24, "score": 29227.4979857439 }, { "content": " fn get_queen_legal_moves_piece_on_diagonal() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Queen(WHITE, D4))\n\n .piece(Piece::Bishop(BLACK, E3))\n\n .piece(Piece::Bishop(BLACK, E5))\n\n .piece(Piece::Bishop(BLACK, C3))\n\n .piece(Piece::Bishop(BLACK, C5))\n\n .build();\n\n assert_eq!(\n\n Piece::Queen(WHITE, D4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(D4, C3),\n\n Move::Piece(D4, E3),\n\n Move::Piece(D4, C5),\n\n Move::Piece(D4, E5),\n\n Move::Piece(D4, D1),\n\n Move::Piece(D4, D2),\n\n Move::Piece(D4, D3),\n\n Move::Piece(D4, D5),\n", "file_path": 
"src/piece.rs", "rank": 25, "score": 29227.417029317174 }, { "content": " if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(F7, F5)) {\n\n // Black pawn\n\n board = next_board;\n\n }\n\n assert_eq!(\n\n Piece::Pawn(WHITE, E5).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E5, F6), // En passant\n\n Move::Piece(E5, E6),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_discovered_check() {\n\n // Discovered check by rook\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, E4))\n\n .piece(Piece::Pawn(BLACK, F5)) // Could capture, but discovered check by rook\n", "file_path": "src/piece.rs", "rank": 26, "score": 29227.27915709147 }, { "content": " Move::Piece(E4, F6),\n\n Move::Piece(E4, G3),\n\n Move::Piece(E4, F2),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_knight_legal_moves_discovered_check() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Knight(WHITE, E4))\n\n .piece(Piece::King(WHITE, B1))\n\n .piece(Piece::Bishop(BLACK, H7))\n\n .build();\n\n assert_eq!(Piece::Knight(WHITE, E4).get_legal_moves(&board), vec![]);\n\n }\n\n\n\n #[test]\n\n fn is_legal_move() {\n", "file_path": "src/piece.rs", "rank": 27, "score": 29227.230807400163 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_protected_rook_check() {\n\n // King, checked by a protected rook on the left\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::King(WHITE, E5))\n\n .piece(Piece::Rook(BLACK, D5))\n\n .piece(Piece::Pawn(BLACK, C6))\n\n .build();\n\n assert_eq!(\n\n Piece::King(WHITE, E5).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E5, E6),\n\n Move::Piece(E5, E4),\n\n Move::Piece(E5, F6),\n\n Move::Piece(E5, F4),\n", "file_path": "src/piece.rs", "rank": 28, "score": 29227.217716298248 }, { "content": " assert_eq!(Piece::Pawn(WHITE, H8).get_legal_moves(&board), vec![]);\n\n // Starting pawn (black)\n\n 
assert_eq!(Piece::Pawn(BLACK, A1).get_legal_moves(&board), vec![]);\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_already_moved() {\n\n // Not starting position pawn\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Pawn(WHITE, E3))\n\n .build();\n\n assert_eq!(\n\n Piece::Pawn(WHITE, E3).get_legal_moves(&board),\n\n vec![Move::Piece(E3, E4)]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_can_take() {\n", "file_path": "src/piece.rs", "rank": 29, "score": 29227.128987715627 }, { "content": " Move::Piece(E4, F2),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_knight_legal_moves_busy() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Knight(WHITE, E4))\n\n .piece(Piece::Knight(BLACK, C5))\n\n .piece(Piece::Knight(WHITE, D6))\n\n .build();\n\n assert_eq!(\n\n Piece::Knight(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, C5),\n\n Move::Piece(E4, C3),\n\n Move::Piece(E4, D2),\n\n Move::Piece(E4, G5),\n", "file_path": "src/piece.rs", "rank": 30, "score": 29227.103806244915 }, { "content": " /// ### get_name\n\n ///\n\n /// Get the name of the piece such as `\"pawn\"` or `\"king\"`.\n\n /// All names are lowercase.\n\n #[inline]\n\n pub fn get_name(&self) -> &'static str {\n\n match self {\n\n Self::King(_, _) => \"king\",\n\n Self::Queen(_, _) => \"queen\",\n\n Self::Rook(_, _) => \"rook\",\n\n Self::Bishop(_, _) => \"bishop\",\n\n Self::Knight(_, _) => \"knight\",\n\n Self::Pawn(_, _) => \"pawn\",\n\n }\n\n }\n\n\n\n /// ### get_color\n\n ///\n\n /// Get the color of a given piece.\n\n #[inline]\n", "file_path": "src/piece.rs", "rank": 31, "score": 29227.085811905043 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_rook_legal_moves_discovered_check() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Rook(WHITE, E4))\n\n .piece(Piece::King(WHITE, E1))\n\n .piece(Piece::Rook(BLACK, E7))\n\n .build();\n\n 
assert_eq!(\n\n Piece::Rook(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n", "file_path": "src/piece.rs", "rank": 32, "score": 29227.039508404538 }, { "content": " Move::Piece(E4, B7),\n\n Move::Piece(E4, A8),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_bishop_legal_moves_discovered_check() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Bishop(WHITE, E4))\n\n .piece(Piece::Bishop(BLACK, G6))\n\n .piece(Piece::King(WHITE, C2))\n\n .build();\n\n assert_eq!(\n\n Piece::Bishop(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, D3),\n\n Move::Piece(E4, F5),\n\n Move::Piece(E4, G6),\n", "file_path": "src/piece.rs", "rank": 33, "score": 29227.039508404538 }, { "content": " assert_eq!(Piece::Pawn(WHITE, E4).get_legal_moves(&board), vec![]);\n\n assert_eq!(Piece::Pawn(BLACK, E5).get_legal_moves(&board), vec![]);\n\n }\n\n\n\n #[test]\n\n fn get_pawn_legal_moves_en_passant() {\n\n // En passant - edge case 😈\n\n let mut board: Board = Board::default();\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(E2, E4)) {\n\n // White pawn\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(H7, H5)) {\n\n // Black pawn\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(E4, E5)) {\n\n // White pawn\n\n board = next_board;\n\n }\n", "file_path": "src/piece.rs", "rank": 34, "score": 29226.868239306394 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_rook_on_left() {\n\n // King in the middle of the board, rook on the left\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::King(WHITE, E5))\n\n .piece(Piece::Rook(BLACK, D8))\n\n .build();\n\n assert_eq!(\n\n Piece::King(WHITE, E5).get_legal_moves(&board),\n\n 
vec![\n\n Move::Piece(E5, F5),\n\n Move::Piece(E5, E6),\n\n Move::Piece(E5, E4),\n\n Move::Piece(E5, F6),\n\n Move::Piece(E5, F4),\n", "file_path": "src/piece.rs", "rank": 35, "score": 29226.627418745506 }, { "content": "\n\n #[test]\n\n fn get_bishop_legal_moves_piece_on_diagonal() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Bishop(WHITE, E4))\n\n .piece(Piece::Bishop(BLACK, G6))\n\n .piece(Piece::Pawn(WHITE, C2))\n\n .build();\n\n assert_eq!(\n\n Piece::Bishop(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, H1),\n\n Move::Piece(E4, G2),\n\n Move::Piece(E4, D3),\n\n Move::Piece(E4, F3),\n\n Move::Piece(E4, D5),\n\n Move::Piece(E4, F5),\n\n Move::Piece(E4, C6),\n\n Move::Piece(E4, G6),\n", "file_path": "src/piece.rs", "rank": 36, "score": 29226.547829536576 }, { "content": "\n\n if let Some(en_passant) = board.get_en_passant() {\n\n if en_passant == up_left || en_passant == up_right {\n\n result.push(Move::Piece(pos, en_passant));\n\n }\n\n }\n\n\n\n if next_up.is_on_board()\n\n && pos.is_starting_pawn(ally_color)\n\n && board.has_no_piece(up)\n\n && board.has_no_piece(next_up)\n\n {\n\n result.push(Move::Piece(pos, next_up))\n\n }\n\n\n\n if up.is_on_board() && board.has_no_piece(up) {\n\n result.push(Move::Piece(pos, up))\n\n }\n\n\n\n // Check up_left NOTE: don't use else if, you can have both of them\n", "file_path": "src/piece.rs", "rank": 37, "score": 29226.46169677083 }, { "content": " }\n\n\n\n #[test]\n\n fn get_king_legal_moves_free_squares() {\n\n // King in the middle of the board, no threatened\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::King(WHITE, E5))\n\n .build();\n\n assert_eq!(\n\n Piece::King(WHITE, E5).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E5, D5),\n\n Move::Piece(E5, F5),\n\n Move::Piece(E5, E6),\n\n Move::Piece(E5, E4),\n\n Move::Piece(E5, D6),\n\n Move::Piece(E5, D4),\n\n Move::Piece(E5, F6),\n\n Move::Piece(E5, F4),\n", 
"file_path": "src/piece.rs", "rank": 38, "score": 29226.453374180204 }, { "content": " let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Rook(WHITE, E4))\n\n .piece(Piece::Queen(BLACK, G4))\n\n .piece(Piece::Queen(WHITE, B4))\n\n .build();\n\n assert_eq!(\n\n Piece::Rook(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E1),\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n\n Move::Piece(E4, E8),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n\n Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n", "file_path": "src/piece.rs", "rank": 39, "score": 29226.370743604453 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_rook_on_left_can_take() {\n\n // King in the middle of the board, rook on the left, can take rook\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::King(WHITE, E5))\n\n .piece(Piece::Rook(BLACK, D5))\n\n .build();\n\n assert_eq!(\n\n Piece::King(WHITE, E5).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E5, D5),\n\n Move::Piece(E5, E6),\n\n Move::Piece(E5, E4),\n\n Move::Piece(E5, F6),\n\n Move::Piece(E5, F4),\n", "file_path": "src/piece.rs", "rank": 40, "score": 29226.318078367654 }, { "content": " Move::Piece(E4, E6),\n\n Move::Piece(E4, A4),\n\n Move::Piece(E4, B4),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n\n Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n\n Move::Piece(E4, H4),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_queen_legal_moves_discovered_check() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Queen(WHITE, E4))\n\n .piece(Piece::King(WHITE, E1))\n\n .piece(Piece::Rook(BLACK, E8))\n\n .build();\n", "file_path": "src/piece.rs", "rank": 41, "score": 29226.180435247206 }, { "content": " ///\n\n /// Is this piece a queen?\n\n #[inline]\n\n pub fn is_queen(&self) -> bool {\n\n matches!(self, 
Self::Queen(_, _))\n\n }\n\n\n\n /// ### is_rook\n\n ///\n\n /// Is this piece a rook?\n\n #[inline]\n\n pub fn is_rook(&self) -> bool {\n\n matches!(self, Self::Rook(_, _))\n\n }\n\n\n\n /// ### is_bishop\n\n ///\n\n /// Is this piece a bishop?\n\n #[inline]\n\n pub fn is_bishop(&self) -> bool {\n", "file_path": "src/piece.rs", "rank": 42, "score": 29226.12574916975 }, { "content": " Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n\n Move::Piece(E4, E8),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n\n Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_queen_legal_moves_piece_on_column() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Queen(WHITE, E4))\n\n .piece(Piece::Rook(BLACK, E2))\n\n .piece(Piece::Rook(BLACK, E6))\n", "file_path": "src/piece.rs", "rank": 43, "score": 29226.096486267794 }, { "content": " matches!(self, Self::Bishop(_, _))\n\n }\n\n\n\n /// ### is_knight\n\n ///\n\n /// Is this piece a knight?\n\n #[inline]\n\n pub fn is_knight(&self) -> bool {\n\n matches!(self, Self::Knight(_, _))\n\n }\n\n\n\n /// ### is_pawn\n\n ///\n\n /// Is this piece a pawn?\n\n #[inline]\n\n pub fn is_pawn(&self) -> bool {\n\n matches!(self, Self::Pawn(_, _))\n\n }\n\n\n\n /// ### is_starting_pawn\n", "file_path": "src/piece.rs", "rank": 44, "score": 29225.78023227874 }, { "content": " ///\n\n /// Get all legal moves for provided queen\n\n fn get_queen_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result = Self::get_bishop_legal_moves(ally_color, pos, board);\n\n result.extend(Self::get_rook_legal_moves(ally_color, pos, board));\n\n result\n\n }\n\n\n\n /// ### get_rook_legal_moves\n\n ///\n\n /// Get all legal moves for provided rook\n\n fn get_rook_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result: Vec<Move> = Vec::new();\n\n for row in 0..8 
{\n\n let new_pos = Position::new(row, pos.get_col());\n\n if new_pos != pos\n\n && !board.has_ally_piece(new_pos, ally_color)\n\n && new_pos.is_orthogonal_to(pos)\n\n {\n\n result.push(Move::Piece(pos, new_pos));\n", "file_path": "src/piece.rs", "rank": 45, "score": 29225.713754255867 }, { "content": " [1.0, 1.0, 2.0, 3.0, 3.0, 2.0, 1.0, 1.0],\n\n [0.5, 0.5, 1.0, 2.5, 2.5, 1.0, 0.5, 0.5],\n\n [0.0, 0.0, 0.0, 2.0, 2.0, 0.0, 0.0, 0.0],\n\n [0.5, -0.5, -1.0, 0.0, 0.0, -1.0, -0.5, 0.5],\n\n [0.5, 1.5, -1.0, -2.0, -2.0, 1.0, 1.5, 0.5],\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n];\n\n\n\nconst BLACK_PAWN_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n [0.5, 1.5, -1.0, -2.0, -2.0, 1.0, 1.5, 0.5],\n\n [0.5, -0.5, -1.0, 0.0, 0.0, -1.0, -0.5, 0.5],\n\n [0.0, 0.0, 0.0, 2.0, 2.0, 0.0, 0.0, 0.0],\n\n [0.5, 0.5, 1.0, 2.5, 2.5, 1.0, 0.5, 0.5],\n\n [1.0, 1.0, 2.0, 3.0, 3.0, 2.0, 1.0, 1.0],\n\n [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0],\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n];\n\n\n\nimpl Piece {\n", "file_path": "src/piece.rs", "rank": 46, "score": 29225.66837081951 }, { "content": " pos: Position,\n\n new_pos: Position,\n\n board: &Board,\n\n ) -> bool {\n\n let up = pos.pawn_up(ally_color);\n\n let up_left = up.next_left();\n\n let up_right = up.next_right();\n\n\n\n (if let Some(en_passant) = board.get_en_passant() {\n\n (en_passant == up_left || en_passant == up_right) && (new_pos == en_passant)\n\n } else {\n\n false\n\n }) || (pos.is_starting_pawn(ally_color)\n\n && board.has_no_piece(new_pos)\n\n && board.has_no_piece(up)\n\n && new_pos == up.pawn_up(ally_color))\n\n || (board.has_enemy_piece(new_pos, ally_color) && new_pos == up_left)\n\n || (board.has_enemy_piece(new_pos, ally_color) && new_pos == up_right)\n\n || (board.has_no_piece(new_pos) && new_pos == up)\n\n }\n", "file_path": "src/piece.rs", "rank": 47, "score": 29225.53654029442 }, { "content": " assert_eq!(Piece::Knight(BLACK, C2).is_knight(), true);\n\n 
assert_eq!(Piece::Rook(WHITE, D4).is_knight(), false);\n\n }\n\n\n\n #[test]\n\n fn is_starting_pawn() {\n\n // White pawns\n\n assert_eq!(Piece::Pawn(WHITE, B2).is_starting_pawn(), true);\n\n assert_eq!(Piece::Pawn(WHITE, C2).is_starting_pawn(), true);\n\n assert_eq!(Piece::Pawn(BLACK, B2).is_starting_pawn(), false);\n\n assert_eq!(Piece::Pawn(BLACK, C2).is_starting_pawn(), false);\n\n // Black pawns\n\n assert_eq!(Piece::Pawn(BLACK, B7).is_starting_pawn(), true);\n\n assert_eq!(Piece::Pawn(BLACK, C7).is_starting_pawn(), true);\n\n assert_eq!(Piece::Pawn(WHITE, B7).is_starting_pawn(), false);\n\n assert_eq!(Piece::Pawn(WHITE, C7).is_starting_pawn(), false);\n\n // others\n\n assert_eq!(Piece::Queen(WHITE, C2).is_starting_pawn(), false);\n\n assert_eq!(Piece::Bishop(BLACK, C7).is_starting_pawn(), false);\n\n }\n", "file_path": "src/piece.rs", "rank": 48, "score": 29225.43455710727 }, { "content": " traveling.pop();\n\n\n\n for pos in traveling {\n\n if board.has_piece(pos) {\n\n return false;\n\n }\n\n }\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl core::fmt::Display for Piece {\n\n fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self.get_color() {\n", "file_path": "src/piece.rs", "rank": 49, "score": 29225.406064705665 }, { "content": " Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n\n Move::Piece(E4, H4),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_queen_legal_moves_blocked() {\n\n assert_eq!(\n\n Piece::Queen(WHITE, D1).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n assert_eq!(\n\n Piece::Queen(BLACK, D8).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/piece.rs", "rank": 50, "score": 29225.334989757404 }, { "content": " Move::Piece(D4, D6),\n\n Move::Piece(D4, D7),\n\n Move::Piece(D4, D8),\n\n Move::Piece(D4, A4),\n\n Move::Piece(D4, B4),\n\n Move::Piece(D4, C4),\n\n Move::Piece(D4, E4),\n\n Move::Piece(D4, 
F4),\n\n Move::Piece(D4, G4),\n\n Move::Piece(D4, H4),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_queen_legal_moves_piece_on_line() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Queen(WHITE, E4))\n\n .piece(Piece::Rook(BLACK, C4))\n", "file_path": "src/piece.rs", "rank": 51, "score": 29225.27363302383 }, { "content": " if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(F1, C4)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(F8, C5)) {\n\n board = next_board;\n\n }\n\n assert_eq!(\n\n Piece::King(WHITE, E1).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E1, F1),\n\n Move::Piece(E1, E2),\n\n Move::KingSideCastle, // Castle\n\n ]\n\n );\n\n // Also black can castle now\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(C2, C3)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(G8, F6)) {\n\n board = next_board;\n", "file_path": "src/piece.rs", "rank": 52, "score": 29225.260216093637 }, { "content": " assert_eq!(Piece::Queen(WHITE, F6).get_weighted_value(), 90.5);\n\n assert_eq!(Piece::Queen(BLACK, F6).get_weighted_value(), 90.5);\n\n assert_eq!(Piece::Pawn(WHITE, H4).get_weighted_value(), 10.0);\n\n assert_eq!(Piece::Pawn(BLACK, H4).get_weighted_value(), 10.5);\n\n assert_eq!(Piece::Rook(WHITE, A2).get_weighted_value(), 49.5);\n\n assert_eq!(Piece::Rook(BLACK, A2).get_weighted_value(), 50.5);\n\n }\n\n\n\n #[test]\n\n fn is_king() {\n\n assert_eq!(Piece::King(WHITE, D4).is_king(), true);\n\n assert_eq!(Piece::Bishop(BLACK, C2).is_king(), false);\n\n }\n\n\n\n #[test]\n\n fn is_queen() {\n\n assert_eq!(Piece::Queen(WHITE, D4).is_queen(), true);\n\n assert_eq!(Piece::Bishop(BLACK, C2).is_queen(), false);\n\n }\n\n\n", "file_path": "src/piece.rs", "rank": 53, "score": 29225.1682628821 }, { "content": " Self::King(_, pos) => pos.is_adjacent_to(new_pos),\n\n 
Self::Queen(_, pos) => Self::is_legal_queen_attack(pos, new_pos, board),\n\n Self::Rook(_, pos) => Self::is_legal_rook_attack(pos, new_pos, board),\n\n Self::Bishop(_, pos) => Self::is_legal_bishop_attack(pos, new_pos, board),\n\n\n\n Self::Knight(_, pos) => pos.is_knight_move(new_pos),\n\n }\n\n }\n\n\n\n // -- private\n\n\n\n /// ### get_pawn_legal_moves\n\n ///\n\n /// Get all legal moves for provided pawn\n\n fn get_pawn_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result: Vec<Move> = Vec::new();\n\n let up = pos.pawn_up(ally_color);\n\n let next_up = up.pawn_up(ally_color);\n\n let up_left = up.next_left();\n\n let up_right = up.next_right();\n", "file_path": "src/piece.rs", "rank": 54, "score": 29225.003969997786 }, { "content": " assert_eq!(\n\n Piece::Queen(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n\n Move::Piece(E4, E8),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_rook_legal_moves_free() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Rook(WHITE, E4))\n\n .build();\n\n assert_eq!(\n", "file_path": "src/piece.rs", "rank": 55, "score": 29225.003083389416 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_knight_legal_moves_free() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n .piece(Piece::Knight(WHITE, E4))\n\n .build();\n\n assert_eq!(\n\n Piece::Knight(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, C5),\n\n Move::Piece(E4, D6),\n\n Move::Piece(E4, C3),\n\n Move::Piece(E4, D2),\n\n Move::Piece(E4, G5),\n\n Move::Piece(E4, F6),\n\n Move::Piece(E4, G3),\n", "file_path": "src/piece.rs", "rank": 56, "score": 29224.987534190772 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_bishop_legal_moves_free() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n\n 
.piece(Piece::Bishop(WHITE, E4))\n\n .build();\n\n assert_eq!(\n\n Piece::Bishop(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, B1),\n\n Move::Piece(E4, H1),\n\n Move::Piece(E4, C2),\n\n Move::Piece(E4, G2),\n\n Move::Piece(E4, D3),\n\n Move::Piece(E4, F3),\n\n Move::Piece(E4, D5),\n", "file_path": "src/piece.rs", "rank": 57, "score": 29224.987534190772 }, { "content": " }\n\n\n\n #[test]\n\n fn get_material_value() {\n\n assert_eq!(Piece::Bishop(WHITE, D4).get_material_value(), 3);\n\n assert_eq!(Piece::King(BLACK, D4).get_material_value(), 99999);\n\n assert_eq!(Piece::Knight(WHITE, D4).get_material_value(), 3);\n\n assert_eq!(Piece::Queen(BLACK, D4).get_material_value(), 9);\n\n assert_eq!(Piece::Pawn(WHITE, D4).get_material_value(), 1);\n\n assert_eq!(Piece::Rook(BLACK, D4).get_material_value(), 5);\n\n }\n\n\n\n #[test]\n\n fn get_weighted_value() {\n\n assert_eq!(Piece::Bishop(WHITE, C2).get_weighted_value(), 30.0);\n\n assert_eq!(Piece::Bishop(BLACK, C2).get_weighted_value(), 30.0);\n\n assert_eq!(Piece::King(WHITE, D4).get_weighted_value(), 999986.0);\n\n assert_eq!(Piece::King(BLACK, D4).get_weighted_value(), 999985.0);\n\n assert_eq!(Piece::Knight(WHITE, E5).get_weighted_value(), 32.0);\n\n assert_eq!(Piece::Knight(BLACK, E5).get_weighted_value(), 32.0);\n", "file_path": "src/piece.rs", "rank": 58, "score": 29224.97609444461 }, { "content": " Move::Piece(E4, F5),\n\n Move::Piece(E4, C6),\n\n Move::Piece(E4, G6),\n\n Move::Piece(E4, B7),\n\n Move::Piece(E4, H7),\n\n Move::Piece(E4, A8),\n\n ]\n\n );\n\n }\n\n #[test]\n\n fn get_bishop_legal_moves_blocked() {\n\n assert_eq!(\n\n Piece::Bishop(WHITE, C1).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n assert_eq!(\n\n Piece::Bishop(BLACK, F8).get_legal_moves(&Board::default()),\n\n vec![]\n\n );\n\n }\n", "file_path": "src/piece.rs", "rank": 59, "score": 29224.888958025982 }, { "content": " if up_left.is_on_board() && board.has_enemy_piece(up_left, ally_color) {\n\n 
result.push(Move::Piece(pos, up.next_left()))\n\n }\n\n // Check up_right\n\n if up_right.is_on_board() && board.has_enemy_piece(up.next_right(), ally_color) {\n\n result.push(Move::Piece(pos, up.next_right()))\n\n }\n\n result\n\n }\n\n\n\n /// ### get_king_legal_moves\n\n ///\n\n /// Get all legal moves for provided king\n\n fn get_king_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result: Vec<Move> = Vec::new();\n\n for p in &[\n\n pos.next_left(),\n\n pos.next_right(),\n\n pos.next_above(),\n\n pos.next_below(),\n", "file_path": "src/piece.rs", "rank": 60, "score": 29224.860142345944 }, { "content": " #[test]\n\n fn is_rook() {\n\n assert_eq!(Piece::Rook(WHITE, D4).is_rook(), true);\n\n assert_eq!(Piece::Bishop(BLACK, C2).is_rook(), false);\n\n }\n\n\n\n #[test]\n\n fn is_bishop() {\n\n assert_eq!(Piece::Bishop(BLACK, C2).is_bishop(), true);\n\n assert_eq!(Piece::Rook(WHITE, D4).is_bishop(), false);\n\n }\n\n\n\n #[test]\n\n fn is_pawn() {\n\n assert_eq!(Piece::Pawn(BLACK, C2).is_pawn(), true);\n\n assert_eq!(Piece::Rook(WHITE, D4).is_pawn(), false);\n\n }\n\n\n\n #[test]\n\n fn is_knight() {\n", "file_path": "src/piece.rs", "rank": 61, "score": 29224.85484116587 }, { "content": " WHITE => match self {\n\n Self::King(_, _) => \"♔\",\n\n Self::Queen(_, _) => \"♕\",\n\n Self::Rook(_, _) => \"♖\",\n\n Self::Knight(_, _) => \"♘\",\n\n Self::Bishop(_, _) => \"♗\",\n\n Self::Pawn(_, _) => \"♙\",\n\n },\n\n BLACK => match self {\n\n Self::King(_, _) => \"♚\",\n\n Self::Queen(_, _) => \"♛\",\n\n Self::Rook(_, _) => \"♜\",\n\n Self::Knight(_, _) => \"♞\",\n\n Self::Bishop(_, _) => \"♝\",\n\n Self::Pawn(_, _) => \"♟\",\n\n },\n\n }\n\n )\n\n }\n\n}\n", "file_path": "src/piece.rs", "rank": 62, "score": 29224.808025467522 }, { "content": " pub fn is_promoting_pawn(&self) -> bool {\n\n if let Self::Pawn(c, pos) = self {\n\n pos.is_promoting_pawn(*c)\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### is_queenside_rook\n\n ///\n\n /// 
Is this piece in the starting position for the queenside rook?\n\n ///\n\n /// This method will only return true for rooks that are in the position\n\n /// of the queenside rook, not for any particular rook.\n\n #[inline]\n\n pub fn is_queenside_rook(&self) -> bool {\n\n if let Self::Rook(_, pos) = self {\n\n pos.is_queenside_rook(self.get_color())\n\n } else {\n\n false\n", "file_path": "src/piece.rs", "rank": 63, "score": 29224.80426719736 }, { "content": "\n\n #[test]\n\n fn is_promoting_pawn() {\n\n assert_eq!(Piece::Pawn(WHITE, G8).is_promoting_pawn(), true);\n\n assert_eq!(Piece::Pawn(WHITE, A1).is_promoting_pawn(), false);\n\n assert_eq!(Piece::Pawn(BLACK, A1).is_promoting_pawn(), true);\n\n assert_eq!(Piece::Pawn(BLACK, G8).is_promoting_pawn(), false);\n\n }\n\n\n\n #[test]\n\n fn is_queenside_rook() {\n\n assert_eq!(Piece::Rook(WHITE, A1).is_queenside_rook(), true);\n\n assert_eq!(Piece::Rook(BLACK, A8).is_queenside_rook(), true);\n\n assert_eq!(Piece::Rook(BLACK, A1).is_queenside_rook(), false);\n\n assert_eq!(Piece::Rook(WHITE, A8).is_queenside_rook(), false);\n\n }\n\n\n\n #[test]\n\n fn is_kingside_rook() {\n\n assert_eq!(Piece::Rook(WHITE, H1).is_kingside_rook(), true);\n", "file_path": "src/piece.rs", "rank": 64, "score": 29224.76164521447 }, { "content": " Self::King(_, _) => 99999,\n\n Self::Queen(_, _) => 9,\n\n Self::Rook(_, _) => 5,\n\n Self::Bishop(_, _) => 3,\n\n Self::Knight(_, _) => 3,\n\n Self::Pawn(_, _) => 1,\n\n }\n\n }\n\n\n\n /// ### get_weighted_value\n\n ///\n\n /// Get the weighted value of a piece. This simply factors in position\n\n /// to the pieces value. For example, a knight that is in the center is\n\n /// more favorable than a knight on the side of the board. 
Similarly,\n\n /// a king in the center of the board is highly unfavorable compared to\n\n /// a king its respective side.\n\n ///\n\n /// Additionally, the weighted value of the piece is 10 times greater than\n\n /// its material value, plus or minus a weight ranging between 5.0 and -5.0.\n\n #[inline]\n", "file_path": "src/piece.rs", "rank": 65, "score": 29224.635297300763 }, { "content": " King(Color, Position),\n\n Queen(Color, Position),\n\n Rook(Color, Position),\n\n Bishop(Color, Position),\n\n Knight(Color, Position),\n\n Pawn(Color, Position),\n\n}\n\n\n\nconst WHITE_KING_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-2.0, -3.0, -3.0, -4.0, -4.0, -3.0, -3.0, -2.0],\n\n [-1.0, -2.0, -2.0, -2.0, -2.0, -2.0, -2.0, -1.0],\n\n [2.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0],\n\n [2.0, 3.0, 1.0, 0.0, 0.0, 1.0, 3.0, 2.0],\n\n];\n\n\n\nconst BLACK_KING_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n", "file_path": "src/piece.rs", "rank": 66, "score": 29224.274219782983 }, { "content": " board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(D2, D4)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(C8, B7)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(C1, G5)) {\n\n board = next_board;\n\n }\n\n assert_eq!(\n\n Piece::King(BLACK, E8).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E8, D8),\n\n Move::Piece(E8, F8),\n\n Move::KingSideCastle,\n\n Move::QueenSideCastle\n\n ]\n\n );\n", "file_path": "src/piece.rs", "rank": 67, "score": 29224.270436643197 }, { "content": " }\n\n assert_eq!(\n\n Piece::King(BLACK, E8).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E8, F8),\n\n Move::Piece(E8, E7),\n\n 
Move::KingSideCastle\n\n ]\n\n );\n\n // Allow also queenside castling\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(D1, A4)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(D8, E7)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(B1, A3)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(B7, B5)) {\n", "file_path": "src/piece.rs", "rank": 68, "score": 29224.151241816155 }, { "content": " if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(E8, C8)) {\n\n board = next_board;\n\n }\n\n assert_eq!(\n\n Piece::King(WHITE, E1).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E1, D1),\n\n Move::Piece(E1, F1),\n\n Move::Piece(E1, E2),\n\n Move::Piece(E1, D2),\n\n Move::KingSideCastle,\n\n Move::QueenSideCastle\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_check_mate() {\n\n let board: Board = BoardBuilder::default()\n\n .enable_castling()\n", "file_path": "src/piece.rs", "rank": 69, "score": 29224.142639692927 }, { "content": " }\n\n }\n\n\n\n /// ### is_kingside_rook\n\n ///\n\n /// Is this piece in the starting position for the kingside rook?\n\n ///\n\n /// This method will only return true for rooks that are in the position\n\n /// of the kingside rook, not for any particular rook.\n\n #[inline]\n\n pub fn is_kingside_rook(&self) -> bool {\n\n if let Self::Rook(_, pos) = self {\n\n pos.is_kingside_rook(self.get_color())\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### move_to\n\n ///\n", "file_path": "src/piece.rs", "rank": 70, "score": 29224.099836582867 }, { "content": " pos.next_left().next_above(),\n\n pos.next_left().next_below(),\n\n pos.next_right().next_above(),\n\n pos.next_right().next_below(),\n\n ] {\n\n if p.is_on_board() && !board.has_ally_piece(*p, ally_color) {\n\n result.push(Move::Piece(pos, *p))\n\n }\n\n 
}\n\n // Castling; don't check with else if, you can sometimes do both moves\n\n if board.can_kingside_castle(ally_color) {\n\n result.push(Move::KingSideCastle);\n\n }\n\n if board.can_queenside_castle(ally_color) {\n\n result.push(Move::QueenSideCastle);\n\n }\n\n result\n\n }\n\n\n\n /// ### get_queen_legal_moves\n", "file_path": "src/piece.rs", "rank": 71, "score": 29223.647560212263 }, { "content": " | Self::Bishop(_, p)\n\n | Self::Knight(_, p)\n\n | Self::Pawn(_, p) => *p,\n\n }\n\n }\n\n\n\n /// ### get_material_value\n\n ///\n\n /// Get the material value for a piece.\n\n /// | Name | Value |\n\n /// |-|-|\n\n /// | King | 99999 |\n\n /// | Queen | 9 |\n\n /// | Rook | 5 |\n\n /// | Bishop | 3 |\n\n /// | Knight | 3 |\n\n /// | Pawn | 1 |\n\n #[inline]\n\n pub fn get_material_value(&self) -> i32 {\n\n match self {\n", "file_path": "src/piece.rs", "rank": 72, "score": 29223.62508716112 }, { "content": " .piece(Piece::Rook(BLACK, G4))\n\n .build();\n\n assert_eq!(\n\n Piece::Queen(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, B1),\n\n Move::Piece(E4, H1),\n\n Move::Piece(E4, C2),\n\n Move::Piece(E4, G2),\n\n Move::Piece(E4, D3),\n\n Move::Piece(E4, F3),\n\n Move::Piece(E4, D5),\n\n Move::Piece(E4, F5),\n\n Move::Piece(E4, C6),\n\n Move::Piece(E4, G6),\n\n Move::Piece(E4, B7),\n\n Move::Piece(E4, H7),\n\n Move::Piece(E4, A8),\n\n Move::Piece(E4, E1),\n\n Move::Piece(E4, E2),\n", "file_path": "src/piece.rs", "rank": 73, "score": 29223.42695408107 }, { "content": " }\n\n }\n\n for col in 0..8 {\n\n let new_pos = Position::new(pos.get_row(), col);\n\n if new_pos != pos\n\n && !board.has_ally_piece(new_pos, ally_color)\n\n && new_pos.is_orthogonal_to(pos)\n\n {\n\n result.push(Move::Piece(pos, new_pos));\n\n }\n\n }\n\n result\n\n }\n\n\n\n /// ### get_bishop_legal_moves\n\n ///\n\n /// Get all legal moves for provided bishop\n\n fn get_bishop_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result: 
Vec<Move> = Vec::new();\n\n for row in 0..8 {\n", "file_path": "src/piece.rs", "rank": 74, "score": 29223.33648832273 }, { "content": " for col in 0..8 {\n\n let new_pos = Position::new(row, col);\n\n if new_pos != pos\n\n && !board.has_ally_piece(new_pos, ally_color)\n\n && new_pos.is_diagonal_to(pos)\n\n {\n\n result.push(Move::Piece(pos, new_pos));\n\n }\n\n }\n\n }\n\n result\n\n }\n\n\n\n /// ### get_pawn_legal_moves\n\n ///\n\n /// Get all legal moves for provided pawn\n\n fn get_knight_legal_moves(ally_color: Color, pos: Position, board: &Board) -> Vec<Move> {\n\n let mut result: Vec<Move> = Vec::new();\n\n for p in &[\n\n pos.next_left().next_left().next_above(),\n", "file_path": "src/piece.rs", "rank": 75, "score": 29223.16503050289 }, { "content": " ///\n\n /// Is this piece a starting pawn?\n\n ///\n\n /// A starting pawn is a pawn that has not been pushed\n\n /// yet whatsoever.\n\n #[inline]\n\n pub fn is_starting_pawn(&self) -> bool {\n\n if let Self::Pawn(c, pos) = self {\n\n pos.is_starting_pawn(*c)\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### is_promoting_pawn\n\n ///\n\n /// Is this piece a pawn which can be promoted?\n\n ///\n\n /// This means the pawn is at the last rank\n\n #[inline]\n", "file_path": "src/piece.rs", "rank": 76, "score": 29222.61026485234 }, { "content": " ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_king_legal_moves_castling() {\n\n // Kingside Castling (giuoco piano)\n\n let mut board: Board = Board::default();\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(E2, E4)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(E7, E5)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(G1, F3)) {\n\n board = next_board;\n\n }\n\n if let MoveResult::Continuing(next_board) = board.play_move(Move::Piece(B8, C6)) {\n\n board = next_board;\n\n }\n", "file_path": "src/piece.rs", "rank": 77, "score": 
29222.60572735765 }, { "content": " pos.next_left().next_above().next_above(),\n\n pos.next_left().next_left().next_below(),\n\n pos.next_left().next_below().next_below(),\n\n pos.next_right().next_right().next_above(),\n\n pos.next_right().next_above().next_above(),\n\n pos.next_right().next_right().next_below(),\n\n pos.next_right().next_below().next_below(),\n\n ] {\n\n if p.is_on_board() && !board.has_ally_piece(*p, ally_color) {\n\n result.push(Move::Piece(pos, *p))\n\n }\n\n }\n\n result\n\n }\n\n\n\n /// ### is_legal_pawn_move\n\n ///\n\n /// Checks whether provided move is legal for a pawn\n\n fn is_legal_pawn_move(\n\n ally_color: Color,\n", "file_path": "src/piece.rs", "rank": 78, "score": 29222.524200140757 }, { "content": " assert_eq!(\n\n Piece::Rook(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, A4),\n\n Move::Piece(E4, B4),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n\n Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n\n Move::Piece(E4, H4),\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn get_rook_legal_moves_piece_on_row() {\n", "file_path": "src/piece.rs", "rank": 79, "score": 29222.427928246187 }, { "content": " Piece::Rook(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, E1),\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n\n Move::Piece(E4, E8),\n\n Move::Piece(E4, A4),\n\n Move::Piece(E4, B4),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n\n Move::Piece(E4, F4),\n\n Move::Piece(E4, G4),\n\n Move::Piece(E4, H4),\n\n ]\n\n );\n\n }\n\n\n", "file_path": "src/piece.rs", "rank": 80, "score": 29222.41020746314 }, { "content": "\n\n /// ### is_legal_queen_move\n\n ///\n\n /// Checks whether provided move is legal for a queen\n\n fn is_legal_queen_move(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n // Queen is union of 
bishop and rook\n\n Self::is_legal_rook_move(pos, new_pos, board)\n\n || Self::is_legal_bishop_move(pos, new_pos, board)\n\n }\n\n\n\n /// ### is_legal_rook_move\n\n ///\n\n /// Checks whether provided move is legal for a rook\n\n fn is_legal_rook_move(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n if pos.is_orthogonal_to(new_pos) {\n\n let mut traveling = pos.orthogonals_to(new_pos);\n\n traveling.pop();\n\n\n\n for pos in traveling {\n\n if board.has_piece(pos) {\n", "file_path": "src/piece.rs", "rank": 81, "score": 29222.219635347068 }, { "content": " .build();\n\n assert_eq!(\n\n Piece::Queen(WHITE, E4).get_legal_moves(&board),\n\n vec![\n\n Move::Piece(E4, B1),\n\n Move::Piece(E4, H1),\n\n Move::Piece(E4, C2),\n\n Move::Piece(E4, G2),\n\n Move::Piece(E4, D3),\n\n Move::Piece(E4, F3),\n\n Move::Piece(E4, D5),\n\n Move::Piece(E4, F5),\n\n Move::Piece(E4, C6),\n\n Move::Piece(E4, G6),\n\n Move::Piece(E4, B7),\n\n Move::Piece(E4, H7),\n\n Move::Piece(E4, A8),\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n", "file_path": "src/piece.rs", "rank": 82, "score": 29222.1167762249 }, { "content": " }\n\n }\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### is_legal_pawn_attack\n\n ///\n\n /// Checks whether provided position is a valid attack for a pawn\n\n fn is_legal_pawn_attack(\n\n ally_color: Color,\n\n pos: Position,\n\n new_pos: Position,\n\n board: &Board,\n\n ) -> bool {\n\n let up = pos.pawn_up(ally_color);\n\n (if let Some(en_passant) = board.get_en_passant() {\n\n (en_passant == up.next_left() || en_passant == up.next_right())\n", "file_path": "src/piece.rs", "rank": 83, "score": 29221.724662629706 }, { "content": "];\n\n\n\nconst BLACK_ROOK_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
-0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5],\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n];\n\n\n\nconst WHITE_BISHOP_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0],\n\n [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-1.0, 0.0, 0.5, 1.0, 1.0, 0.5, 0.0, -1.0],\n\n [-1.0, 0.5, 0.5, 1.0, 1.0, 0.5, 0.5, -1.0],\n\n [-1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, -1.0],\n\n [-1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0],\n", "file_path": "src/piece.rs", "rank": 84, "score": 29221.531584797012 }, { "content": " [-1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, -1.0],\n\n [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0],\n\n];\n\n\n\nconst BLACK_BISHOP_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0],\n\n [-1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, -1.0],\n\n [-1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0],\n\n [-1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, -1.0],\n\n [-1.0, 0.5, 0.5, 1.0, 1.0, 0.5, 0.5, -1.0],\n\n [-1.0, 0.0, 0.5, 1.0, 1.0, 0.5, 0.0, -1.0],\n\n [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-2.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -2.0],\n\n];\n\n\n\nconst WHITE_KNIGHT_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0],\n\n [-4.0, -2.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0],\n\n [-3.0, 0.0, 1.0, 1.5, 1.5, 1.0, 0.0, -3.0],\n\n [-3.0, 0.5, 1.5, 2.0, 2.0, 1.5, 0.5, -3.0],\n", "file_path": "src/piece.rs", "rank": 85, "score": 29221.531584797012 }, { "content": "const BLACK_QUEEN_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-1.0, -0.0, -1.0, -0.5, -0.5, -0.5, -1.0, -2.0],\n\n [-1.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],\n\n [0.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],\n\n [-0.5, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],\n\n [-1.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],\n\n [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-2.0, -1.0, -1.0, -0.5, -0.5, -1.0, -1.0, -2.0],\n\n];\n\n\n\nconst 
WHITE_ROOK_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n [0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5],\n\n [0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0],\n", "file_path": "src/piece.rs", "rank": 86, "score": 29221.531584797012 }, { "content": " [-3.0, 0.0, 1.5, 2.0, 2.0, 1.5, 0.0, -3.0],\n\n [-3.0, 0.5, 1.0, 1.5, 1.5, 1.0, 0.5, -3.0],\n\n [-4.0, -2.0, 0.0, 0.5, 0.5, 0.0, -2.0, -4.0],\n\n [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0],\n\n];\n\n\n\nconst BLACK_KNIGHT_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0],\n\n [-4.0, -2.0, 0.0, 0.5, 0.5, 0.0, -2.0, -4.0],\n\n [-3.0, 0.5, 1.0, 1.5, 1.5, 1.0, 0.5, -3.0],\n\n [-3.0, 0.0, 1.5, 2.0, 2.0, 1.5, 0.0, -3.0],\n\n [-3.0, 0.5, 1.5, 2.0, 2.0, 1.5, 0.5, -3.0],\n\n [-3.0, 0.0, 1.0, 1.5, 1.5, 1.0, 0.0, -3.0],\n\n [-4.0, -2.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0],\n\n [-5.0, -4.0, -3.0, -3.0, -3.0, -3.0, -4.0, -5.0],\n\n];\n\n\n\nconst WHITE_PAWN_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],\n\n [5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0],\n", "file_path": "src/piece.rs", "rank": 87, "score": 29221.531584797012 }, { "content": " && (new_pos == en_passant)\n\n } else {\n\n false\n\n }) || new_pos == up.next_left()\n\n || new_pos == up.next_right()\n\n }\n\n\n\n /// ### is_legal_queen_attack\n\n ///\n\n /// Checks whether provided position is a valid attack for a queen\n\n fn is_legal_queen_attack(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n Self::is_legal_rook_attack(pos, new_pos, board)\n\n || Self::is_legal_bishop_attack(pos, new_pos, board)\n\n }\n\n\n\n /// ### is_legal_rook_attack\n\n ///\n\n /// Checks whether provided position is a valid attack for a rook\n\n fn 
is_legal_rook_attack(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n if pos.is_orthogonal_to(new_pos) {\n", "file_path": "src/piece.rs", "rank": 88, "score": 29220.83729282743 }, { "content": " return false;\n\n }\n\n }\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### is_legal_bishop_move\n\n ///\n\n /// Checks whether provided move is legal for a bishop\n\n fn is_legal_bishop_move(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n if pos.is_diagonal_to(new_pos) {\n\n let mut traveling = pos.diagonals_to(new_pos);\n\n traveling.pop();\n\n\n\n for pos in traveling {\n\n if board.has_piece(pos) {\n\n return false;\n", "file_path": "src/piece.rs", "rank": 89, "score": 29220.25597704626 }, { "content": " let mut traveling = pos.orthogonals_to(new_pos);\n\n traveling.pop();\n\n\n\n for pos in traveling {\n\n if board.has_piece(pos) {\n\n return false;\n\n }\n\n }\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// ### is_legal_bishop_attack\n\n ///\n\n /// Checks whether provided position is a valid attack for a bishop\n\n fn is_legal_bishop_attack(pos: Position, new_pos: Position, board: &Board) -> bool {\n\n if pos.is_diagonal_to(new_pos) {\n\n let mut traveling = pos.diagonals_to(new_pos);\n", "file_path": "src/piece.rs", "rank": 90, "score": 29219.97625706175 }, { "content": " Move::Piece(E4, D3),\n\n Move::Piece(E4, F3),\n\n Move::Piece(E4, D5),\n\n Move::Piece(E4, F5),\n\n Move::Piece(E4, C6),\n\n Move::Piece(E4, G6),\n\n Move::Piece(E4, B7),\n\n Move::Piece(E4, H7),\n\n Move::Piece(E4, A8),\n\n Move::Piece(E4, E1),\n\n Move::Piece(E4, E2),\n\n Move::Piece(E4, E3),\n\n Move::Piece(E4, E5),\n\n Move::Piece(E4, E6),\n\n Move::Piece(E4, E7),\n\n Move::Piece(E4, E8),\n\n Move::Piece(E4, A4),\n\n Move::Piece(E4, B4),\n\n Move::Piece(E4, C4),\n\n Move::Piece(E4, D4),\n", "file_path": "src/piece.rs", "rank": 91, "score": 29219.60848622284 }, { "content": " [2.0, 3.0, 1.0, 0.0, 0.0, 1.0, 3.0, 2.0],\n\n [2.0, 2.0, 0.0, 0.0, 0.0, 
0.0, 2.0, 2.0],\n\n [-1.0, -2.0, -2.0, -2.0, -2.0, -2.0, -2.0, -1.0],\n\n [-2.0, -3.0, -3.0, -4.0, -4.0, -3.0, -3.0, -2.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n [-3.0, -4.0, -4.0, -5.0, -5.0, -4.0, -4.0, -3.0],\n\n];\n\n\n\nconst WHITE_QUEEN_POSITION_WEIGHTS: [[f64; 8]; 8] = [\n\n [-2.0, -1.0, -1.0, -0.5, -0.5, -1.0, -1.0, -2.0],\n\n [-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-1.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],\n\n [-0.5, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],\n\n [0.0, 0.0, 0.5, 0.5, 0.5, 0.5, 0.0, -0.5],\n\n [-1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.0, -1.0],\n\n [-1.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, -1.0],\n\n [-1.0, -0.0, -1.0, -0.5, -0.5, -0.5, -1.0, -2.0],\n\n];\n", "file_path": "src/piece.rs", "rank": 92, "score": 29218.69896884193 }, { "content": "pub struct Board {\n\n /// the 64 squares of the chess board\n\n squares: [Square; 64],\n\n /// tracks eventually a possible en passant position\n\n en_passant: Option<Position>,\n\n /// tracks eventually a taken piece on the last turn\n\n taken_piece: Option<Piece>,\n\n /// tracks eventually the possibility to promote a pawn\n\n promotion: Option<Position>,\n\n /// castling rights for white player\n\n white_castling_rights: CastlingRights,\n\n /// castling rights for black player\n\n black_castling_rights: CastlingRights,\n\n /// describes which player has to move the next turn\n\n turn: Color,\n\n}\n\n\n\nimpl Default for Board {\n\n fn default() -> Self {\n\n BoardBuilder::default()\n", "file_path": "src/board/mod.rs", "rank": 94, "score": 26471.2700407455 }, { "content": " }\n\n}\n\n\n\nimpl Board {\n\n // -- constructors\n\n\n\n /// ### empty\n\n ///\n\n /// Create an empty Board\n\n pub fn empty() -> Self {\n\n Self {\n\n squares: [Square::empty(); 64],\n\n en_passant: None,\n\n taken_piece: None,\n\n promotion: None,\n\n white_castling_rights: CastlingRights::default(),\n\n 
black_castling_rights: CastlingRights::default(),\n\n turn: WHITE,\n\n }\n\n }\n", "file_path": "src/board/mod.rs", "rank": 96, "score": 26471.050796231946 }, { "content": " /// <https://en.wikipedia.org/wiki/Dunsany%27s_chess>\n\n pub fn dunsany() -> Self {\n\n BoardBuilder::from(Board::default())\n\n .row(Piece::Pawn(WHITE, A1))\n\n .row(Piece::Pawn(WHITE, A2))\n\n .row(Piece::Pawn(WHITE, A3))\n\n .row(Piece::Pawn(WHITE, A4))\n\n .player_moving(BLACK)\n\n .build()\n\n }\n\n\n\n // -- getters\n\n\n\n /// ### get_turn\n\n ///\n\n /// Get the color of the current player\n\n #[inline]\n\n pub fn get_turn(&self) -> Color {\n\n self.turn\n\n }\n", "file_path": "src/board/mod.rs", "rank": 98, "score": 26468.65082189119 } ]
Rust
src/direction.rs
jack-atack/Cursive
fc065e8e589c24dfa7906d9423c7b63fee095e23
use crate::vec::Vec2; use crate::XY; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Orientation { Horizontal, Vertical, } impl Orientation { pub fn pair() -> XY<Orientation> { XY::new(Orientation::Horizontal, Orientation::Vertical) } pub fn get<T: Clone>(self, v: &XY<T>) -> T { v.get(self).clone() } pub fn swap(self) -> Self { match self { Orientation::Horizontal => Orientation::Vertical, Orientation::Vertical => Orientation::Horizontal, } } pub fn get_ref<T>(self, v: &mut XY<T>) -> &mut T { match self { Orientation::Horizontal => &mut v.x, Orientation::Vertical => &mut v.y, } } pub fn stack<'a, T: Iterator<Item = &'a Vec2>>(self, iter: T) -> Vec2 { match self { Orientation::Horizontal => { iter.fold(Vec2::zero(), |a, b| a.stack_horizontal(b)) } Orientation::Vertical => { iter.fold(Vec2::zero(), |a, b| a.stack_vertical(b)) } } } pub fn make_vec(self, main_axis: usize, second_axis: usize) -> Vec2 { let mut result = Vec2::zero(); *self.get_ref(&mut result) = main_axis; *self.swap().get_ref(&mut result) = second_axis; result } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Direction { Abs(Absolute), Rel(Relative), } impl Direction { pub fn relative(self, orientation: Orientation) -> Option<Relative> { match self { Direction::Abs(abs) => abs.relative(orientation), Direction::Rel(rel) => Some(rel), } } pub fn absolute(self, orientation: Orientation) -> Absolute { match self { Direction::Abs(abs) => abs, Direction::Rel(rel) => rel.absolute(orientation), } } pub fn back() -> Self { Direction::Rel(Relative::Back) } pub fn front() -> Self { Direction::Rel(Relative::Front) } pub fn left() -> Self { Direction::Abs(Absolute::Left) } pub fn right() -> Self { Direction::Abs(Absolute::Right) } pub fn up() -> Self { Direction::Abs(Absolute::Up) } pub fn down() -> Self { Direction::Abs(Absolute::Down) } pub fn none() -> Self { Direction::Abs(Absolute::None) } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Relative { Front, Back, } impl Relative { pub fn absolute(self, 
orientation: Orientation) -> Absolute { match (orientation, self) { (Orientation::Horizontal, Relative::Front) => Absolute::Left, (Orientation::Horizontal, Relative::Back) => Absolute::Right, (Orientation::Vertical, Relative::Front) => Absolute::Up, (Orientation::Vertical, Relative::Back) => Absolute::Down, } } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Absolute { Left, Up, Right, Down, None, } impl Absolute { pub fn relative(self, orientation: Orientation) -> Option<Relative> { match (orientation, self) { (Orientation::Horizontal, Absolute::Left) | (Orientation::Vertical, Absolute::Up) => Some(Relative::Front), (Orientation::Horizontal, Absolute::Right) | (Orientation::Vertical, Absolute::Down) => Some(Relative::Back), _ => None, } } }
use crate::vec::Vec2; use crate::XY; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Orientation { Horizontal, Vertical, } impl Orientation { pub fn pair() -> XY<Orientation> { XY::new(Orientation::Horizontal, Orientation::Vertical) } pub fn get<T: Clone>(self, v: &XY<T>) -> T { v.get(self).clone() } pub fn swap(self) -> Self { match self { Orientation::Horizontal => Orientation::Vertical, Orientation::Vertical => Orientation::Horizontal, } } pub fn get_ref<T>(self, v: &mut XY<T>) -> &mut T { match self { Orientation::Horizontal => &mut v.x, Orientation::Vertical => &mut v.y, } } pub fn stack<'a, T: Iterator<Item = &'a Vec2>>(self, iter: T) -> Vec2 { match self { Orientation::Horizontal => { iter.fold(Vec2::zero(), |a, b| a.stack_horizontal(b)) } Orientation::Vertical => { iter.fold(Vec2::zero(), |a, b| a.stack_vertical(b)) } } } pub fn make_vec(self, main_axis: usize, second_axis: usize) -> Vec2 { let mut result = Vec2::zero(); *self.get_ref(&mut result) = main_axis; *self.swap().get_ref(&mut result) = second_axis; result } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Direction { Abs(Absolute), Rel(Relative), } impl Direction { pub fn relative(self, orientation: Orientation) -> Option<Relative> { match self { Direction::Abs(abs) => abs.relative(orientation), Direction::Rel(rel) => Some(rel), } } pub fn absolute(self, orientation: Orientation) -> Absolute { match self { Direction::Abs(abs) => abs, Direction::Rel(rel) => rel.absolute(orientation), } } pub fn back() -> Self { Direction::Rel(Relative::Back) } pub fn front() -> Self { Direction::Rel(Relative::Front) } pub fn left() -> Self { Direction::Abs(Absolute::Left) } pub fn right() -> Self { Direction::Abs(Absolute::Right) } pub fn up() -> Self { Direction::Abs(Absolute::Up) } pub fn down() -> Self { Direction::Abs(Absolute::Down) } pub fn none() -> Self { Direction::Abs(Absolute::None) } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Relative { Front, Back, } impl Relative { pub fn absolute(self, 
orientation: Orientation) -> Absolute { match (orientation, self) { (Orientation::Horizontal, Relative::Front) => Absolute::Left, (Orientation::Horizontal, Relative::Back) => Absolute::Right, (Orientation::Vertical, Relative::Front) => Absolute::Up, (Orientation::Vertical, Relative::Back) => Absolute::Down, } } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Absolute { Left, Up, Right, Down, None, } impl Absolute { pub fn relative(self, orientation: O
n::Horizontal, Absolute::Right) | (Orientation::Vertical, Absolute::Down) => Some(Relative::Back), _ => None, } } }
rientation) -> Option<Relative> { match (orientation, self) { (Orientation::Horizontal, Absolute::Left) | (Orientation::Vertical, Absolute::Up) => Some(Relative::Front), (Orientatio
function_block-random_span
[ { "content": "fn cap<'a, I: Iterator<Item = &'a mut usize>>(iter: I, max: usize) {\n\n let mut available = max;\n\n for item in iter {\n\n if *item > available {\n\n *item = available;\n\n }\n\n\n\n available -= *item;\n\n }\n\n}\n\n\n\nimpl LinearLayout {\n\n /// Creates a new layout with the given orientation.\n\n pub fn new(orientation: direction::Orientation) -> Self {\n\n LinearLayout {\n\n children: Vec::new(),\n\n orientation,\n\n focus: 0,\n\n cache: None,\n\n }\n", "file_path": "src/views/linear_layout.rs", "rank": 0, "score": 216632.62417401368 }, { "content": "/// Wraps a `FnMut` into a `Fn`\n\n///\n\n/// This can be used to use a `FnMut` when a callack expects a `Fn`.\n\n///\n\n/// # Note\n\n///\n\n/// If the resulting `Fn` is called recursively, subsequent calls will be\n\n/// no-ops.\n\npub fn immutify<F: FnMut(&mut Cursive)>(\n\n f: F,\n\n) -> impl for<'s> Fn(&'s mut Cursive) {\n\n let callback = RefCell::new(f);\n\n move |s| {\n\n // Here's the weird trick: if we're already borrowed,\n\n // just ignored the callback.\n\n if let Ok(mut f) = callback.try_borrow_mut() {\n\n // Beeeaaah that's ugly.\n\n // Why do we need to manually dereference here?\n\n (&mut *f)(s);\n\n }\n\n }\n\n}\n\n\n\n*/\n\n\n\n/// Macro to wrap a `FnMut` with 1 argument into a `Fn`.\n\n///\n\n/// This can wrap any `FnMut` with a single arguments (for example `&mut Cursive`).\n", "file_path": "src/utils/immutify.rs", "rank": 1, "score": 201095.8785707883 }, { "content": "/// Computes the longest suffix that fits in the given `width`.\n\n///\n\n/// Doesn't break inside elements returned by `iter`.\n\n///\n\n/// Returns the number of bytes of the longest\n\n/// suffix from `text` that fits in `width`.\n\n///\n\n/// This is a shortcut for `prefix_length(iter.rev(), width, delimiter)`\n\npub fn suffix<'a, I>(iter: I, width: usize, delimiter: &str) -> Span\n\nwhere\n\n I: DoubleEndedIterator<Item = &'a str>,\n\n{\n\n prefix(iter.rev(), width, delimiter)\n\n}\n\n\n", "file_path": 
"src/utils/lines/simple/mod.rs", "rank": 2, "score": 190800.94876694045 }, { "content": "/// Computes a prefix that fits in the given `width`.\n\n///\n\n/// Takes non-breakable elements from `iter`, while keeping the string width\n\n/// under `width` (and adding `delimiter` between each element).\n\n///\n\n/// Given `total_text = iter.collect().join(delimiter)`, the result is the\n\n/// length of the longest prefix of `width` or less cells, without breaking\n\n/// inside an element.\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// # extern crate cursive;\n\n/// extern crate unicode_segmentation;\n\n/// use unicode_segmentation::UnicodeSegmentation;\n\n///\n\n/// # use cursive::utils::lines::simple::prefix;\n\n/// # fn main() {\n\n/// let my_text = \"blah...\";\n\n/// // This returns the number of bytes for a prefix of `my_text` that\n\n/// // fits within 5 cells.\n\n/// prefix(my_text.graphemes(true), 5, \"\");\n\n/// # }\n\n/// ```\n\npub fn prefix<'a, I>(iter: I, available_width: usize, delimiter: &str) -> Span\n\nwhere\n\n I: Iterator<Item = &'a str>,\n\n{\n\n let delimiter_width = delimiter.width();\n\n let delimiter_len = delimiter.len();\n\n\n\n // `current_width` is the width of everything\n\n // before the next token, including any space.\n\n let mut current_width = 0;\n\n let sum: usize = iter\n\n .take_while(|token| {\n\n let width = token.width();\n\n if current_width + width > available_width {\n\n false\n\n } else {\n\n // Include the delimiter after this token.\n\n current_width += width;\n\n current_width += delimiter_width;\n\n true\n", "file_path": "src/utils/lines/simple/mod.rs", "rank": 3, "score": 187496.03273249464 }, { "content": "/// Loads a theme string and sets it as active.\n\npub fn load_toml(content: &str) -> Result<Theme, Error> {\n\n let table = toml::de::from_str(content)?;\n\n\n\n let mut theme = Theme::default();\n\n theme.load_toml(&table);\n\n\n\n Ok(theme)\n\n}\n\n\n", "file_path": "src/theme/mod.rs", "rank": 4, "score": 
172192.83662851673 }, { "content": "/// Draws a frame around the scrollable content.\n\n///\n\n/// `left_border` will be called for each row to draw the left border for the given line number.\n\npub fn draw_frame<T, LeftBorder, TopBorder, RightBorder, BottomBorder>(\n\n scroller: &T, printer: &Printer, mut left_border: LeftBorder,\n\n mut top_border: TopBorder, mut right_border: RightBorder,\n\n mut bottom_border: BottomBorder,\n\n) where\n\n T: Scroller,\n\n LeftBorder: FnMut(&T, &Printer, usize),\n\n TopBorder: FnMut(&T, &Printer, usize),\n\n RightBorder: FnMut(&T, &Printer, usize),\n\n BottomBorder: FnMut(&T, &Printer, usize),\n\n{\n\n let viewport = scroller.get_scroller().content_viewport();\n\n let size = printer.size.saturating_sub((1, 1));\n\n\n\n for (i, x) in (viewport.left()..=viewport.right()).enumerate() {\n\n top_border(scroller, &printer.offset((i + 1, 0)), x);\n\n bottom_border(scroller, &printer.offset((i + 1, size.y)), x);\n\n }\n\n\n\n // Also draw padding\n", "file_path": "src/view/scroll/mod.rs", "rank": 5, "score": 168634.34906189208 }, { "content": "fn show_child(s: &mut Cursive, offset: Vec2, menu: Rc<MenuTree>) {\n\n // Adds a new layer located near the item title with the menu popup.\n\n // Also adds two key callbacks on this new view, to handle `left` and\n\n // `right` key presses.\n\n // (If the view itself listens for a `left` or `right` press, it will\n\n // consume it before our OnEventView. 
This means sub-menus can properly\n\n // be entered.)\n\n s.screen_mut().add_layer_at(\n\n Position::absolute(offset),\n\n OnEventView::new(\n\n MenuPopup::new(menu)\n\n .on_dismiss(Cursive::select_menubar)\n\n .on_action(|s| s.menubar().state = State::Inactive),\n\n )\n\n .on_event(Key::Right, |s| {\n\n s.pop_layer();\n\n s.select_menubar();\n\n // Act as if we sent \"Right\" then \"Down\"\n\n s.menubar().on_event(Event::Key(Key::Right)).process(s);\n\n if let EventResult::Consumed(Some(cb)) =\n", "file_path": "src/views/menubar.rs", "rank": 6, "score": 161837.47105209643 }, { "content": "#[allow(dead_code)]\n\npub fn read_char<F>(first: u8, next: F) -> Result<char, String>\n\nwhere\n\n F: Fn() -> Option<u8>,\n\n{\n\n if first < 0x80 {\n\n return Ok(first as char);\n\n }\n\n\n\n // Number of leading 1s determines the number of bytes we'll have to read\n\n let n_bytes = match (!first).leading_zeros() {\n\n n @ 2..=6 => n as usize,\n\n 1 => return Err(\"First byte is continuation byte.\".to_string()),\n\n 7..=8 => return Err(\"WTF is this byte??\".to_string()),\n\n _ => unreachable!(),\n\n };\n\n\n\n let mut res = 0_u32;\n\n\n\n // First, get the data - only the few last bits\n\n res |= u32::from(first & make_mask(7 - n_bytes));\n", "file_path": "src/utf8.rs", "rank": 7, "score": 158496.18664836016 }, { "content": "/// Loads a theme from file and sets it as active.\n\npub fn load_theme_file<P: AsRef<Path>>(filename: P) -> Result<Theme, Error> {\n\n let content = {\n\n let mut content = String::new();\n\n let mut file = File::open(filename)?;\n\n file.read_to_string(&mut content)?;\n\n content\n\n };\n\n\n\n load_toml(&content)\n\n}\n\n\n", "file_path": "src/theme/mod.rs", "rank": 8, "score": 153640.32891344678 }, { "content": "/// Draws a box-style frame around a scrollable content.\n\n///\n\n/// Assumes horizontal lines are present in the content whenever `is_h_delim`\n\n/// returns `true` (and vertical lines when `is_v_delim` returns `true`).\n\n///\n\n/// It will 
print a box with the appropriate `├`, `┤` and so on.\n\npub fn draw_box_frame<T, IsHDelim, IsVDelim>(\n\n scroller: &T, printer: &Printer, is_h_delim: IsHDelim,\n\n is_v_delim: IsVDelim,\n\n) where\n\n T: Scroller,\n\n IsHDelim: Fn(&T, usize) -> bool,\n\n IsVDelim: Fn(&T, usize) -> bool,\n\n{\n\n draw_frame(\n\n scroller,\n\n printer,\n\n |s, printer, y| {\n\n if is_h_delim(s, y) {\n\n printer.print((0, 0), \"├\");\n\n } else {\n\n printer.print((0, 0), \"│\");\n\n }\n\n },\n\n |s, printer, x| {\n\n if is_v_delim(s, x) {\n", "file_path": "src/view/scroll/mod.rs", "rank": 9, "score": 147897.03286828537 }, { "content": "// Compare the content of the two edit views,\n\n// and update the TextView accordingly.\n\n//\n\n// We'll ignore the `content` and `cursor` arguments,\n\n// and directly retrieve the content from the `Cursive` root.\n\nfn on_edit(siv: &mut Cursive, _content: &str, _cursor: usize) {\n\n // Get handles for each view.\n\n let edit_1 = siv.find_id::<EditView>(\"1\").unwrap();\n\n let edit_2 = siv.find_id::<EditView>(\"2\").unwrap();\n\n\n\n // Directly compare references to edit_1 and edit_2.\n\n let matches = edit_1.get_content() == edit_2.get_content();\n\n\n\n siv.call_on_id(\"match\", |v: &mut TextView| {\n\n v.set_content(if matches { \"match\" } else { \"no match\" })\n\n });\n\n}\n", "file_path": "examples/refcell_view.rs", "rank": 10, "score": 147537.77507268253 }, { "content": "/// Computes the longest prefix that fits in the given width.\n\n///\n\n/// Breaks between any two graphemes.\n\npub fn simple_prefix(text: &str, width: usize) -> Span {\n\n prefix(text.graphemes(true), width, \"\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/utils/lines/simple/mod.rs", "rank": 11, "score": 143346.33397949312 }, { "content": "/// Computes the longest suffix that fits in the given `width`.\n\n///\n\n/// Breaks between any two graphemes.\n\npub fn simple_suffix(text: &str, width: usize) -> Span {\n\n suffix(text.graphemes(true), width, 
\"\")\n\n}\n\n\n", "file_path": "src/utils/lines/simple/mod.rs", "rank": 12, "score": 143346.33397949312 }, { "content": "/// Initialize the Cursive logger.\n\n///\n\n/// Make sure this is the only logger your are using.\n\n///\n\n/// Use a [`DebugView`](crate::views::DebugView) to see the logs, or use\n\n/// [`Cursive::toggle_debug_console()`](crate::Cursive::toggle_debug_console()).\n\npub fn init() {\n\n // TODO: Configure the deque size?\n\n LOGS.lock().unwrap().reserve(1_000);\n\n\n\n // This will panic if `set_logger` was already called.\n\n log::set_logger(&LOGGER).unwrap();\n\n\n\n // TODO: read the level from env variable? From argument?\n\n log::set_max_level(log::LevelFilter::Trace);\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 13, "score": 141745.06700207922 }, { "content": "/// Compute lines for the given content and width.\n\n///\n\n/// Equivalent to constructing a new `LinesIterator` and collecting it.\n\npub fn make_lines(content: &str, width: usize) -> Vec<Row> {\n\n LinesIterator::new(content, width).collect()\n\n}\n\n\n", "file_path": "src/utils/lines/simple/mod.rs", "rank": 14, "score": 138947.321230314 }, { "content": "fn find_closest_pair(pair: ColorPair) -> (i16, i16) {\n\n super::find_closest_pair(pair, ncurses::COLORS() as i16)\n\n}\n\n\n", "file_path": "src/backend/curses/n.rs", "rank": 15, "score": 135957.28628976922 }, { "content": "fn find_closest_pair(pair: ColorPair) -> (i16, i16) {\n\n super::find_closest_pair(pair, pancurses::COLORS() as i16)\n\n}\n\n\n\nimpl Backend {\n\n /// Creates a new pancurses-based backend.\n\n pub fn init() -> std::io::Result<Box<dyn backend::Backend>> {\n\n // Check the $TERM variable (at least on unix).\n\n // Otherwise we'll just abort.\n\n // TODO: On windows, is there anything to check?\n\n if cfg!(unix)\n\n && std::env::var(\"TERM\")\n\n .map(|var| var.is_empty())\n\n .unwrap_or(true)\n\n {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"$TERM is unset. 
Cannot initialize pancurses interface.\",\n\n ));\n\n }\n", "file_path": "src/backend/curses/pan.rs", "rank": 16, "score": 134363.24198006914 }, { "content": "#[allow(unused)]\n\npub fn start_resize_thread(\n\n resize_sender: Sender<()>, resize_running: Arc<AtomicBool>,\n\n) {\n\n let signals = Signals::new(&[libc::SIGWINCH]).unwrap();\n\n thread::spawn(move || {\n\n // This thread will listen to SIGWINCH events and report them.\n\n while resize_running.load(Ordering::Relaxed) {\n\n // We know it will only contain SIGWINCH signals, so no need to check.\n\n if signals.wait().count() > 0 {\n\n resize_sender.send(()).unwrap();\n\n }\n\n }\n\n });\n\n}\n", "file_path": "src/backend/resize.rs", "rank": 17, "score": 134078.01488877996 }, { "content": "fn phase_2(s: &mut Cursive) {\n\n // Now, we'll run N tasks\n\n // (It could be downloading a file, extracting an archive,\n\n // reticulating sprites, ...)\n\n let n_bars = 10;\n\n // Each task will have its own shiny counter\n\n let counters: Vec<_> = (0..n_bars).map(|_| Counter::new(0)).collect();\n\n // To make things more interesting, we'll give a random speed to each bar\n\n let speeds: Vec<_> = (0..n_bars)\n\n .map(|_| rand::thread_rng().gen_range(50, 150))\n\n .collect();\n\n\n\n let n_max = 100_000;\n\n let cb = s.cb_sink().clone();\n\n\n\n // Let's prepare the progress bars...\n\n let mut linear = LinearLayout::vertical();\n\n for c in &counters {\n\n linear.add_child(ProgressBar::new().max(n_max).with_value(c.clone()));\n\n }\n", "file_path": "examples/progress.rs", "rank": 18, "score": 133589.69782233922 }, { "content": "fn phase_1(s: &mut Cursive) {\n\n // Phase 1 is easy: a simple pre-loading.\n\n\n\n // Number of ticks\n\n let n_max = 1000;\n\n\n\n // This is the callback channel\n\n let cb = s.cb_sink().clone();\n\n\n\n s.pop_layer();\n\n s.add_layer(Dialog::around(\n\n ProgressBar::new()\n\n // We need to know how many ticks represent a full bar.\n\n .range(0, n_max)\n\n .with_task(move |counter| {\n\n // 
This closure will be called in a separate thread.\n\n fake_load(n_max, &counter);\n\n\n\n // When we're done, send a callback through the channel\n\n cb.send(Box::new(coffee_break)).unwrap();\n\n })\n\n .full_width(),\n\n ));\n\n s.set_autorefresh(true);\n\n}\n\n\n", "file_path": "examples/progress.rs", "rank": 19, "score": 133589.69782233922 }, { "content": "// Shortcut for a boxed callback (for the wrap_call_on_any method).\n\ntype BoxedCallback<'a> = Box<for<'b> FnMut(&'b mut dyn Any) + 'a>;\n\n\n\nimpl<T: View + 'static> ViewWrapper for IdView<T> {\n\n type V = T;\n\n\n\n fn with_view<F, R>(&self, f: F) -> Option<R>\n\n where\n\n F: FnOnce(&Self::V) -> R,\n\n {\n\n self.view.try_borrow().ok().map(|v| f(&*v))\n\n }\n\n\n\n fn with_view_mut<F, R>(&mut self, f: F) -> Option<R>\n\n where\n\n F: FnOnce(&mut Self::V) -> R,\n\n {\n\n self.view.try_borrow_mut().ok().map(|mut v| f(&mut *v))\n\n }\n\n\n\n fn into_inner(mut self) -> Result<Self::V, Self>\n", "file_path": "src/views/id_view.rs", "rank": 20, "score": 131690.33440614323 }, { "content": "fn final_step(s: &mut Cursive) {\n\n // A little break before things get serious.\n\n s.set_autorefresh(false);\n\n s.pop_layer();\n\n s.add_layer(\n\n Dialog::new()\n\n .title(\"Report\")\n\n .content(\n\n TextView::new(\n\n \"Time travel was a success!\\n\\\n\n We went forward a few seconds!!\",\n\n )\n\n .center(),\n\n )\n\n .button(\"That's it?\", |s| s.quit()),\n\n );\n\n}\n\n\n", "file_path": "examples/progress.rs", "rank": 21, "score": 131039.4805206093 }, { "content": "fn coffee_break(s: &mut Cursive) {\n\n // A little break before things get serious.\n\n s.set_autorefresh(false);\n\n s.pop_layer();\n\n s.add_layer(\n\n Dialog::new()\n\n .title(\"Preparation complete\")\n\n .content(TextView::new(\"Now, the real deal!\").center())\n\n .button(\"Again??\", phase_2),\n\n );\n\n}\n\n\n", "file_path": "examples/progress.rs", "rank": 22, "score": 131039.4805206093 }, { "content": "/// Concatenates chunks as long as they 
fit in the given width.\n\npub fn prefix<I>(\n\n tokens: &mut Peekable<I>, width: usize, offset: &mut ChunkPart,\n\n) -> Vec<Chunk>\n\nwhere\n\n I: Iterator<Item = Chunk>,\n\n{\n\n let mut available = width;\n\n let mut chunks = Vec::new();\n\n\n\n // Accumulate chunks until it doesn't fit.\n\n loop {\n\n // Look at the next chunk and see if it would fit.\n\n let result = {\n\n let next_chunk = match tokens.peek() {\n\n None => break,\n\n Some(chunk) => chunk,\n\n };\n\n\n\n // When considering if the chunk fits, remember that we may\n\n // already have processed part of it.\n", "file_path": "src/utils/lines/spans/prefix.rs", "rank": 23, "score": 130697.6959752263 }, { "content": "/// Loads the default theme, and returns its representation.\n\npub fn load_default() -> Theme {\n\n Theme::default()\n\n}\n", "file_path": "src/theme/mod.rs", "rank": 24, "score": 130697.6959752263 }, { "content": "fn show_popup(siv: &mut Cursive) {\n\n // Let's center the popup horizontally, but offset it down a few rows,\n\n // so the user can see both the popup and the view underneath.\n\n siv.screen_mut().add_layer_at(\n\n Position::new(Offset::Center, Offset::Parent(5)),\n\n Dialog::around(TextView::new(\"Tak!\"))\n\n .button(\"Change\", |s| {\n\n // Look for a view tagged \"text\".\n\n // We _know_ it's there, so unwrap it.\n\n s.call_on_id(\"text\", |view: &mut TextView| {\n\n let content = reverse(view.get_content().source());\n\n view.set_content(content);\n\n });\n\n })\n\n .dismiss_button(\"Ok\"),\n\n );\n\n}\n\n\n", "file_path": "examples/mutation.rs", "rank": 25, "score": 128636.53904288812 }, { "content": "type InnerCallback<T> = Rc<Box<dyn Fn(&mut T, &Event) -> Option<EventResult>>>;\n\n\n", "file_path": "src/views/on_event_view.rs", "rank": 26, "score": 128051.97572674266 }, { "content": "/// Initialise the Cursive logger, adding the ability to filter debug logs by module\n\npub fn init_for_module(module: &str) {\n\n let mut custom_module = MODULE.lock().unwrap();\n\n 
*custom_module = Some(module.to_string());\n\n\n\n // TODO: Configure the deque size?\n\n MODULE_LOGS.lock().unwrap().reserve(1_000);\n\n\n\n init();\n\n}\n", "file_path": "src/logger.rs", "rank": 27, "score": 127725.05544228505 }, { "content": "fn show_options(siv: &mut Cursive) {\n\n siv.add_layer(\n\n Dialog::new()\n\n .title(\"Select difficulty\")\n\n .content(\n\n SelectView::new()\n\n .item(\n\n \"Easy: 8x8, 10 mines\",\n\n game::Options {\n\n size: Vec2::new(8, 8),\n\n mines: 10,\n\n },\n\n )\n\n .item(\n\n \"Medium: 16x16, 40 mines\",\n\n game::Options {\n\n size: Vec2::new(16, 16),\n\n mines: 40,\n\n },\n\n )\n", "file_path": "examples/mines/main.rs", "rank": 28, "score": 126368.47359960807 }, { "content": "fn find_closest_pair(pair: ColorPair, max_colors: i16) -> (i16, i16) {\n\n (\n\n find_closest(pair.front, max_colors),\n\n find_closest(pair.back, max_colors),\n\n )\n\n}\n\n\n", "file_path": "src/backend/curses/mod.rs", "rank": 29, "score": 125312.58793442008 }, { "content": "/// Writes some bytes directly to `/dev/tty`\n\n///\n\n/// Since this is not going to be used often, we can afford to re-open the\n\n/// file every time.\n\nfn write_to_tty(bytes: &[u8]) -> io::Result<()> {\n\n let mut tty_output =\n\n File::create(\"/dev/tty\").expect(\"cursive can only run with a tty\");\n\n tty_output.write_all(bytes)?;\n\n // tty_output will be flushed automatically at the end of the function.\n\n Ok(())\n\n}\n\n\n\nimpl Backend {\n\n /// Creates a new ncurses-based backend.\n\n pub fn init() -> io::Result<Box<dyn backend::Backend>> {\n\n // Check the $TERM variable.\n\n if std::env::var(\"TERM\")\n\n .map(|var| var.is_empty())\n\n .unwrap_or(true)\n\n {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"$TERM is unset. 
Cannot initialize ncurses interface.\",\n\n ));\n", "file_path": "src/backend/curses/n.rs", "rank": 30, "score": 122192.1044110718 }, { "content": "/// Performs `View::important_area` on a `scroll::Scroller`.\n\npub fn important_area<T, ImportantArea>(\n\n scroller: &T, size: Vec2, mut important_area: ImportantArea,\n\n) -> Rect\n\nwhere\n\n T: Scroller,\n\n ImportantArea: FnMut(&T, Vec2) -> Rect,\n\n{\n\n let viewport = scroller.get_scroller().content_viewport();\n\n let area = important_area(scroller, size);\n\n let top_left = area.top_left().saturating_sub(viewport.top_left());\n\n let bot_right = area\n\n .bottom_right()\n\n .saturating_sub(viewport.top_left())\n\n .or_min(viewport.bottom_right());\n\n\n\n Rect::from_corners(top_left, bot_right)\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 31, "score": 121285.19881103317 }, { "content": "/// Performs a line-based `View::draw` on a `scroll::Scroller`.\n\n///\n\n/// This is an alternative to `scroll::draw()` when you just need to print individual lines.\n\npub fn draw_lines<T, LineDrawer>(\n\n scroller: &T, printer: &Printer, mut line_drawer: LineDrawer,\n\n) where\n\n T: Scroller,\n\n LineDrawer: FnMut(&T, &Printer, usize),\n\n{\n\n draw(scroller, printer, |s, printer| {\n\n let start = printer.content_offset.y;\n\n let end = start + printer.output_size.y;\n\n for y in start..end {\n\n let printer = printer.offset((0, y)).cropped((printer.size.x, 1));\n\n line_drawer(s, &printer, y);\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 32, "score": 121285.19881103317 }, { "content": "/// Performs `View::required_size` on a `scroll::Scroller`.\n\npub fn required_size<T, RequiredSize>(\n\n scroller: &mut T, size: Vec2, needs_relayout: bool,\n\n required_size: RequiredSize,\n\n) -> Vec2\n\nwhere\n\n T: Scroller,\n\n RequiredSize: FnMut(&mut T, Vec2) -> Vec2,\n\n{\n\n raw::required_size(\n\n size,\n\n needs_relayout,\n\n scroller,\n\n Scroller::get_scroller_mut,\n\n 
required_size,\n\n )\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 33, "score": 121285.19881103317 }, { "content": "// This will replace the current layer with a new popup.\n\n// If the name is empty, we'll show an error message instead.\n\nfn show_popup(s: &mut Cursive, name: &str) {\n\n if name.is_empty() {\n\n // Try again as many times as we need!\n\n s.add_layer(Dialog::info(\"Please enter a name!\"));\n\n } else {\n\n let content = format!(\"Hello {}!\", name);\n\n // Remove the initial popup\n\n s.pop_layer();\n\n // And put a new one instead\n\n s.add_layer(\n\n Dialog::around(TextView::new(content))\n\n .button(\"Quit\", |s| s.quit()),\n\n );\n\n }\n\n}\n", "file_path": "examples/edit.rs", "rank": 34, "score": 121277.04436762 }, { "content": "/// Look at a chunk, and decide how it could fit.\n\nfn consider_chunk(available: usize, chunk: &Chunk) -> ChunkFitResult {\n\n if chunk.width <= available {\n\n // We fits. No question about it.\n\n if chunk.hard_stop {\n\n // Still, we have to stop here.\n\n // And possibly trim a newline.\n\n ChunkFitResult::FitsBarely\n\n } else {\n\n // Nothing special here.\n\n ChunkFitResult::Fits\n\n }\n\n } else if chunk.width == available + 1 {\n\n // We're just SLIGHTLY too big!\n\n // Can we just pop something?\n\n if chunk.ends_with_space {\n\n // Yay!\n\n ChunkFitResult::FitsBarely\n\n } else {\n\n // Noo(\n\n ChunkFitResult::DoesNotFit\n\n }\n\n } else {\n\n // Can't bargain with me.\n\n ChunkFitResult::DoesNotFit\n\n }\n\n}\n", "file_path": "src/utils/lines/spans/prefix.rs", "rank": 35, "score": 119703.54100055111 }, { "content": "fn find(siv: &mut Cursive, text: &str) {\n\n // First, remove the find popup\n\n siv.pop_layer();\n\n\n\n let res = siv.call_on_id(\"text\", |v: &mut TextArea| {\n\n // Find the given text from the text area content\n\n // Possible improvement: search after the current cursor.\n\n if let Some(i) = v.get_content().find(text) {\n\n // If we found it, move the cursor\n\n 
v.set_cursor(i);\n\n Ok(())\n\n } else {\n\n // Otherwise, return an error so we can show a warning.\n\n Err(())\n\n }\n\n });\n\n\n\n if let Some(Err(())) = res {\n\n // If we didn't find anything, tell the user!\n\n siv.add_layer(Dialog::info(format!(\"`{}` not found\", text)));\n\n }\n\n}\n", "file_path": "examples/text_area.rs", "rank": 36, "score": 119132.8091591152 }, { "content": "/// Performs `View::on_event` on a `scroll::Scroller`.\n\n///\n\n/// Example:\n\n///\n\n/// ```rust,ignore\n\n/// fn on_event(&mut self, event: Event) -> EventResult {\n\n/// scroll::on_event(self, event, Self::inner_on_event, Self::inner_important_area)\n\n/// }\n\n/// ```\n\npub fn on_event<T, OnEvent, ImportantArea>(\n\n scroller: &mut T, event: Event, on_event: OnEvent,\n\n important_area: ImportantArea,\n\n) -> EventResult\n\nwhere\n\n T: Scroller,\n\n OnEvent: FnMut(&mut T, Event) -> EventResult,\n\n ImportantArea: FnMut(&T, Vec2) -> Rect,\n\n{\n\n raw::on_event(\n\n event,\n\n scroller,\n\n Scroller::get_scroller_mut,\n\n on_event,\n\n important_area,\n\n )\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 37, "score": 118660.43670289627 }, { "content": "/// Performs `View::layout` on a `scroll::Scroller`.\n\npub fn layout<T, Layout, RequiredSize>(\n\n scroller: &mut T, size: Vec2, needs_relayout: bool, layout: Layout,\n\n required_size: RequiredSize,\n\n) where\n\n T: Scroller,\n\n Layout: FnMut(&mut T, Vec2),\n\n RequiredSize: FnMut(&mut T, Vec2) -> Vec2,\n\n{\n\n raw::layout(\n\n size,\n\n needs_relayout,\n\n scroller,\n\n Scroller::get_scroller_mut,\n\n required_size,\n\n layout,\n\n );\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 38, "score": 118642.49701489169 }, { "content": "pub fn draw<Model, GetScroller, Draw>(\n\n printer: &Printer, model: &Model, mut get_scroller: GetScroller,\n\n inner_draw: Draw,\n\n) where\n\n Model: ?Sized,\n\n GetScroller: FnMut(&Model) -> &scroll::Core,\n\n Draw: FnOnce(&Model, &Printer),\n\n{\n\n let printer = 
get_scroller(model).sub_printer(printer);\n\n inner_draw(model, &printer);\n\n}\n\n\n", "file_path": "src/view/scroll/raw.rs", "rank": 39, "score": 118642.49701489169 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum ModuleFilter {\n\n All,\n\n Module,\n\n}\n\n\n", "file_path": "src/views/debug_view.rs", "rank": 40, "score": 117334.96983700953 }, { "content": "/// Moves top layer by the specified amount\n\nfn move_top(c: &mut Cursive, x_in: isize, y_in: isize) {\n\n // Step 1. Get the current position of the layer.\n\n let s = c.screen_mut();\n\n let l = LayerPosition::FromFront(0);\n\n\n\n // Step 2. add the specifed amount\n\n let pos = s.offset().saturating_add((x_in, y_in));\n\n\n\n // convert the new x and y into a position\n\n let p = Position::absolute(pos);\n\n\n\n // Step 3. Apply the new position\n\n s.reposition_layer(l, p);\n\n}\n\n\n", "file_path": "examples/position.rs", "rank": 41, "score": 117165.44428879586 }, { "content": "// Let's put the callback in a separate function to keep it clean,\n\n// but it's not required.\n\nfn show_next_window(siv: &mut Cursive, city: &str) {\n\n siv.pop_layer();\n\n let text = format!(\"{} is a great city!\", city);\n\n siv.add_layer(\n\n Dialog::around(TextView::new(text)).button(\"Quit\", |s| s.quit()),\n\n );\n\n}\n", "file_path": "examples/select.rs", "rank": 42, "score": 117107.52844253797 }, { "content": "/// Integer division that rounds up.\n\npub fn div_up<T>(p: T, q: T) -> T\n\nwhere\n\n T: Num + Clone,\n\n{\n\n let d = p.clone() / q.clone();\n\n\n\n if p % q == T::zero() {\n\n d\n\n } else {\n\n T::one() + d\n\n }\n\n}\n", "file_path": "src/div.rs", "rank": 43, "score": 116409.70458468744 }, { "content": "pub fn required_size<Model, GetScroller, RequiredSize>(\n\n constraint: Vec2, needs_relayout: bool, model: &mut Model,\n\n mut get_scroller: GetScroller, mut required_size: RequiredSize,\n\n) -> Vec2\n\nwhere\n\n Model: ?Sized,\n\n GetScroller: FnMut(&mut Model) -> &mut scroll::Core,\n\n 
RequiredSize: FnMut(&mut Model, Vec2) -> Vec2,\n\n{\n\n let (_, size) = sizes(\n\n constraint,\n\n false,\n\n needs_relayout,\n\n model,\n\n &mut get_scroller,\n\n &mut required_size,\n\n );\n\n\n\n size\n\n}\n\n\n", "file_path": "src/view/scroll/raw.rs", "rank": 44, "score": 114893.75305384866 }, { "content": "/// Parses the given string as markdown text.\n\npub fn parse<S>(input: S) -> StyledString\n\nwhere\n\n S: Into<String>,\n\n{\n\n let input = input.into();\n\n\n\n let spans = parse_spans(&input);\n\n\n\n StyledString::with_spans(input, spans)\n\n}\n\n\n\n/// Iterator that parse a markdown text and outputs styled spans.\n\npub struct Parser<'a> {\n\n first: bool,\n\n stack: Vec<Style>,\n\n input: &'a str,\n\n parser: pulldown_cmark::Parser<'a>,\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n", "file_path": "src/utils/markup/markdown.rs", "rank": 45, "score": 114358.26788616757 }, { "content": "fn main() {\n\n // Initialize the cursive logger.\n\n cursive::logger::init();\n\n\n\n // Use some logging macros from the `log` crate.\n\n error!(\"Something serious probably happened!\");\n\n warn!(\"Or did it?\");\n\n debug!(\"Logger initialized.\");\n\n info!(\"Starting!\");\n\n\n\n let mut siv = cursive::Cursive::default();\n\n siv.add_layer(cursive::views::Dialog::text(\"Press ~ to open the console.\\nPress l to generate logs.\\nPress q to quit.\"));\n\n siv.add_global_callback('q', cursive::Cursive::quit);\n\n siv.add_global_callback('~', cursive::Cursive::toggle_debug_console);\n\n\n\n siv.add_global_callback('l', |_| trace!(\"Wooo\"));\n\n\n\n siv.run();\n\n}\n", "file_path": "examples/debug_console.rs", "rank": 46, "score": 113839.59278232366 }, { "content": "fn new_game(siv: &mut Cursive, options: game::Options) {\n\n let _board = game::Board::new(options);\n\n\n\n siv.add_layer(\n\n Dialog::new()\n\n .title(\"Minesweeper\")\n\n .content(\n\n LinearLayout::horizontal()\n\n .child(Panel::new(BoardView::new(options))),\n\n )\n\n .button(\"Quit game\", |s| {\n\n 
s.pop_layer();\n\n }),\n\n );\n\n\n\n siv.add_layer(Dialog::info(\n\n \"Controls:\n\nReveal cell: left click\n\nMark as mine: right-click\n\nReveal nearby unmarked cells: middle-click\",\n\n ));\n\n}\n", "file_path": "examples/mines/main.rs", "rank": 47, "score": 113017.65752788744 }, { "content": "/// Parse the given code into one or more event.\n\n///\n\n/// If the given event code should expend into multiple events\n\n/// (for instance click expends into PRESS + RELEASE),\n\n/// the returned Vec will include those queued events.\n\n///\n\n/// The main event is returned separately to avoid allocation in most cases.\n\nfn on_mouse_event<F>(bare_event: i32, mut f: F)\n\nwhere\n\n F: FnMut(MouseEvent),\n\n{\n\n let button = get_mouse_button(bare_event);\n\n match bare_event {\n\n ncurses::BUTTON1_RELEASED\n\n | ncurses::BUTTON2_RELEASED\n\n | ncurses::BUTTON3_RELEASED => f(MouseEvent::Release(button)),\n\n ncurses::BUTTON1_PRESSED\n\n | ncurses::BUTTON2_PRESSED\n\n | ncurses::BUTTON3_PRESSED => f(MouseEvent::Press(button)),\n\n ncurses::BUTTON4_PRESSED => f(MouseEvent::WheelUp),\n\n ncurses::BUTTON5_PRESSED => f(MouseEvent::WheelDown),\n\n // BUTTON4_RELEASED? BUTTON5_RELEASED?\n\n // Do they ever happen?\n\n _ => debug!(\"Unknown event: {:032b}\", bare_event),\n\n }\n\n}\n\n\n", "file_path": "src/backend/curses/n.rs", "rank": 48, "score": 113017.65752788744 }, { "content": "/// Result of a fitness test\n\n///\n\n/// Describes how well a chunk fits in the available space.\n\nenum ChunkFitResult {\n\n /// This chunk can fit as-is\n\n Fits,\n\n\n\n /// This chunk fits, but it'll be the last one.\n\n /// Additionally, its last char may need to be removed.\n\n FitsBarely,\n\n\n\n /// This chunk doesn't fit. 
Don't even.\n\n DoesNotFit,\n\n}\n\n\n", "file_path": "src/utils/lines/spans/prefix.rs", "rank": 49, "score": 112655.12097770226 }, { "content": "pub fn layout<Model, GetScroller, RequiredSize, Layout>(\n\n size: Vec2, needs_relayout: bool, model: &mut Model,\n\n mut get_scroller: GetScroller, mut required_size: RequiredSize,\n\n mut layout: Layout,\n\n) where\n\n Model: ?Sized,\n\n GetScroller: FnMut(&mut Model) -> &mut scroll::Core,\n\n RequiredSize: FnMut(&mut Model, Vec2) -> Vec2,\n\n Layout: FnMut(&mut Model, Vec2),\n\n{\n\n get_scroller(model).set_last_size(size);\n\n\n\n // This is what we'd like\n\n let (inner_size, self_size) = sizes(\n\n size,\n\n true,\n\n needs_relayout,\n\n model,\n\n &mut get_scroller,\n\n &mut required_size,\n\n );\n\n\n\n get_scroller(model).set_inner_size(inner_size);\n\n get_scroller(model).build_cache(self_size, size);\n\n\n\n layout(model, inner_size);\n\n\n\n get_scroller(model).update_offset();\n\n}\n\n\n", "file_path": "src/view/scroll/raw.rs", "rank": 50, "score": 112532.41472787847 }, { "content": "pub fn on_event<Model, GetScroller, OnEvent, ImportantArea>(\n\n event: Event, model: &mut Model, mut get_scroller: GetScroller,\n\n mut on_event: OnEvent, mut important_area: ImportantArea,\n\n) -> EventResult\n\nwhere\n\n Model: ?Sized,\n\n GetScroller: FnMut(&mut Model) -> &mut scroll::Core,\n\n OnEvent: FnMut(&mut Model, Event) -> EventResult,\n\n ImportantArea: FnMut(&Model, Vec2) -> Rect,\n\n{\n\n let mut relative_event = event.clone();\n\n let inside = get_scroller(model).is_event_inside(&mut relative_event);\n\n let result = if inside {\n\n on_event(model, relative_event)\n\n } else {\n\n EventResult::Ignored\n\n };\n\n let inner_size = get_scroller(model).inner_size();\n\n let important = important_area(model, inner_size);\n\n get_scroller(model).on_inner_event(event, result, important)\n\n}\n", "file_path": "src/view/scroll/raw.rs", "rank": 51, "score": 112532.41472787847 }, { "content": "/// Parse the given code into 
one or more event.\n\n///\n\n/// If the given event code should expend into multiple events\n\n/// (for instance click expends into PRESS + RELEASE),\n\n/// the returned Vec will include those queued events.\n\n///\n\n/// The main event is returned separately to avoid allocation in most cases.\n\nfn on_mouse_event<F>(bare_event: mmask_t, mut f: F)\n\nwhere\n\n F: FnMut(MouseEvent),\n\n{\n\n let button = get_mouse_button(bare_event);\n\n match bare_event {\n\n pancurses::BUTTON4_PRESSED => f(MouseEvent::WheelUp),\n\n pancurses::BUTTON5_PRESSED => f(MouseEvent::WheelDown),\n\n pancurses::BUTTON1_RELEASED\n\n | pancurses::BUTTON2_RELEASED\n\n | pancurses::BUTTON3_RELEASED\n\n | pancurses::BUTTON4_RELEASED\n\n | pancurses::BUTTON5_RELEASED => f(MouseEvent::Release(button)),\n\n pancurses::BUTTON1_PRESSED\n\n | pancurses::BUTTON2_PRESSED\n\n | pancurses::BUTTON3_PRESSED => f(MouseEvent::Press(button)),\n\n pancurses::BUTTON1_CLICKED\n\n | pancurses::BUTTON2_CLICKED\n\n | pancurses::BUTTON3_CLICKED\n\n | pancurses::BUTTON4_CLICKED\n", "file_path": "src/backend/curses/pan.rs", "rank": 52, "score": 111189.64234150623 }, { "content": "/// Parse the given markdown text into a list of spans.\n\n///\n\n/// This is a shortcut for `Parser::new(input).collect()`.\n\npub fn parse_spans(input: &str) -> Vec<StyledIndexedSpan> {\n\n Parser::new(input).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::utils::span::Span;\n\n\n\n #[test]\n\n fn test_parse() {\n\n let input = r\"\n", "file_path": "src/utils/markup/markdown.rs", "rank": 53, "score": 110796.43456573052 }, { "content": "fn add_fn<F>(start: i32, with_key: F, map: &mut HashMap<i32, Event>)\n\nwhere\n\n F: Fn(Key) -> Event,\n\n{\n\n for i in 0..12 {\n\n map.insert(start + i, with_key(Key::from_f((i + 1) as u8)));\n\n }\n\n}\n\n\n", "file_path": "src/backend/curses/n.rs", "rank": 54, "score": 109927.26227501413 }, { "content": "// Iterate over a toml\n\nfn iterate_toml<'a>(\n\n table: &'a 
toml::value::Table,\n\n) -> impl Iterator<Item = (&'a str, PaletteNode)> + 'a {\n\n table.iter().flat_map(|(key, value)| {\n\n let node = match value {\n\n toml::Value::Table(table) => {\n\n // This should define a new namespace\n\n // Treat basic colors as simple string.\n\n // We'll convert them back in the merge method.\n\n let map = iterate_toml(table)\n\n .map(|(key, value)| (key.to_string(), value))\n\n .collect();\n\n // Should we only return something if it's non-empty?\n\n Some(PaletteNode::Namespace(map))\n\n }\n\n toml::Value::Array(colors) => {\n\n // This should be a list of colors - just pick the first valid one.\n\n colors\n\n .iter()\n\n .flat_map(toml::Value::as_str)\n", "file_path": "src/theme/palette.rs", "rank": 55, "score": 109347.75230776737 }, { "content": "/// Returns length * value/max\n\n///\n\n/// Constraint: `value` from 0 to `max` should, as much as possible, produce equal-sized segments\n\n/// from 0 to length.\n\n///\n\n/// Returns a tuple with:\n\n/// * The integer part of the division\n\n/// * A value between 0 and 8 (exclusive) corresponding to the remainder.\n\nfn ratio(value: usize, max: usize, length: usize) -> (usize, usize) {\n\n let integer = length * value / max;\n\n let fraction = length * value - max * integer;\n\n\n\n let fraction = fraction * 8 / max;\n\n\n\n (integer, fraction)\n\n}\n\n\n\nnew_default!(ProgressBar);\n\n\n\nimpl ProgressBar {\n\n /// Creates a new progress bar.\n\n ///\n\n /// Default values:\n\n ///\n\n /// * `min`: 0\n\n /// * `max`: 100\n\n /// * `value`: 0\n\n pub fn new() -> Self {\n", "file_path": "src/views/progress_bar.rs", "rank": 56, "score": 109200.47717355115 }, { "content": "fn record_above_set_filter(\n\n record_level: log::Level,\n\n display_filter: log::LevelFilter,\n\n) -> bool {\n\n // If no display filter set (ie, the user has not applied a log filter yet), display all logs\n\n match display_filter.to_level() {\n\n Some(display_level) => (record_level <= display_level),\n\n None => 
true\n\n }\n\n}\n\n\n", "file_path": "src/views/debug_view.rs", "rank": 57, "score": 106171.55587266857 }, { "content": "#[allow(dead_code)]\n\nfn make_mask(n: usize) -> u8 {\n\n let mut r = 0_u8;\n\n for i in 0..n {\n\n r |= 1 << i;\n\n }\n\n r\n\n}\n", "file_path": "src/utf8.rs", "rank": 58, "score": 104040.8048552974 }, { "content": "fn make_percentage(value: usize, (min, max): (usize, usize)) -> String {\n\n if value < min {\n\n return String::from(\"0 %\");\n\n }\n\n\n\n let (percentage, extra) = ratio(value - min, max - min, 100);\n\n let percentage = if extra > 4 {\n\n percentage + 1\n\n } else {\n\n percentage\n\n };\n\n format!(\"{} %\", percentage)\n\n}\n\n\n", "file_path": "src/views/progress_bar.rs", "rank": 59, "score": 103812.02304332786 }, { "content": "fn fill_key_codes<F>(target: &mut HashMap<i32, Event>, f: F)\n\nwhere\n\n F: Fn(i32) -> Option<String>,\n\n{\n\n let key_names = hashmap! {\n\n \"DC\" => Key::Del,\n\n \"DN\" => Key::Down,\n\n \"END\" => Key::End,\n\n \"HOM\" => Key::Home,\n\n \"IC\" => Key::Ins,\n\n \"LFT\" => Key::Left,\n\n \"NXT\" => Key::PageDown,\n\n \"PRV\" => Key::PageUp,\n\n \"RIT\" => Key::Right,\n\n \"UP\" => Key::Up,\n\n };\n\n\n\n for code in 512..1024 {\n\n let name = match f(code) {\n\n Some(name) => name,\n", "file_path": "src/backend/curses/mod.rs", "rank": 60, "score": 102495.55648207958 }, { "content": "/// Performs `View::draw` on a `scroll::Scroller`.\n\npub fn draw<T, Draw>(scroller: &T, printer: &Printer, draw: Draw)\n\nwhere\n\n T: Scroller,\n\n Draw: FnOnce(&T, &Printer),\n\n{\n\n raw::draw(printer, scroller, Scroller::get_scroller, draw);\n\n}\n\n\n", "file_path": "src/view/scroll/mod.rs", "rank": 61, "score": 101771.21383749772 }, { "content": "// Define this type separately to appease the Clippy god\n\ntype CallOnAny<T> = Box<dyn for<'a> FnMut(&mut T, &Selector, AnyCb<'a>)>;\n\n\n\n/// A blank view that forwards calls to closures.\n\n///\n\n/// You can use this view to easily draw your own interface.\n\npub 
struct Canvas<T> {\n\n state: T,\n\n\n\n draw: Box<dyn Fn(&T, &Printer)>,\n\n on_event: Box<dyn FnMut(&mut T, Event) -> EventResult>,\n\n required_size: Box<dyn FnMut(&mut T, Vec2) -> Vec2>,\n\n layout: Box<dyn FnMut(&mut T, Vec2)>,\n\n take_focus: Box<dyn FnMut(&mut T, Direction) -> bool>,\n\n needs_relayout: Box<dyn Fn(&T) -> bool>,\n\n focus_view: Box<dyn FnMut(&mut T, &Selector) -> Result<(), ()>>,\n\n call_on_any: CallOnAny<T>,\n\n important_area: Box<dyn Fn(&T, Vec2) -> Rect>,\n\n}\n\n\n\nimpl<T: 'static + View> Canvas<T> {\n", "file_path": "src/views/canvas.rs", "rank": 62, "score": 99193.05458731823 }, { "content": "// Function to simulate a long process.\n\nfn fake_load(n_max: usize, counter: &Counter) {\n\n for _ in 0..n_max {\n\n thread::sleep(Duration::from_millis(5));\n\n // The `counter.tick()` method increases the progress value\n\n counter.tick(1);\n\n }\n\n}\n", "file_path": "examples/progress.rs", "rank": 63, "score": 98199.00906276808 }, { "content": "fn header(level: usize) -> &'static str {\n\n &\"##########\"[..level]\n\n}\n\n\n\nimpl<'a> Iterator for Parser<'a> {\n\n type Item = StyledIndexedSpan;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n let next = match self.parser.next() {\n\n None => return None,\n\n Some(event) => event,\n\n };\n\n\n\n match next {\n\n Event::Start(tag) => match tag {\n\n // Add to the stack!\n\n Tag::Emphasis => {\n\n self.stack.push(Style::from(Effect::Italic))\n\n }\n", "file_path": "src/utils/markup/markdown.rs", "rank": 64, "score": 96054.05642943982 }, { "content": "fn sub_block(extra: usize) -> &'static str {\n\n match extra {\n\n 0 => \" \",\n\n 1 => \"▏\",\n\n 2 => \"▎\",\n\n 3 => \"▍\",\n\n 4 => \"▌\",\n\n 5 => \"▋\",\n\n 6 => \"▊\",\n\n 7 => \"▉\",\n\n _ => \"█\",\n\n }\n\n}\n\n\n\nimpl View for ProgressBar {\n\n fn draw(&self, printer: &Printer<'_, '_>) {\n\n // Now, the bar itself...\n\n let available = printer.size.x;\n\n\n\n let value = self.value.get();\n", "file_path": 
"src/views/progress_bar.rs", "rank": 65, "score": 94023.1004502685 }, { "content": "/// Internal function to aid the creation of the DebugViewFilter.\n\n/// Returns a SelectView to update the minimum severity of new logs saved in the circular buffer\n\n/// Wrapped by a Panel and BoxView for appearance\n\nfn debug_set_log_level() -> views::Panel<views::BoxView<views::ListView>> {\n\n views::Panel::new(views::BoxView::with_full_width(views::ListView::new().child(\n\n \"Set Max Log Level\",\n\n views::SelectView::new()\n\n .popup()\n\n .item(\"Debug\", log::LevelFilter::Debug)\n\n .item(\"Info\", log::LevelFilter::Info)\n\n .item(\"Warn\", log::LevelFilter::Warn)\n\n .item(\"Error\", log::LevelFilter::Error)\n\n .on_submit({\n\n move |_s, new_log_level| {\n\n log::set_max_level(*new_log_level);\n\n }\n\n }),\n\n )))\n\n}\n\n\n", "file_path": "src/views/debug_view.rs", "rank": 66, "score": 94012.01858998794 }, { "content": "/// Returns a `&str` with `length` characters `*`.\n\n///\n\n/// Only works for small `length` (1 or 2).\n\n/// Best used for single character replacement.\n\nfn make_small_stars(length: usize) -> &'static str {\n\n // TODO: be able to use any character as hidden mode?\n\n &\"****\"[..length]\n\n}\n\n\n\nimpl View for EditView {\n\n fn draw(&self, printer: &Printer<'_, '_>) {\n\n assert_eq!(\n\n printer.size.x, self.last_length,\n\n \"Was promised {}, received {}\",\n\n self.last_length, printer.size.x\n\n );\n\n\n\n let width = self.content.width();\n\n printer.with_color(self.style, |printer| {\n\n let effect = if self.enabled && printer.enabled {\n\n Effect::Reverse\n\n } else {\n\n Effect::Simple\n\n };\n", "file_path": "src/views/edit_view.rs", "rank": 67, "score": 92101.86719072246 }, { "content": "/// Internal function to aid the creation of the DebugViewFilter.\n\n/// Returns a SelectView to modify the minimum log severity displayed.\n\n/// Wrapped by a Panel and BoxView for appearance\n\nfn debug_set_log_filter(debug_view_id: &'static str) 
-> views::Panel<views::BoxView<views::ListView>> {\n\n views::Panel::new(views::BoxView::with_full_width(views::ListView::new().child(\n\n \"Filter Log Levels\",\n\n views::SelectView::new()\n\n .popup()\n\n .item(\"Debug\", log::LevelFilter::Debug)\n\n .item(\"Info\", log::LevelFilter::Info)\n\n .item(\"Warn\", log::LevelFilter::Warn)\n\n .item(\"Error\", log::LevelFilter::Error)\n\n .on_submit({\n\n move |s, new_filter| {\n\n s.call_on_id(&debug_view_id, {\n\n move |debug_view: &mut views::DebugView| {\n\n debug_view.set_filter(new_filter.clone());\n\n }\n\n });\n\n }\n\n }),\n\n )))\n\n}\n\n\n", "file_path": "src/views/debug_view.rs", "rank": 68, "score": 91926.53671765618 }, { "content": "/// Internal function to aid the creation of the DebugViewFilter.\n\n/// Returns a SelectView to modify whether all logs, or only logs relating to a custom module, are\n\n/// displayed. Wrapped by a Panel and BoxView for appearance\n\nfn debug_set_mod_filter(debug_view_id: &'static str) -> views::Panel<views::BoxView<views::ListView>> {\n\n let mut filter_module_select_view = views::SelectView::new()\n\n .popup()\n\n .item(\"All\", ModuleFilter::All)\n\n .on_submit({\n\n move |s, mod_filter| {\n\n s.call_on_id(&debug_view_id, {\n\n move |debug_view: &mut views::DebugView| {\n\n debug_view.set_module(mod_filter.clone());\n\n }\n\n });\n\n }\n\n });\n\n\n\n // If the logger has been initialised to monitor a custom module, add to the SelectView\n\n let module = logger::MODULE.lock().unwrap();\n\n if let Some(ref module_name) = *module {\n\n filter_module_select_view.add_item(module_name.to_string(), ModuleFilter::Module)\n\n };\n\n\n", "file_path": "src/views/debug_view.rs", "rank": 69, "score": 91926.42070829423 }, { "content": "fn make_rows(text: &str, width: usize) -> Vec<Row> {\n\n // We can't make rows with width=0, so force at least width=1.\n\n let width = usize::max(width, 1);\n\n LinesIterator::new(text, 
width).show_spaces().collect()\n\n}\n\n\n\nnew_default!(TextArea);\n\n\n\nimpl TextArea {\n\n /// Creates a new, empty TextArea.\n\n pub fn new() -> Self {\n\n TextArea {\n\n content: String::new(),\n\n rows: Vec::new(),\n\n enabled: true,\n\n scrollbase: ScrollBase::new().right_padding(0),\n\n size_cache: None,\n\n last_size: Vec2::zero(),\n\n cursor: 0,\n\n }\n", "file_path": "src/views/text_area.rs", "rank": 70, "score": 88284.78983931888 }, { "content": "// Gradient for the front color\n\nfn front_color(x: u8, y: u8, x_max: u8, y_max: u8) -> Color {\n\n // We return a full 24-bits RGB color, but some backends\n\n // will project it to a 256-colors palette.\n\n Color::Rgb(\n\n x * (255 / x_max),\n\n y * (255 / y_max),\n\n (x + 2 * y) * (255 / (x_max + 2 * y_max)),\n\n )\n\n}\n\n\n", "file_path": "examples/colors.rs", "rank": 71, "score": 87857.10449221743 }, { "content": "// Gradient for the background color\n\nfn back_color(x: u8, y: u8, x_max: u8, y_max: u8) -> Color {\n\n // Let's try to have a gradient in a different direction than the front color.\n\n Color::Rgb(\n\n 128 + (2 * y_max + x - 2 * y) * (128 / (x_max + 2 * y_max)),\n\n 255 - y * (255 / y_max),\n\n 255 - x * (255 / x_max),\n\n )\n\n}\n", "file_path": "examples/colors.rs", "rank": 72, "score": 87851.85411513755 }, { "content": "#[derive(Clone, Copy, PartialEq)]\n\nenum Cell {\n\n Visible(usize),\n\n Flag,\n\n Unknown,\n\n}\n\n\n", "file_path": "examples/mines/main.rs", "rank": 73, "score": 78087.49801784399 }, { "content": "#[derive(PartialEq, Debug)]\n\nenum State {\n\n /// The menubar is inactive.\n\n Inactive,\n\n /// The menubar is actively selected.\n\n ///\n\n /// It will receive input.\n\n Selected,\n\n /// The menubar is still visible, but a submenu is open.\n\n ///\n\n /// It will not receive input.\n\n Submenu,\n\n}\n\n\n\n/// Shows a single-line list of items, with pop-up menus when one is selected.\n\n///\n\n/// The [`Cursive`] root already includes a menubar\n\n/// that you just need 
to configure.\n\n///\n\n/// [`Cursive`]: ../struct.Cursive.html#method.menubar\n\npub struct Menubar {\n", "file_path": "src/views/menubar.rs", "rank": 74, "score": 78082.40367827428 }, { "content": "enum ColorRole {\n\n Foreground,\n\n Background,\n\n}\n\n\n\n/// Backend using BearLibTerminal\n\npub struct Backend {\n\n buttons_pressed: HashSet<MouseButton>,\n\n mouse_position: Vec2,\n\n}\n\n\n\nimpl Backend {\n\n /// Creates a new BearLibTerminal-based backend.\n\n pub fn init() -> Box<dyn backend::Backend> {\n\n // TODO: Add some error handling?\n\n terminal::open(\"Cursive\", 80, 24);\n\n terminal::set(terminal::config::Window::empty().resizeable(true));\n\n terminal::set(vec![\n\n terminal::config::InputFilter::Group {\n\n group: terminal::config::InputFilterGroup::Keyboard,\n", "file_path": "src/backend/blt.rs", "rank": 75, "score": 76711.18853508454 }, { "content": "/// Where should the view be on the screen (per dimension).\n\nenum Placement {\n\n /// View is floating at a specific position.\n\n Floating(Position),\n\n\n\n /// View is full-screen; it should not have a 1-cell border.\n\n Fullscreen,\n\n}\n\n\n\n/// Identifies a layer in a `StackView`.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum LayerPosition {\n\n /// Starts from the back (bottom) of the stack.\n\n FromBack(usize),\n\n /// Starts from the front (top) of the stack.\n\n FromFront(usize),\n\n}\n\n\n\nimpl Placement {\n\n pub fn compute_offset<S, A, P>(\n\n &self, size: S, available: A, parent: P,\n", "file_path": "src/views/stack_view.rs", "rank": 76, "score": 76711.18853508454 }, { "content": "#[derive(PartialEq, Clone)]\n\nenum TriggerPhase {\n\n BeforeChild,\n\n AfterChild,\n\n}\n\n\n\nimpl<T: View> OnEventView<T> {\n\n /// Wraps the given view in a new OnEventView.\n\n pub fn new(view: T) -> Self {\n\n OnEventView {\n\n view,\n\n callbacks: Vec::new(),\n\n }\n\n }\n\n\n\n /// Registers a callback when the given event is ignored by the child.\n\n ///\n\n /// Chainable 
variant.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/views/on_event_view.rs", "rank": 77, "score": 75431.52034880916 }, { "content": "/// A view that can be downcasted to its concrete type.\n\n///\n\n/// This trait is automatically implemented for any `T: View`.\n\npub trait AnyView {\n\n /// Downcast self to a `Any`.\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n /// Downcast self to a mutable `Any`.\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n /// Returns a boxed any from a boxed self.\n\n ///\n\n /// Can be used before `Box::downcast()`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// # use cursive::views::TextView;\n\n /// # use cursive::view::View;\n\n /// # fn main() {\n\n /// let boxed: Box<View> = Box::new(TextView::new(\"text\"));\n\n /// let text: Box<TextView> = boxed.as_boxed_any().downcast().unwrap();\n\n /// # }\n", "file_path": "src/view/any.rs", "rank": 78, "score": 74154.06471269269 }, { "content": "/// Generic trait to enable chainable API\n\npub trait With: Sized {\n\n /// Calls the given closure on `self`.\n\n fn with<F: FnOnce(&mut Self)>(mut self, f: F) -> Self {\n\n f(&mut self);\n\n self\n\n }\n\n\n\n /// Calls the given closure on `self`.\n\n fn try_with<E, F>(mut self, f: F) -> Result<Self, E>\n\n where\n\n F: FnOnce(&mut Self) -> Result<(), E>,\n\n {\n\n f(&mut self)?;\n\n Ok(self)\n\n }\n\n\n\n /// Calls the given closure if `condition == true`.\n\n fn with_if<F>(mut self, condition: bool, f: F) -> Self\n\n where\n\n F: FnOnce(&mut Self),\n\n {\n\n if condition {\n\n f(&mut self);\n\n }\n\n self\n\n }\n\n}\n\n\n\nimpl<T: Sized> With for T {}\n", "file_path": "src/with.rs", "rank": 79, "score": 73718.90374197383 }, { "content": "/// Provides `call_on<V: View>` to views.\n\n///\n\n/// This trait is mostly a wrapper around [`View::call_on_any`].\n\n///\n\n/// It provides a nicer interface to find a view when you know its type.\n\n///\n\n/// [`View::call_on_any`]: ./trait.View.html#method.call_on_any\n\npub trait 
Finder {\n\n /// Runs a callback on the view identified by `sel`.\n\n ///\n\n /// If the view is found, return the result of `callback`.\n\n ///\n\n /// If the view is not found, or if it is not of the asked type,\n\n /// it returns `None`.\n\n fn call_on<V, F, R>(\n\n &mut self, sel: &Selector<'_>, callback: F,\n\n ) -> Option<R>\n\n where\n\n V: View + Any,\n\n F: FnOnce(&mut V) -> R;\n\n\n\n /// Convenient method to use `call_on` with a `view::Selector::Id`.\n\n fn call_on_id<V, F, R>(&mut self, id: &str, callback: F) -> Option<R>\n\n where\n\n V: View + Any,\n\n F: FnOnce(&mut V) -> R,\n\n {\n", "file_path": "src/view/finder.rs", "rank": 80, "score": 72797.57483840935 }, { "content": "/// Trait defining the required methods to be a backend.\n\n///\n\n/// A backend is the interface between the abstract view tree and the actual\n\n/// input/output, like a terminal.\n\n///\n\n/// It usually delegates the work to a terminal-handling library like ncurses\n\n/// or termion, or it can entirely simulate a terminal and show it as a\n\n/// graphical window (`BearLibTerminal`).\n\n///\n\n/// When creating a new cursive tree with `Cursive::new()`, you will need to\n\n/// provide a backend initializer - usually their `init()` function.\n\n///\n\n/// Backends are responsible for handling input and converting it to `Event`. 
Input must be\n\n/// non-blocking, it will be polled regularly.\n\npub trait Backend {\n\n /// Polls the backend for any input.\n\n ///\n\n /// Should return immediately.\n\n fn poll_event(&mut self) -> Option<Event>;\n\n\n\n // TODO: take `self` by value?\n\n // Or implement Drop?\n\n /// Prepares to close the backend.\n\n ///\n\n /// This should clear any state in the terminal.\n\n fn finish(&mut self);\n\n\n\n /// Refresh the screen.\n\n ///\n\n /// This will be called each frame after drawing has been done.\n\n ///\n\n /// A backend could, for example, buffer any print command, and apply\n\n /// everything when refresh() is called.\n\n fn refresh(&mut self);\n", "file_path": "src/backend/mod.rs", "rank": 81, "score": 72793.32576726643 }, { "content": "/// Describes an item with a scroll core.\n\n///\n\n/// This trait is used to represent \"something that can scroll\".\n\n/// All it needs is an accessible core.\n\n///\n\n/// See the various methods in the [`scroll`](crate::view::scroll) module.\n\npub trait Scroller {\n\n /// Returns a mutable access to the scroll core.\n\n fn get_scroller_mut(&mut self) -> &mut Core;\n\n\n\n /// Returns an immutable access to the scroll core.\n\n fn get_scroller(&self) -> &Core;\n\n}\n\n\n\n/// Core system for scrolling views.\n\n///\n\n/// See also [`ScrollView`](crate::views::ScrollView).\n\n#[derive(Debug)]\n\npub struct Core {\n\n /// This is the size the child thinks we're giving him.\n\n inner_size: Vec2,\n\n\n\n /// Offset into the inner view.\n\n ///\n\n /// Our `(0,0)` will be inner's `offset`\n\n offset: Vec2,\n", "file_path": "src/view/scroll/core.rs", "rank": 82, "score": 71517.71765514123 }, { "content": "/// Generic trait for elements that can be converted into a `ViewPath`.\n\npub trait ToPath {\n\n /// Creates a path from the element.\n\n fn to_path(self) -> ViewPath;\n\n}\n\n\n\nimpl<'a> ToPath for &'a [usize] {\n\n fn to_path(self) -> ViewPath {\n\n ViewPath {\n\n path: self.to_owned(),\n\n }\n\n }\n\n}\n", 
"file_path": "src/view/view_path.rs", "rank": 83, "score": 71513.50647669955 }, { "content": "/// Describes an object that appears like a `SpannedStr`.\n\npub trait SpannedText {\n\n /// Type of span returned by `SpannedText::spans()`.\n\n type S: AsRef<IndexedCow>;\n\n\n\n /// Returns the source text.\n\n fn source(&self) -> &str;\n\n\n\n /// Returns the spans for this text.\n\n fn spans(&self) -> &[Self::S];\n\n\n\n /// Returns a `SpannedText` by reference.\n\n fn as_ref(&self) -> SpannedTextRef<'_, Self> {\n\n SpannedTextRef { r: self }\n\n }\n\n}\n\n\n\n/// A reference to another `SpannedText`.\n\npub struct SpannedTextRef<'a, C>\n\nwhere\n\n C: SpannedText + ?Sized,\n", "file_path": "src/utils/span.rs", "rank": 84, "score": 71513.50647669955 }, { "content": "fn main() {\n\n // Start as usual\n\n let mut siv = Cursive::default();\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n // Canvas lets us easily override any method.\n\n // Canvas can have states, but we don't need any here, so we use `()`.\n\n siv.add_layer(Canvas::new(()).with_draw(draw).fixed_size((20, 10)));\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "examples/colors.rs", "rank": 85, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // Create a dialog with an edit text and a button.\n\n // The user can either hit the <Ok> button,\n\n // or press Enter on the edit text.\n\n siv.add_layer(\n\n Dialog::new()\n\n .title(\"Enter your name\")\n\n // Padding is (left, right, top, bottom)\n\n .padding((1, 1, 1, 0))\n\n .content(\n\n EditView::new()\n\n // Call `show_popup` when the user presses `Enter`\n\n .on_submit(show_popup)\n\n // Give the `EditView` a name so we can refer to it later.\n\n .with_id(\"name\")\n\n // Wrap this in a `BoxView` with a fixed width.\n\n // Do this _after_ `with_id` or the name will point to the\n\n // `BoxView` instead of `EditView`!\n", "file_path": "examples/edit.rs", "rank": 86, "score": 70727.56173792123 }, { 
"content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // Some description text. We want it to be long, but not _too_ long.\n\n let text = \"This is a very simple example of linear layout. Two views \\\n\n are present, a short title above, and this text. The text \\\n\n has a fixed width, and the title is centered horizontally.\";\n\n\n\n // We'll create a dialog with a TextView serving as a title\n\n siv.add_layer(\n\n Dialog::around(\n\n LinearLayout::vertical()\n\n .child(TextView::new(\"Title\").h_align(HAlign::Center))\n\n // Use a DummyView as spacer\n\n .child(DummyView.fixed_height(1))\n\n // Disabling scrollable means the view cannot shrink.\n\n .child(TextView::new(text))\n\n // The other views will share the remaining space.\n\n .child(TextView::new(text).scrollable())\n\n .child(TextView::new(text).scrollable())\n\n .child(TextView::new(text).scrollable())\n\n .fixed_width(30),\n\n )\n\n .button(\"Quit\", |s| s.quit())\n\n .h_align(HAlign::Center),\n\n );\n\n\n\n siv.run();\n\n}\n", "file_path": "examples/linear.rs", "rank": 87, "score": 70727.56173792123 }, { "content": "fn main() {\n\n // Creates the cursive root - required for every application.\n\n let mut siv = Cursive::default();\n\n\n\n // Creates a dialog with a single \"Quit\" button\n\n siv.add_layer(\n\n // Most views can be configured in a chainable way\n\n CircularFocus::wrap_tab(\n\n Dialog::around(TextView::new(\"Hello Dialog!\"))\n\n .title(\"Cursive\")\n\n .button(\"Foo\", |_s| ())\n\n .button(\"Quit\", |s| s.quit()),\n\n ),\n\n );\n\n\n\n // Starts the event loop.\n\n siv.run();\n\n}\n", "file_path": "examples/dialog.rs", "rank": 88, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // We can quit by pressing `q`\n\n siv.add_global_callback('q', Cursive::quit);\n\n // Next Gen FPS Controls.\n\n siv.add_global_callback('w', |s| move_top(s, 0, -1));\n\n siv.add_global_callback('a', |s| move_top(s, -1, 0));\n\n 
siv.add_global_callback('s', |s| move_top(s, 0, 1));\n\n siv.add_global_callback('d', |s| move_top(s, 1, 0));\n\n\n\n // Add window to fly around.\n\n siv.add_layer(TextView::new(\n\n \"Press w,a,s,d to move the window.\\n\\\n\n Press q to quit the application.\",\n\n ));\n\n\n\n // Run the event loop\n\n siv.run();\n\n}\n", "file_path": "examples/position.rs", "rank": 89, "score": 70727.56173792123 }, { "content": "fn main() {\n\n // Read some long text from a file.\n\n let content = include_str!(\"../assets/lorem.txt\");\n\n\n\n let mut siv = Cursive::default();\n\n\n\n // We can quit by pressing q\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n // The text is too long to fit on a line, so the view will wrap lines,\n\n // and will adapt to the terminal size.\n\n siv.add_fullscreen_layer(\n\n Dialog::around(Panel::new(TextView::new(content).scrollable()))\n\n .title(\"Unicode and wide-character support\")\n\n // This is the alignment for the button\n\n .h_align(HAlign::Center)\n\n .button(\"Quit\", |s| s.quit()),\n\n );\n\n // Show a popup on top of the view.\n\n siv.add_layer(Dialog::info(\n\n \"Try resizing the terminal!\\n(Press 'q' to \\\n\n quit when you're done.)\",\n\n ));\n\n\n\n siv.run();\n\n}\n", "file_path": "examples/lorem.rs", "rank": 90, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n // Let's add a simple slider in a dialog.\n\n // Moving the slider will update the dialog's title.\n\n // And pressing \"Enter\" will show a new dialog.\n\n siv.add_layer(\n\n Dialog::around(\n\n // We give the number of steps in the constructor\n\n SliderView::horizontal(15)\n\n // Sets the initial value\n\n .value(7)\n\n .on_change(|s, v| {\n\n let title = format!(\"[ {} ]\", v);\n\n s.call_on_id(\"dialog\", |view: &mut Dialog| {\n\n view.set_title(title)\n\n });\n\n })\n", "file_path": "examples/slider.rs", "rank": 91, "score": 70727.56173792123 }, { 
"content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // We'll start slowly with a simple start button...\n\n siv.add_layer(\n\n Dialog::new()\n\n .title(\"Progress bar example\")\n\n .padding((0, 0, 1, 1))\n\n .content(Button::new(\"Start\", phase_1)),\n\n );\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "examples/progress.rs", "rank": 92, "score": 70727.56173792123 }, { "content": "fn main() {\n\n // As usual, create the Cursive root\n\n let mut siv = Cursive::default();\n\n\n\n let cb_sink = siv.cb_sink().clone();\n\n\n\n // We want to refresh the page even when no input is given.\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n // A channel will communicate data from our running task to the UI.\n\n let (tx, rx) = mpsc::channel();\n\n\n\n // Generate data in a separate thread.\n\n thread::spawn(move || {\n\n generate_logs(&tx, cb_sink);\n\n });\n\n\n\n // And sets the view to read from the other end of the channel.\n\n siv.add_layer(BufferView::new(200, rx).full_screen());\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "examples/logs.rs", "rank": 93, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n let content = \"Press Q to quit the application.\\n\\nPress P to open the \\\n\n popup.\";\n\n\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n // Let's wrap the view to give it a recognizable ID, so we can look for it.\n\n // We add the P callback on the textview only (and not globally),\n\n // so that we can't call it when the popup is already visible.\n\n siv.add_layer(\n\n OnEventView::new(TextView::new(content).with_id(\"text\"))\n\n .on_event('p', |s| show_popup(s)),\n\n );\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "examples/mutation.rs", "rank": 94, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = cursive::Cursive::default();\n\n\n\n siv.add_layer(\n\n Dialog::around(\n\n LinearLayout::vertical()\n\n .child(Button::new(\"Foo\", |s| 
s.add_layer(Dialog::info(\"Ah\"))))\n\n .child(Canvas::new(()).with_draw(draw).fixed_size((120, 40)))\n\n .child(Button::new(\"Bar\", |s| s.add_layer(Dialog::info(\"Uh\"))))\n\n .scrollable()\n\n .scroll_x(true),\n\n )\n\n .fixed_size((60, 30)),\n\n );\n\n\n\n siv.add_global_callback('q', |s| s.quit());\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "examples/scroll.rs", "rank": 95, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // We'll use a counter to name new files.\n\n let counter = AtomicUsize::new(1);\n\n\n\n // The menubar is a list of (label, menu tree) pairs.\n\n siv.menubar()\n\n // We add a new \"File\" tree\n\n .add_subtree(\n\n \"File\",\n\n MenuTree::new()\n\n // Trees are made of leaves, with are directly actionable...\n\n .leaf(\"New\", move |s| {\n\n // Here we use the counter to add an entry\n\n // in the list of \"Recent\" items.\n\n let i = counter.fetch_add(1, Ordering::Relaxed);\n\n let filename = format!(\"New {}\", i);\n\n s.menubar()\n\n .find_subtree(\"File\")\n", "file_path": "examples/menubar.rs", "rank": 96, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n let mut styled = StyledString::plain(\"Isn't \");\n\n styled.append(StyledString::styled(\"that \", Color::Dark(BaseColor::Red)));\n\n styled.append(StyledString::styled(\n\n \"cool?\",\n\n Style::from(Color::Light(BaseColor::Blue)).combine(Effect::Bold),\n\n ));\n\n\n\n // TextView can natively accept StyledString.\n\n siv.add_layer(\n\n Dialog::around(TextView::new(styled))\n\n .button(\"Hell yeah!\", |s| s.quit()),\n\n );\n\n\n\n siv.run();\n\n}\n", "file_path": "examples/markup.rs", "rank": 97, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut siv = Cursive::default();\n\n\n\n // We need to pre-create the groups for our RadioButtons.\n\n let mut color_group: RadioGroup<String> = RadioGroup::new();\n\n let mut size_group: RadioGroup<u32> = 
RadioGroup::new();\n\n\n\n siv.add_layer(\n\n Dialog::new()\n\n .title(\"Make your selection\")\n\n // We'll have two columns side-by-side\n\n .content(\n\n LinearLayout::horizontal()\n\n .child(\n\n LinearLayout::vertical()\n\n // The color group uses the label itself as stored value\n\n // By default, the first item is selected.\n\n .child(color_group.button_str(\"Red\"))\n\n .child(color_group.button_str(\"Green\"))\n\n .child(color_group.button_str(\"Blue\")),\n", "file_path": "examples/radio.rs", "rank": 98, "score": 70727.56173792123 }, { "content": "fn main() {\n\n let mut select = SelectView::new()\n\n // Center the text horizontally\n\n .h_align(HAlign::Center)\n\n // Use keyboard to jump to the pressed letters\n\n .autojump();\n\n\n\n // Read the list of cities from separate file, and fill the view with it.\n\n // (We include the file at compile-time to avoid runtime read errors.)\n\n let content = include_str!(\"../assets/cities.txt\");\n\n select.add_all_str(content.lines());\n\n\n\n // Sets the callback for when \"Enter\" is pressed.\n\n select.set_on_submit(show_next_window);\n\n\n\n // Let's override the `j` and `k` keys for navigation\n\n let select = OnEventView::new(select)\n\n .on_pre_event_inner('k', |s, _| {\n\n s.select_up(1);\n\n Some(EventResult::Consumed(None))\n", "file_path": "examples/select.rs", "rank": 99, "score": 70727.56173792123 } ]
Rust
src/gui/layout.rs
lukexor/pix-engine
447b794d57fcb4e6c5631254857d210c430f0181
use crate::{ops::clamp_size, prelude::*}; impl PixState { #[inline] pub fn same_line<O>(&mut self, offset: O) where O: Into<Option<[i32; 2]>>, { let [x, y] = self.ui.pcursor().as_array(); let offset = offset.into().unwrap_or([0; 2]); let item_pad = self.theme.spacing.item_pad; self.ui .set_cursor([x + item_pad.x() + offset[0], y + offset[1]]); self.ui.line_height = self.ui.pline_height - offset[1]; } #[inline] pub fn next_width(&mut self, width: u32) { self.ui.next_width = Some(clamp_size(width)); } pub fn tab_bar<S, I, F>(&mut self, label: S, tabs: &[I], f: F) -> PixResult<()> where S: AsRef<str>, I: AsRef<str>, F: FnOnce(usize, &mut PixState) -> PixResult<()>, { let label = label.as_ref(); let s = self; let tab_id = s.ui.get_id(&label); let colors = s.theme.colors; let fpad = s.theme.spacing.frame_pad; let ipad = s.theme.spacing.item_pad; for (i, tab_label) in tabs.iter().enumerate() { if i > 0 { s.same_line(None); } let tab_label = tab_label.as_ref(); let id = s.ui.get_id(&tab_label); let tab_label = s.ui.get_label(tab_label); let pos = s.cursor_pos(); let colors = s.theme.colors; let (width, height) = s.text_size(tab_label)?; let tab = rect![pos + fpad.x(), width, height].offset_size(2 * ipad); let hovered = s.ui.try_hover(id, &tab); let focused = s.ui.try_focus(id); let disabled = s.ui.disabled; let active = s.ui.is_active(id); s.push(); s.ui.push_cursor(); s.rect_mode(RectMode::Corner); let clip = tab.offset_size([1, 0]); s.clip(clip)?; if hovered { s.frame_cursor(&Cursor::hand())?; } let [stroke, fg, bg] = s.widget_colors(id, ColorType::SecondaryVariant); if active || focused { s.stroke(stroke); } else { s.no_stroke(); } if hovered { s.fill(fg.blended(colors.background, 0.04)); } else { s.fill(colors.background); } if active { s.rect(tab.offset([1, 1]))?; } else { s.rect(tab)?; } s.no_clip()?; s.rect_mode(RectMode::Center); s.clip(tab)?; s.set_cursor_pos(tab.center()); s.no_stroke(); let is_active_tab = i == s.ui.current_tab(tab_id); if is_active_tab { 
s.fill(colors.secondary_variant); } else if hovered | focused { s.fill(fg); } else { s.fill(colors.secondary_variant.blended(bg, 0.60)); } s.text(tab_label)?; s.no_clip()?; s.ui.pop_cursor(); s.pop(); s.ui.handle_events(id); s.advance_cursor(tab.size()); if !disabled && s.ui.was_clicked(id) { s.ui.set_current_tab(tab_id, i); } } let pos = s.cursor_pos(); let fpad = s.theme.spacing.frame_pad; s.push(); s.stroke(colors.disabled()); let y = pos.y() + 1; let line_width = s.ui_width()? - fpad.x(); s.line(line_![fpad.x(), y, line_width, y])?; s.pop(); s.advance_cursor([line_width, fpad.y()]); s.push_id(tab_id); f(s.ui.current_tab(tab_id), s)?; s.pop_id(); Ok(()) } } impl PixState { pub fn spacing(&mut self) -> PixResult<()> { let s = self; let width = s.ui_width()?; let (_, height) = s.text_size(" ")?; s.advance_cursor([width, height]); Ok(()) } pub fn indent(&mut self) -> PixResult<()> { let s = self; let (width, height) = s.text_size(" ")?; s.advance_cursor([width, height]); s.same_line(None); Ok(()) } pub fn separator(&mut self) -> PixResult<()> { let s = self; let pos = s.cursor_pos(); let colors = s.theme.colors; let pad = s.theme.spacing.frame_pad; let height = clamp_size(s.theme.font_size); let y = pos.y() + height / 2; s.push(); s.stroke(colors.disabled()); let width = s.ui_width()?; s.line(line_![pad.x(), y, width, y])?; s.pop(); s.advance_cursor([width, height]); Ok(()) } }
use crate::{ops::clamp_size, prelude::*}; impl PixState { #[inline] pub fn same_line<O>(&mut self, offset: O) where O: Into<Option<[i32; 2]>>, { let [x, y] = self.ui.pcursor().as_array(); let offset = offset.into().unwrap_or([0; 2]); let item_pad = self.theme.spacing.item_pad; self.ui .set_cursor([x + item_pad.x() + offset[0], y + offset[1]]); self.ui.line_height = self.ui.pline_height - offset[1]; } #[inline] pub fn next_width(&mut self, width: u32) { self.ui.next_width = Some(clamp_size(width)); }
} impl PixState { pub fn spacing(&mut self) -> PixResult<()> { let s = self; let width = s.ui_width()?; let (_, height) = s.text_size(" ")?; s.advance_cursor([width, height]); Ok(()) } pub fn indent(&mut self) -> PixResult<()> { let s = self; let (width, height) = s.text_size(" ")?; s.advance_cursor([width, height]); s.same_line(None); Ok(()) } pub fn separator(&mut self) -> PixResult<()> { let s = self; let pos = s.cursor_pos(); let colors = s.theme.colors; let pad = s.theme.spacing.frame_pad; let height = clamp_size(s.theme.font_size); let y = pos.y() + height / 2; s.push(); s.stroke(colors.disabled()); let width = s.ui_width()?; s.line(line_![pad.x(), y, width, y])?; s.pop(); s.advance_cursor([width, height]); Ok(()) } }
pub fn tab_bar<S, I, F>(&mut self, label: S, tabs: &[I], f: F) -> PixResult<()> where S: AsRef<str>, I: AsRef<str>, F: FnOnce(usize, &mut PixState) -> PixResult<()>, { let label = label.as_ref(); let s = self; let tab_id = s.ui.get_id(&label); let colors = s.theme.colors; let fpad = s.theme.spacing.frame_pad; let ipad = s.theme.spacing.item_pad; for (i, tab_label) in tabs.iter().enumerate() { if i > 0 { s.same_line(None); } let tab_label = tab_label.as_ref(); let id = s.ui.get_id(&tab_label); let tab_label = s.ui.get_label(tab_label); let pos = s.cursor_pos(); let colors = s.theme.colors; let (width, height) = s.text_size(tab_label)?; let tab = rect![pos + fpad.x(), width, height].offset_size(2 * ipad); let hovered = s.ui.try_hover(id, &tab); let focused = s.ui.try_focus(id); let disabled = s.ui.disabled; let active = s.ui.is_active(id); s.push(); s.ui.push_cursor(); s.rect_mode(RectMode::Corner); let clip = tab.offset_size([1, 0]); s.clip(clip)?; if hovered { s.frame_cursor(&Cursor::hand())?; } let [stroke, fg, bg] = s.widget_colors(id, ColorType::SecondaryVariant); if active || focused { s.stroke(stroke); } else { s.no_stroke(); } if hovered { s.fill(fg.blended(colors.background, 0.04)); } else { s.fill(colors.background); } if active { s.rect(tab.offset([1, 1]))?; } else { s.rect(tab)?; } s.no_clip()?; s.rect_mode(RectMode::Center); s.clip(tab)?; s.set_cursor_pos(tab.center()); s.no_stroke(); let is_active_tab = i == s.ui.current_tab(tab_id); if is_active_tab { s.fill(colors.secondary_variant); } else if hovered | focused { s.fill(fg); } else { s.fill(colors.secondary_variant.blended(bg, 0.60)); } s.text(tab_label)?; s.no_clip()?; s.ui.pop_cursor(); s.pop(); s.ui.handle_events(id); s.advance_cursor(tab.size()); if !disabled && s.ui.was_clicked(id) { s.ui.set_current_tab(tab_id, i); } } let pos = s.cursor_pos(); let fpad = s.theme.spacing.frame_pad; s.push(); s.stroke(colors.disabled()); let y = pos.y() + 1; let line_width = s.ui_width()? 
- fpad.x(); s.line(line_![fpad.x(), y, line_width, y])?; s.pop(); s.advance_cursor([line_width, fpad.y()]); s.push_id(tab_id); f(s.ui.current_tab(tab_id), s)?; s.pop_id(); Ok(()) }
function_block-full_function
[ { "content": "pub fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"Colors\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = Colors::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/colors.rs", "rank": 0, "score": 101949.36434358904 }, { "content": "pub fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"Flocking\")\n\n .with_frame_rate()\n\n .target_frame_rate(60)\n\n .build()?;\n\n let mut app = App::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/flocking.rs", "rank": 1, "score": 101949.36434358904 }, { "content": "pub fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"3D Raytracing\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = App::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/3d_raytracing.rs", "rank": 2, "score": 101949.36434358904 }, { "content": "pub fn main() -> PixResult<()> {\n\n let width = 800;\n\n let height = 600;\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(width, height)\n\n .with_title(\"Asteroids\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = Asteroids::new(width, height);\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/asteroids.rs", "rank": 3, "score": 101949.36434358904 }, { "content": "pub fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"Maze Generation & A* Search\")\n\n .with_frame_rate()\n\n .target_frame_rate(60)\n\n .build()?;\n\n let mut app = MazeApp::new(COLS, ROWS);\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/maze/main.rs", "rank": 4, "score": 99110.35671496368 }, { "content": "pub fn main() -> PixResult<()> {\n\n println!(\"Renders more smoothly using `--release` and `--features opengl`.\");\n\n let mut engine = 
PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .scale(2.0, 2.0)\n\n .with_title(\"Fluid Simulation\")\n\n .with_frame_rate()\n\n .target_frame_rate(30)\n\n .build()?;\n\n let mut app = App::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/fluid_simulation.rs", "rank": 5, "score": 99110.35671496368 }, { "content": "/// Returns a random number between `0` and a given `value`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use pix_engine::math::random;\n\n///\n\n/// let x = random(100); // x will range from (0..100]\n\n/// assert!(x >= 0 && x < 100);\n\n///\n\n/// let x = random(100.0); // x will range from (0.0..100.0]\n\n/// assert!(x >= 0.0 && x < 100.0);\n\npub fn random<T>(val: T) -> T\n\nwhere\n\n T: Num + SampleUniform + PartialOrd,\n\n{\n\n if val > T::zero() {\n\n random_rng(T::zero()..val)\n\n } else {\n\n random_rng(val..T::zero())\n\n }\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 6, "score": 87829.52035430325 }, { "content": "/// Returns a random number within a range.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use pix_engine::math::random_rng;\n\n///\n\n/// let x = random_rng(0.0..1.0); // x will range from (0.0..1.0]\n\n/// assert!(x >= 0.0 && x < 1.0);\n\n///\n\n/// let x = random_rng(20..50); // x will range from (20..50]\n\n/// assert!(x >= 20 && x < 50);\n\npub fn random_rng<T, R>(val: R) -> T\n\nwhere\n\n T: SampleUniform + PartialOrd,\n\n R: Into<Range<T>>,\n\n{\n\n let val = val.into();\n\n rand::thread_rng().gen_range(val)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 7, "score": 81354.67380795366 }, { "content": "/// Returns the [Perlin noise](https://en.wikipedia.org/wiki/Perlin_noise) value at specified coordinates.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use pix_engine::math::noise;\n\n///\n\n/// let n = noise([5.0]);\n\n/// assert!(n >= 0.0 && n < 1.0);\n\n///\n\n/// let n = noise([2.0, 1.5]);\n\n/// assert!(n >= 0.0 && n < 1.0);\n\n///\n\n/// let n = noise([2.0, 1.5, 3.0]);\n\n/// 
assert!(n >= 0.0 && n < 1.0);\n\n/// ```\n\npub fn noise<V, const N: usize>(vector: V) -> Scalar\n\nwhere\n\n V: Into<Vector<Scalar, N>>,\n\n{\n\n let v = vector.into();\n\n\n\n let values = v.as_array();\n\n let x = values.get(0).unwrap_or(&0.0).abs();\n\n let y = values.get(1).unwrap_or(&0.0).abs();\n\n let z = values.get(2).unwrap_or(&0.0).abs();\n\n\n\n let mut xi: usize = x.trunc() as usize;\n\n let mut yi: usize = y.trunc() as usize;\n\n let mut zi: usize = z.trunc() as usize;\n\n\n\n let mut xf = x.fract();\n\n let mut yf = y.fract();\n\n let mut zf = z.fract();\n\n let (mut rxf, mut ryf);\n\n\n", "file_path": "src/math.rs", "rank": 8, "score": 76605.41091577403 }, { "content": "/// Linear interpolates between two values by a given amount.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use pix_engine::math::lerp;\n\n///\n\n/// let start = 0.0;\n\n/// let end = 5.0;\n\n/// let amount = 0.5;\n\n/// let value = lerp(start, end, amount);\n\n/// assert_eq!(value, 2.5);\n\n/// ```\n\npub fn lerp<T>(start: T, end: T, amount: T) -> T\n\nwhere\n\n T: Num + Copy + PartialOrd,\n\n{\n\n (T::one() - amount) * start + amount * end\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 9, "score": 73597.5461490758 }, { "content": "/// Linear interpolates values for a range of independent values based on depdendent values.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use pix_engine::math::lerp_map;\n\n///\n\n/// let x1 = 0;\n\n/// let x2 = 5;\n\n/// let y1 = 0;\n\n/// let y2 = 10;\n\n/// let values = lerp_map(x1, x2, y1, y2);\n\n/// assert_eq!(values, vec![0, 2, 4, 6, 8, 10]);\n\n///\n\n/// let x1 = 0.0;\n\n/// let x2 = 4.0;\n\n/// let y1 = 0.0;\n\n/// let y2 = 14.0;\n\n/// let values = lerp_map(x1, x2, y1, y2);\n\n/// assert_eq!(values, vec![0.0, 3.5, 7.0, 10.5, 14.0]);\n\n/// ```\n\npub fn lerp_map<T>(start1: T, end1: T, start2: T, end2: T) -> Vec<T>\n\nwhere\n\n T: Num + NumCast + Copy + PartialOrd + AddAssign,\n\n{\n\n if start1 == end1 {\n\n vec![start2]\n\n } else 
{\n\n let size: usize = NumCast::from(end1 - start1).unwrap_or(4);\n\n let mut values = Vec::with_capacity(size);\n\n let a = (end2 - start2) / (end1 - start1);\n\n let mut d = start2;\n\n let mut i = start1;\n\n while i <= end1 {\n\n values.push(d);\n\n d += a;\n\n i += T::one();\n\n }\n\n values\n\n }\n\n}\n", "file_path": "src/math.rs", "rank": 10, "score": 64444.17179675972 }, { "content": "/// Remaps a number from one range to another.\n\n///\n\n/// Map range defaults to `0.0..=f64::MAX` in the event casting to [f64] fails.\n\n/// NaN will result in the max mapped value.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use pix_engine::prelude::*;\n\n/// let value = 25;\n\n/// let m = map(value, 0, 100, 0, 800);\n\n/// assert_eq!(m, 200);\n\n///\n\n/// let value = 50.0;\n\n/// let m = map(value, 0.0, 100.0, 0.0, 1.0);\n\n/// assert_eq!(m, 0.5);\n\n///\n\n/// let value = f64::NAN;\n\n/// let m = map(value, 0.0, 100.0, 0.0, 1.0);\n\n/// assert!(m.is_nan());\n\n///\n\n/// let value = f64::INFINITY;\n\n/// let m = map(value, 0.0, 100.0, 0.0, 1.0);\n\n/// assert_eq!(m, 1.0);\n\n///\n\n/// let value = f64::NEG_INFINITY;\n\n/// let m = map(value, 0.0, 100.0, 0.0, 1.0);\n\n/// assert_eq!(m, 0.0);\n\n/// ```\n\npub fn map<T>(value: T, start1: T, end1: T, start2: T, end2: T) -> T\n\nwhere\n\n T: NumCast + Into<Scalar> + PartialOrd + Copy,\n\n{\n\n let default = end2;\n\n let start1 = start1.into();\n\n let end1 = end1.into();\n\n let start2 = start2.into();\n\n let end2 = end2.into();\n\n let value = value.into();\n\n let new_val = ((value - start1) / (end1 - start1)).mul_add(end2 - start2, start2);\n\n NumCast::from(new_val.clamp(start2, end2)).unwrap_or(default)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 11, "score": 63868.110062910244 }, { "content": "fn project(\n\n velx: &mut [Scalar],\n\n vely: &mut [Scalar],\n\n p: &mut [Scalar],\n\n div: &mut [Scalar],\n\n tmp: &mut [Scalar],\n\n) {\n\n let c = 1.0 / 6.0;\n\n div.par_iter_mut()\n\n 
.zip(tmp.par_iter_mut())\n\n .enumerate()\n\n .for_each(|(i, (div, tmp))| {\n\n let (x, y) = get_xy(i);\n\n if (1..NLEN).contains(&x) && (1..NHEIGHT).contains(&y) {\n\n *div = -0.5\n\n * (velx[get_idx(x + 1, y)] - velx[get_idx(x - 1, y)] + vely[get_idx(x, y + 1)]\n\n - vely[get_idx(x, y - 1)])\n\n / N_SCALAR;\n\n *tmp = *div * c;\n\n }\n", "file_path": "examples/fluid_simulation.rs", "rank": 12, "score": 60374.2940092556 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn linear_solve(\n\n b: usize,\n\n xs: &mut [Scalar],\n\n xs0: &[Scalar],\n\n a: Scalar,\n\n c: Scalar,\n\n tmp: &mut [Scalar],\n\n) {\n\n let c_recip = c.recip();\n\n tmp.par_iter_mut().enumerate().for_each(|(i, tmp)| {\n\n let (x, y) = get_xy(i);\n\n if (1..NLEN).contains(&x) && (1..NHEIGHT).contains(&y) {\n\n *tmp = (xs0[i]\n\n + a * (xs[get_idx(x + 1, y)]\n\n + xs[get_idx(x - 1, y)]\n\n + xs[get_idx(x, y + 1)]\n\n + xs[get_idx(x, y - 1)]))\n\n * c_recip;\n\n }\n\n });\n\n xs.swap_with_slice(tmp);\n\n set_bounds(b, xs);\n\n}\n\n\n", "file_path": "examples/fluid_simulation.rs", "rank": 13, "score": 58711.68354251192 }, { "content": "/// Trait for objects that can be drawn to the screen.\n\npub trait Draw {\n\n /// Draw object to the current [`PixState`] canvas.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If the renderer fails to draw to the current render target, then an error is returned.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n\n /// let rect = rect![0, 0, 100, 100];\n\n /// // The following two lines are equivalent.\n\n /// s.rect(rect)?;\n\n /// rect.draw(s)?;\n\n /// Ok(())\n\n /// }\n", "file_path": "src/draw.rs", "rank": 14, "score": 56948.54144450373 }, { "content": "#[allow(unused_variables)]\n\npub trait AppState {\n\n /// Called once upon engine start when [`PixEngine::run`] is called.\n\n ///\n\n /// This can 
be used to set up initial state like creating objects, loading files or [Image]s, or\n\n /// any additional application state that's either dynamic or relies on runtime values from\n\n /// [`PixState`].\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning an error will immediately exit the application and call [`AppState:: Prefer using\n\n /// the more specialized methods where possible, as it allows you to consume them, preventing\n\n /// the engine from encountering event-handling collisions.on_stop`].\n\n /// [`PixEngine::run`] will return the original error or any error returned from\n\n /// [`AppState::on_stop`]. Calling [`PixState::abort_quit`] during [`AppState::on_stop`] has no\n\n /// effect.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n", "file_path": "src/appstate.rs", "rank": 15, "score": 55283.19966986653 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(800, 600)\n\n .with_title(\"MyApp\")\n\n .build()?;\n\n let mut app = MyApp;\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/template.rs", "rank": 16, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"2D Raycasting\")\n\n .with_frame_rate()\n\n .icon(Image::from_read(LIGHT)?)\n\n .build()?;\n\n let mut app = RayScene::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/2d_raycasting.rs", "rank": 17, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let raw_file = match std::env::args().nth(1) {\n\n None => PathBuf::from(\"./audio/melancholy.raw\"),\n\n Some(s) => PathBuf::from(s),\n\n };\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(1024, 768)\n\n .with_title(\"Audio Demo\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = AudioDemo::new(raw_file);\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/audio.rs", 
"rank": 18, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(400, 400)\n\n .with_title(\"Window 1\")\n\n .position(10, 10)\n\n .build()?;\n\n let mut app = WindowDemo::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/windows.rs", "rank": 19, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(DEFAULT_WIDTH, DEFAULT_HEIGHT)\n\n .position(0, 0)\n\n .with_title(\"The Matrix\")\n\n .with_frame_rate()\n\n .target_frame_rate(30)\n\n .with_font(Font::from_bytes(\"Sunray\", FONT_DATA))\n\n .with_font_size(DEFAULT_FONT_SIZE)\n\n .build()?;\n\n let mut app = Matrix::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/matrix.rs", "rank": 20, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(1024, 768)\n\n .with_title(\"GUI Demo\")\n\n .with_frame_rate()\n\n .target_frame_rate(60)\n\n .build()?;\n\n let mut app = Gui::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/gui.rs", "rank": 21, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let args: Vec<_> = env::args().collect();\n\n if args.len() < 2 {\n\n Err(anyhow!(\"Usage: cargo run /path/to/image.png\"))\n\n } else {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"Image Demo\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = ImageDemo::new(&args[1])?;\n\n engine.run(&mut app)\n\n }\n\n}\n", "file_path": "examples/image.rs", "rank": 22, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"Textures\")\n\n .with_frame_rate()\n\n .build()?;\n\n let mut app = Textures::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": 
"examples/textures.rs", "rank": 23, "score": 54898.97882028201 }, { "content": "fn main() -> PixResult<()> {\n\n let mut engine = PixEngine::builder()\n\n .with_dimensions(WIDTH, HEIGHT)\n\n .with_title(\"SVG Color Constants\")\n\n .build()?;\n\n let mut app = ColorConsts::new();\n\n engine.run(&mut app)\n\n}\n", "file_path": "examples/color_constants.rs", "rank": 24, "score": 53365.77537856699 }, { "content": "fn get_idx(x: usize, y: usize) -> usize {\n\n let x = x.clamp(0, NLEN);\n\n let y = y.clamp(0, NLEN);\n\n x + y * N\n\n}\n\n\n", "file_path": "examples/fluid_simulation.rs", "rank": 25, "score": 52818.16264510415 }, { "content": "#[test]\n\n#[ignore = \"engine can only be tested in the main thread. --test-threads=1\"]\n\nfn test_run_engine_update() -> PixResult<()> {\n\n let mut eng = create_engine()?;\n\n // Quitting from on_update should exit but still run on_stop\n\n let mut app = App::new();\n\n app.quit_on_update = true;\n\n eng.run(&mut app)?;\n\n assert_eq!(app.start_count, 1, \"on_start was called\");\n\n assert_eq!(app.update_count, 1, \"on_update was called\");\n\n assert_eq!(app.stop_count, 1, \"on_stop was called\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/pix-engine.rs", "rank": 26, "score": 49407.318880080675 }, { "content": "#[test]\n\n#[ignore = \"engine can only be tested in the main thread. --test-threads=1\"]\n\nfn test_run_engine_start() -> PixResult<()> {\n\n let mut eng = create_engine()?;\n\n // Quitting from on_start should exit the game loop early\n\n let mut app = App::new();\n\n app.quit_on_start = true;\n\n eng.run(&mut app)?;\n\n assert_eq!(app.start_count, 1, \"on_start was called\");\n\n assert_eq!(app.update_count, 0, \"on_update was not called\");\n\n assert_eq!(app.stop_count, 1, \"on_stop was called\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/pix-engine.rs", "rank": 27, "score": 49407.318880080675 }, { "content": "#[test]\n\n#[ignore = \"engine can only be tested in the main thread. 
--test-threads=1\"]\n\nfn test_run_engine_stop() -> PixResult<()> {\n\n let mut eng = create_engine()?;\n\n // Aborting quit from on_stop should resume game loop\n\n let mut app = App::new();\n\n app.quit_on_update = true;\n\n app.abort_quit_on_stop = true;\n\n eng.run(&mut app)?;\n\n assert_eq!(app.start_count, 1, \"on_start was called\");\n\n // Accounts for the initial run, plus 1 more for on_stop being cancelled\n\n assert_eq!(app.update_count, 2, \"on_update was called\");\n\n assert_eq!(app.stop_count, 2, \"on_stop was called\");\n\n Ok(())\n\n}\n", "file_path": "tests/pix-engine.rs", "rank": 28, "score": 49407.318880080675 }, { "content": "/// Default floating-point number trait used math operations.\n\npub trait Float: Num + FloatTrait {}\n\n\n\nimpl<T> Num for T where\n\n T: NumTrait + NumOps + NumRef + NumAssignOps + NumAssignRef + Copy + Default + PartialOrd\n\n{\n\n}\n\n\n\nimpl<T> Float for T where T: Num + FloatTrait {}\n\n\n\nconst PERLIN_YWRAPB: usize = 4;\n\nconst PERLIN_YWRAP: usize = 1 << PERLIN_YWRAPB;\n\nconst PERLIN_ZWRAPB: usize = 8;\n\nconst PERLIN_ZWRAP: usize = 1 << PERLIN_ZWRAPB;\n\nconst PERLIN_SIZE: usize = 4095;\n\n\n\nlazy_static! {\n\n static ref PERLIN: Vec<Scalar> = {\n\n let mut perlin = Vec::with_capacity(PERLIN_SIZE + 1);\n\n for _ in 0..=PERLIN_SIZE {\n\n perlin.push(random(1.0));\n\n }\n\n perlin\n\n };\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 29, "score": 47853.225567426736 }, { "content": "fn create_engine() -> PixResult<PixEngine> {\n\n PixEngine::builder()\n\n .with_title(\"pix-engine integration test\")\n\n .position_centered()\n\n .hidden()\n\n .build()\n\n}\n\n\n", "file_path": "tests/pix-engine.rs", "rank": 30, "score": 46851.71041131443 }, { "content": "/// Trait for shape containing operations.\n\npub trait Contains<T, const N: usize> {\n\n /// The shape type. e.g. 
[`Rect<T>`].\n\n type Shape;\n\n\n\n /// Returns whether this shape contains a given [Point].\n\n fn contains_point<P>(&self, _p: P) -> bool\n\n where\n\n P: Into<Point<T, N>>;\n\n\n\n /// Returns whether this shape completely contains another shape of the same type.\n\n fn contains_shape<O>(&self, _other: O) -> bool\n\n where\n\n O: Into<Self::Shape>;\n\n}\n\n\n", "file_path": "src/shape.rs", "rank": 31, "score": 44028.04094303763 }, { "content": "/// Trait for shape intersection operations.\n\npub trait Intersects<T, const N: usize> {\n\n /// The shape type. e.g. [`Rect<T>`].\n\n type Shape;\n\n\n\n /// Returns the closest intersection point with a given line and distance along the line or\n\n /// `None` if there is no intersection.\n\n fn intersects_line<L>(&self, _line: L) -> Option<(Point<T, N>, T)>\n\n where\n\n L: Into<Line<T, N>>;\n\n\n\n /// Returns whether this shape intersects with another shape of the same type.\n\n fn intersects_shape<O>(&self, _other: O) -> bool\n\n where\n\n O: Into<Self::Shape>;\n\n}\n\n\n\nimpl PixState {\n\n /// Draw a [Point] to the current canvas. [`PixState::stroke`] controls whether the point is\n\n /// drawn or not. 
[`PixState::stroke_weight`] and [`PixState::fill`] have no effect.\n\n ///\n", "file_path": "src/shape.rs", "rank": 32, "score": 44028.04094303763 }, { "content": "fn get_xy(idx: usize) -> (usize, usize) {\n\n (idx % N, idx / N)\n\n}\n\n\n", "file_path": "examples/fluid_simulation.rs", "rank": 33, "score": 43641.54196472386 }, { "content": "fn set_bounds(b: usize, xs: &mut [Scalar]) {\n\n for i in 1..NLEN {\n\n let (_, y) = get_xy(i);\n\n if y > NHEIGHT {\n\n break;\n\n }\n\n // Top and bottom\n\n if b == 2 {\n\n xs[get_idx(i, 0)] = -xs[get_idx(i, 1)];\n\n xs[get_idx(i, N - 1)] = -xs[get_idx(i, N - 2)];\n\n } else {\n\n xs[get_idx(i, 0)] = xs[get_idx(i, 1)];\n\n xs[get_idx(i, N - 1)] = xs[get_idx(i, N - 2)];\n\n }\n\n // left and right\n\n if b == 1 {\n\n xs[get_idx(0, i)] = -xs[get_idx(1, i)];\n\n xs[get_idx(N - 1, i)] = -xs[get_idx(N - 2, i)];\n\n } else {\n\n xs[get_idx(0, i)] = xs[get_idx(1, i)];\n", "file_path": "examples/fluid_simulation.rs", "rank": 34, "score": 42567.59719796151 }, { "content": "/// Default number trait used for objects and shapes.\n\npub trait Num: NumTrait + NumOps + NumAssignOps + Copy + Default + PartialOrd {}\n\n\n", "file_path": "src/math.rs", "rank": 35, "score": 35335.84901408081 }, { "content": "fn advect(b: usize, d: &mut [Scalar], d0: &[Scalar], velx: &[Scalar], vely: &[Scalar]) {\n\n d.par_iter_mut().enumerate().for_each(|(i, d)| {\n\n let (x, y) = get_xy(i);\n\n if (1..NLEN).contains(&x) && (1..NHEIGHT).contains(&y) {\n\n let mut x = x as Scalar - (DT * N_SCALAR * velx[i]);\n\n let mut y = y as Scalar - (DT * N_SCALAR * vely[i]);\n\n\n\n if x < 0.5 {\n\n x = 0.5;\n\n }\n\n if x > N_SCALAR + 0.5 {\n\n x = N_SCALAR + 0.5;\n\n }\n\n let i0 = x.floor() as usize;\n\n let i1 = i0 + 1;\n\n if y < 0.5 {\n\n y = 0.5;\n\n }\n\n if y > N_SCALAR + 0.5 {\n\n y = N_SCALAR + 0.5;\n", "file_path": "examples/fluid_simulation.rs", "rank": 36, "score": 34969.85924652954 }, { "content": "fn intersect_ray_sphere(origin: PointF3, direction: 
VectorF3, obj: &SphereObj) -> (Scalar, Scalar) {\n\n let r = obj.sphere.radius();\n\n let center_origin = origin - obj.sphere.center();\n\n\n\n let a = direction.mag_sq();\n\n let b = 2.0 * center_origin.dot(direction);\n\n let c = center_origin.mag_sq() - (r * r);\n\n\n\n let discriminant = b * b - 4.0 * a * c;\n\n if discriminant < 0.0 {\n\n return (Scalar::INFINITY, Scalar::INFINITY);\n\n }\n\n\n\n let sqrt = discriminant.sqrt();\n\n let two_a = 2.0 * a;\n\n let t1 = (-b + sqrt) / two_a;\n\n let t2 = (-b - sqrt) / two_a;\n\n (t1, t2)\n\n}\n\n\n", "file_path": "examples/3d_raytracing.rs", "rank": 37, "score": 33764.97308820041 }, { "content": "fn diffuse(b: usize, xs: &mut [Scalar], xs0: &[Scalar], amt: Scalar, tmp: &mut [Scalar]) {\n\n let a = DT * amt * (N - 2).pow(2) as Scalar;\n\n linear_solve(b, xs, xs0, a, 1.0 + 6.0 * a, tmp);\n\n}\n\n\n", "file_path": "examples/fluid_simulation.rs", "rank": 38, "score": 32930.086215144256 }, { "content": " /// Constructs an `Ellipse` at position `(x, y)` with `width` and `height`.\n\n pub const fn new(x: T, y: T, width: T, height: T) -> Self {\n\n Self([x, y, width, height])\n\n }\n\n}\n\n\n\nimpl<T: Copy> Ellipse<T> {\n\n /// Returns `Ellipse` values as `[x, y, width, height]`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let e = ellipse!(5, 10, 100, 100);\n\n /// assert_eq!(e.as_array(), [5, 10, 100, 100]);\n\n /// ```\n\n #[inline]\n\n pub fn as_array(&self) -> [T; 4] {\n\n self.0\n\n }\n", "file_path": "src/shape/ellipse.rs", "rank": 39, "score": 29.755542409018652 }, { "content": " /// Returns the `Image` width.\n\n #[inline]\n\n #[must_use]\n\n pub const fn width(&self) -> u32 {\n\n self.width\n\n }\n\n\n\n /// Returns the `Image` height.\n\n #[inline]\n\n #[must_use]\n\n pub const fn height(&self) -> u32 {\n\n self.height\n\n }\n\n\n\n /// Returns the `Image` dimensions as `(width, height)`.\n\n #[inline]\n\n #[must_use]\n\n pub const fn dimensions(&self) -> (u32, u32) 
{\n\n (self.width, self.height)\n\n }\n", "file_path": "src/image.rs", "rank": 40, "score": 27.854177925728546 }, { "content": " pub fn size(&self) -> Point<T, 2> {\n\n point!(self.width(), self.height())\n\n }\n\n\n\n /// Reposition the the rectangle.\n\n #[inline]\n\n pub fn reposition(&self, x: T, y: T) -> Self {\n\n Self::new(x, y, self.width(), self.height())\n\n }\n\n\n\n /// Resize the the rectangle.\n\n #[inline]\n\n pub fn resize(&self, width: T, height: T) -> Self {\n\n Self::new(self.x(), self.y(), width, height)\n\n }\n\n\n\n /// Offsets a rectangle by shifting coordinates by given amount.\n\n #[inline]\n\n pub fn offset<P>(&self, offsets: P) -> Self\n\n where\n", "file_path": "src/shape/rect.rs", "rank": 41, "score": 27.383682542698626 }, { "content": " /// `Image` height.\n\n height: u32,\n\n /// Raw pixel data.\n\n data: Vec<u8>,\n\n /// Pixel Format.\n\n format: PixelFormat,\n\n}\n\n\n\nimpl Image {\n\n /// Constructs an empty RGBA `Image` with given `width` and `height`.\n\n #[inline]\n\n pub fn new(width: u32, height: u32) -> Self {\n\n Self::with_rgba(width, height)\n\n }\n\n\n\n /// Constructs an empty RGBA `Image` with given `width` and `height`.\n\n ///\n\n /// Alias for [Image::new].\n\n #[doc(alias = \"new\")]\n\n #[inline]\n", "file_path": "src/image.rs", "rank": 42, "score": 26.86593577627441 }, { "content": " /// s.text(\"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam\")?;\n\n /// Ok(())\n\n /// }\n\n /// # }\n\n /// ```\n\n #[inline]\n\n pub fn wrap(&mut self, width: u32) {\n\n self.settings.wrap_width = Some(width);\n\n }\n\n\n\n /// Disable wrapping when drawing text on the canvas.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n\n /// s.no_wrap();\n", "file_path": "src/state/settings.rs", "rank": 43, "score": 26.441566436445637 }, { "content": " /// # 
Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let p: PointI2 = Point::origin();\n\n /// assert_eq!(p.as_array(), [0, 0]);\n\n /// ```\n\n #[inline]\n\n pub fn origin() -> Self\n\n where\n\n T: Default,\n\n {\n\n Self::new([(); N].map(|_| T::default()))\n\n }\n\n}\n\n\n\nimpl<T> Point<T, 1> {\n\n /// Constructs a `Point` from an individual x coordinate.\n\n #[inline]\n\n pub const fn from_x(x: T) -> Self {\n", "file_path": "src/shape/point.rs", "rank": 44, "score": 26.28108135399814 }, { "content": " #[inline]\n\n pub fn to_vec(self) -> Vec<T> {\n\n self.0.to_vec()\n\n }\n\n}\n\n\n\nimpl<T: Num, const N: usize> Point<T, N> {\n\n /// Offsets a `Point` by shifting coordinates by given amount.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let mut p = point!(2, 3, 1);\n\n /// p.offset([2, -4]);\n\n /// assert_eq!(p.as_array(), [4, -1, 1]);\n\n /// ```\n\n #[inline]\n\n pub fn offset<P, const M: usize>(&mut self, offsets: P)\n\n where\n", "file_path": "src/shape/point.rs", "rank": 45, "score": 26.140172107287448 }, { "content": "\n\n /// Constructs a `Color` from a [u32] RGB hexadecimal value with max alpha.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::from_hex(0xF0FF00);\n\n /// assert_eq!(c.channels(), [240, 255, 0, 255]);\n\n /// ```\n\n #[inline]\n\n pub const fn from_hex(hex: u32) -> Self {\n\n let [_, r, g, b] = hex.to_be_bytes();\n\n Self::rgba(r, g, b, 255)\n\n }\n\n\n\n /// Constructs a `Color` from a [u32] RGBA hexadecimal value.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/color/conversion.rs", "rank": 46, "score": 25.26460868702739 }, { "content": " fn from(format: PixelFormat) -> Self {\n\n match format {\n\n PixelFormat::Rgb => Self::Rgb,\n\n PixelFormat::Rgba => Self::Rgba,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for PixelFormat {\n\n fn default() -> Self {\n\n Self::Rgba\n\n }\n\n}\n\n\n\n/// An `Image` 
representing a buffer of pixel color values.\n\n#[derive(Default, Clone)]\n\n#[must_use]\n\npub struct Image {\n\n /// `Image` width.\n\n width: u32,\n", "file_path": "src/image.rs", "rank": 47, "score": 25.21767357953996 }, { "content": " /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::from_hex_alpha(0xF0FF00FF);\n\n /// assert_eq!(c.channels(), [240, 255, 0, 255]);\n\n ///\n\n /// let c = Color::from_hex_alpha(0xF0FF0080);\n\n /// assert_eq!(c.channels(), [240, 255, 0, 128]);\n\n /// ```\n\n #[inline]\n\n pub const fn from_hex_alpha(hex: u32) -> Self {\n\n let [r, g, b, a] = hex.to_be_bytes();\n\n Self::rgba(r, g, b, a)\n\n }\n\n\n\n /// Constructs a `Color` by inverting the RGBA values.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n", "file_path": "src/color/conversion.rs", "rank": 48, "score": 25.133324834671566 }, { "content": "\n\n /// Set window dimensions.\n\n #[inline]\n\n pub fn with_dimensions(&mut self, width: u32, height: u32) -> &mut Self {\n\n self.settings.width = width;\n\n self.settings.height = height;\n\n self\n\n }\n\n\n\n /// Set a window title.\n\n #[inline]\n\n pub fn with_title<S: Into<String>>(&mut self, title: S) -> &mut Self {\n\n self.settings.title = title.into();\n\n self\n\n }\n\n\n\n /// Position the window at the given `(x, y)` coordinates of the display.\n\n #[inline]\n\n pub fn position(&mut self, x: i32, y: i32) -> &mut Self {\n\n self.settings.x = Position::Positioned(x);\n", "file_path": "src/window.rs", "rank": 49, "score": 25.099380370159867 }, { "content": "//! 
Common [`PixEngine`] trait implementations for types.\n\n\n\nuse crate::prelude::*;\n\nuse num_traits::AsPrimitive;\n\nuse std::{\n\n array::IntoIter,\n\n iter::{FromIterator, Product, Sum},\n\n ops::{\n\n Add, AddAssign, Deref, DerefMut, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Neg, Sub,\n\n SubAssign,\n\n },\n\n};\n\n\n\n#[inline]\n\npub(crate) fn clamp_size(val: u32) -> i32 {\n\n val.clamp(0, i32::MAX as u32 / 2) as i32\n\n}\n\n\n\n#[inline]\n\npub(crate) fn clamp_dimensions(width: u32, height: u32) -> (i32, i32) {\n", "file_path": "src/ops.rs", "rank": 50, "score": 24.77269503502818 }, { "content": "use pix_engine::prelude::*;\n\n\n\nuse crate::cell::{Cell, Direction};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Maze {\n\n cols: u32,\n\n rows: u32,\n\n cells: Vec<Cell>,\n\n}\n\n\n\nimpl Maze {\n\n pub fn new(cols: u32, rows: u32) -> Self {\n\n let mut cells = Vec::with_capacity((cols * rows) as usize);\n\n for row in 0..rows {\n\n // Ensure cols are added contiguously before rows\n\n for col in 0..cols {\n\n cells.push(Cell::new(cells.len(), col, row));\n\n }\n\n }\n", "file_path": "examples/maze/maze.rs", "rank": 51, "score": 24.26570651446075 }, { "content": " }\n\n\n\n /// Constructs a square `Rect` centered at position `(x, y)` with `size`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let s = Rect::square_from_center([50, 50], 100);\n\n /// assert_eq!(s.as_array(), [0, 0, 100, 100]);\n\n /// ```\n\n pub fn square_from_center<P: Into<Point<T, 2>>>(p: P, size: T) -> Self {\n\n let p = p.into();\n\n let two = T::one() + T::one();\n\n let offset = size / two;\n\n Self::new(p.x() - offset, p.y() - offset, size, size)\n\n }\n\n\n\n /// Returns the `size` of the rectangle as a `Point`.\n\n #[inline]\n", "file_path": "src/shape/rect.rs", "rank": 52, "score": 23.97030179210949 }, { "content": " self.0.to_vec()\n\n }\n\n}\n\n\n\nimpl<T: Num, const N: usize> Vector<T, N> {\n\n /// Constructs a `Vector` by 
shifting coordinates by given amount.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let mut v = vector!(2.0, 3.0, 1.5);\n\n /// v.offset([2.0, -4.0]);\n\n /// assert_eq!(v.as_array(), [4.0, -1.0, 1.5]);\n\n /// ```\n\n #[inline]\n\n pub fn offset<V, const M: usize>(&mut self, offsets: V)\n\n where\n\n V: Into<Vector<T, M>>,\n\n {\n", "file_path": "src/vector.rs", "rank": 53, "score": 23.80978987551906 }, { "content": "use super::Renderer;\n\nuse crate::{prelude::*, renderer::TextureRenderer};\n\n\n\nimpl TextureRenderer for Renderer {\n\n /// Create a `Texture` to draw to.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If the current window target is closed or invalid, or the texture dimensions are invalid,\n\n /// then an error is returned.\n\n #[inline]\n\n fn create_texture(\n\n &mut self,\n\n width: u32,\n\n height: u32,\n\n format: Option<PixelFormat>,\n\n ) -> PixResult<TextureId> {\n\n todo!()\n\n }\n\n\n", "file_path": "src/renderer/wasm/texture.rs", "rank": 54, "score": 23.70976703330418 }, { "content": "\n\n /// Constructs a random `Color` with `red`, `green`, `blue` and alpha.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::random_alpha();\n\n /// // `c.channels()` will return something like:\n\n /// // [132, 159, 233, 76]\n\n /// ```\n\n #[inline]\n\n pub fn random_alpha() -> Self {\n\n Self::rgba(random!(255), random!(255), random!(255), random!(255))\n\n }\n\n\n\n /// Returns the [u32] RGB hexadecimal value of a `Color`.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/color.rs", "rank": 55, "score": 23.61039260202438 }, { "content": " /// Returns the `height` of the rectangle.\n\n #[inline]\n\n pub fn height(&self) -> T {\n\n self.0[3]\n\n }\n\n\n\n /// Sets the `height` of the rectangle.\n\n #[inline]\n\n pub fn set_height(&mut self, height: T) {\n\n self.0[3] = height;\n\n }\n\n}\n\n\n\nimpl<T: Num> Rect<T> {\n\n /// Constructs a `Rect` at 
position [Point] with `width` and `height`.\n\n pub fn with_position<P: Into<Point<T, 2>>>(p: P, width: T, height: T) -> Self {\n\n let p = p.into();\n\n Self::new(p.x(), p.y(), width, height)\n\n }\n\n\n", "file_path": "src/shape/rect.rs", "rank": 56, "score": 23.37677375268103 }, { "content": " pub fn with_rgba(width: u32, height: u32) -> Self {\n\n let format = PixelFormat::Rgba;\n\n let data = vec![0x00; format.channels() * (width * height) as usize];\n\n Self::from_vec(width, height, data, format)\n\n }\n\n\n\n /// Constructs an empty RGB `Image` with given `width` and `height`.\n\n #[inline]\n\n pub fn with_rgb(width: u32, height: u32) -> Self {\n\n let format = PixelFormat::Rgb;\n\n let data = vec![0x00; format.channels() * (width * height) as usize];\n\n Self::from_vec(width, height, data, format)\n\n }\n\n\n\n /// Constructs an `Image` from a [u8] [prim@slice] representing RGB/A values.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If the bytes length doesn't match the image dimensions and [`PixelFormat`] provided, then\n\n /// an error is returned.\n", "file_path": "src/image.rs", "rank": 57, "score": 22.93697242850473 }, { "content": " /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::rgb(240, 255, 0);\n\n /// assert_eq!(c.as_hex(), 0xF0FF00);\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub const fn as_hex(&self) -> u32 {\n\n let [r, g, b, _] = self.channels();\n\n u32::from_be_bytes([0, r, g, b])\n\n }\n\n\n\n /// Returns the [u32] RGBA hexadecimal value of a `Color`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::rgb(240, 255, 0);\n\n /// assert_eq!(c.as_hex_alpha(), 0xF0FF00FF);\n", "file_path": "src/color.rs", "rank": 58, "score": 22.7459979645857 }, { "content": " $crate::prelude::Sphere::new($x, $y, $z, $r)\n\n };\n\n}\n\n\n\nimpl<T> Sphere<T> {\n\n /// Constructs a `Sphere` at position `(x, y, z)` with `radius`.\n\n pub const fn new(x: T, y: T, z: T, radius: T) -> Self 
{\n\n Self([x, y, z, radius])\n\n }\n\n}\n\n\n\nimpl<T: Copy> Sphere<T> {\n\n /// Returns `Sphere` as `[x, y, z, radius]`.\n\n #[inline]\n\n pub fn as_array(&self) -> [T; 4] {\n\n self.0\n\n }\n\n\n\n /// Returns `Sphere` as a byte slice `&[x, y, z, radius]`.\n\n #[inline]\n", "file_path": "src/shape/sphere.rs", "rank": 59, "score": 22.65826562426095 }, { "content": " ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let r = rect!(5, 10, 100, 100);\n\n /// assert_eq!(r.as_array(), [5, 10, 100, 100]);\n\n /// ```\n\n #[inline]\n\n pub fn as_array(&self) -> [T; 4] {\n\n self.0\n\n }\n\n\n\n /// Returns `Rect` as a byte slice `&[x, y, width, height]`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let r = rect!(5, 10, 100, 100);\n", "file_path": "src/shape/rect.rs", "rank": 60, "score": 22.493361735383058 }, { "content": "\n\n /// Sets the width used to draw lines on the canvas.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n\n /// s.stroke(Color::BLUE);\n\n /// s.stroke_weight(2);\n\n /// // Shows a 2-pixel wide diagonal line\n\n /// s.line(line_![0, 0, 100, 100])?;\n\n /// Ok(())\n\n /// }\n\n /// # }\n\n /// ```\n\n #[inline]\n\n pub fn stroke_weight(&mut self, weight: u8) {\n", "file_path": "src/state/settings.rs", "rank": 61, "score": 22.474776450088935 }, { "content": " /// Returns the current UI rendering position.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n\n /// let mut pos = s.cursor_pos();\n\n /// pos.offset_y(20);\n\n /// s.set_cursor_pos(pos);\n\n /// s.text(\"Some text, offset down by 20 pixels\")?;\n\n /// Ok(())\n\n /// }\n\n /// # }\n\n /// 
```\n\n #[inline]\n\n pub const fn cursor_pos(&self) -> PointI2 {\n\n self.ui.cursor()\n", "file_path": "src/gui/state.rs", "rank": 62, "score": 22.186397837655754 }, { "content": " ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::new(0, 0, 128);\n\n /// assert_eq!(c.channels(), [0, 0, 128, 255]);\n\n /// ```\n\n #[inline]\n\n pub const fn new(r: u8, g: u8, b: u8) -> Self {\n\n Self::rgb(r, g, b)\n\n }\n\n\n\n /// Constructs a `Color` with `red`, `green`, `blue` and `alpha`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::new_alpha(0, 0, 128, 50);\n\n /// assert_eq!(c.channels(), [0, 0, 128, 50]);\n\n /// ```\n", "file_path": "src/color.rs", "rank": 63, "score": 22.119097504792887 }, { "content": "use crate::{\n\n cell::{Cell, Direction},\n\n maze::Maze,\n\n};\n\nuse pix_engine::prelude::*;\n\nuse rand::prelude::IteratorRandom;\n\nuse std::collections::HashSet;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MazeCreator {\n\n current: Option<Cell>,\n\n visited: HashSet<usize>,\n\n stack: Vec<Cell>,\n\n completed: bool,\n\n}\n\n\n\nimpl MazeCreator {\n\n pub fn new(maze: &Maze) -> Self {\n\n let current = maze.random_cell();\n\n Self {\n", "file_path": "examples/maze/maze_creator.rs", "rank": 64, "score": 22.08406413206327 }, { "content": " #[inline]\n\n pub fn from_bytes<B: AsRef<[u8]>>(\n\n width: u32,\n\n height: u32,\n\n bytes: B,\n\n format: PixelFormat,\n\n ) -> PixResult<Self> {\n\n let bytes = bytes.as_ref();\n\n if bytes.len() != (format.channels() * width as usize * height as usize) {\n\n return Err(PixError::InvalidImage {\n\n width,\n\n height,\n\n size: bytes.len(),\n\n format,\n\n }\n\n .into());\n\n }\n\n Ok(Self::from_vec(width, height, bytes.to_vec(), format))\n\n }\n\n\n", "file_path": "src/image.rs", "rank": 65, "score": 21.830729345458092 }, { "content": "\n\n /// Offsets the `y-coordinate` of the ellipse by a given amount.\n\n #[inline]\n\n pub fn offset_y(&mut 
self, offset: T) {\n\n self.0[1] += offset;\n\n }\n\n\n\n /// Offsets the `width` of the ellipse by a given amount.\n\n #[inline]\n\n pub fn offset_width(&mut self, offset: T) {\n\n self.0[2] += offset;\n\n }\n\n\n\n /// Offsets the `height` of the ellipse by a given amount.\n\n #[inline]\n\n pub fn offset_height(&mut self, offset: T) {\n\n self.0[3] += offset;\n\n }\n\n\n\n /// Offsets the `radius` of the circle by a given amount.\n", "file_path": "src/shape/ellipse.rs", "rank": 66, "score": 21.750147279082434 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::hsl(126.0, 80.0, 50.0);\n\n /// assert_eq!(c.channels(), [25, 230, 46, 255]);\n\n /// ```\n\n #[inline]\n\n pub fn hsl<T: Into<Scalar>>(h: T, s: T, l: T) -> Self {\n\n Self::with_mode(Hsl, h, s, l)\n\n }\n\n\n\n /// Constructs a `Color` with `hue`, `saturation`, `lightness` and `alpha`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::hsla(126.0, 80.0, 50.0, 0.5);\n\n /// assert_eq!(c.channels(), [25, 230, 46, 128]);\n", "file_path": "src/color.rs", "rank": 67, "score": 21.734077610537426 }, { "content": " #[inline]\n\n #[must_use]\n\n pub const fn arrow() -> Self {\n\n Self::System(SystemCursor::Arrow)\n\n }\n\n\n\n /// Constructs a `Cursor` with `SystemCursor::IBeam`.\n\n #[inline]\n\n #[must_use]\n\n pub const fn ibeam() -> Self {\n\n Self::System(SystemCursor::IBeam)\n\n }\n\n\n\n /// Constructs a `Cursor` with `SystemCursor::No`.\n\n #[inline]\n\n #[must_use]\n\n pub const fn no() -> Self {\n\n Self::System(SystemCursor::No)\n\n }\n\n\n", "file_path": "src/window.rs", "rank": 68, "score": 21.662565250261558 }, { "content": "use crate::{\n\n prelude::*,\n\n renderer::{RendererSettings, Rendering},\n\n shape::{LineI2, QuadI2, TriI2},\n\n};\n\n\n\nmod audio;\n\nmod texture;\n\nmod window;\n\n\n\n/// A Web-Assembly [Renderer] implementation.\n\npub(crate) struct Renderer {}\n\n\n\nimpl 
Rendering for Renderer {\n\n /// Creates a new Renderer instance.\n\n #[inline]\n\n fn new(settings: RendererSettings) -> PixResult<Self> {\n\n todo!()\n\n }\n\n\n", "file_path": "src/renderer/wasm.rs", "rank": 69, "score": 21.38739602340953 }, { "content": " /// ```\n\n #[doc(alias = \"new_alpha\")]\n\n #[inline]\n\n pub const fn rgba(r: u8, g: u8, b: u8, a: u8) -> Self {\n\n Self {\n\n mode: Rgb,\n\n channels: [r, g, b, a],\n\n }\n\n }\n\n\n\n /// Constructs a `Color` with `hue`, `saturation`, `brightness` and max `alpha`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::hsb(126.0, 80.0, 50.0);\n\n /// assert_eq!(c.channels(), [25, 128, 36, 255]);\n\n /// ```\n\n #[inline]\n", "file_path": "src/color.rs", "rank": 70, "score": 21.35440669740186 }, { "content": " /// Returns the `Image` bounding [Rect] positioned at `offset`.\n\n #[inline]\n\n pub fn bounding_rect_offset<P>(&self, offset: P) -> Rect<i32>\n\n where\n\n P: Into<PointI2>,\n\n {\n\n let (width, height) = clamp_dimensions(self.width, self.height);\n\n rect![offset.into(), width, height]\n\n }\n\n\n\n /// Returns the center position as [Point].\n\n #[inline]\n\n pub fn center(&self) -> PointI2 {\n\n let (width, height) = clamp_dimensions(self.width, self.height);\n\n point!(width / 2, height / 2)\n\n }\n\n\n\n /// Returns the `Image` pixel data as an iterator of [u8].\n\n #[inline]\n\n pub fn bytes(&self) -> Bytes<'_> {\n", "file_path": "src/image.rs", "rank": 71, "score": 21.27900312760921 }, { "content": " /// return Ok(true);\n\n /// }\n\n /// Ok(false)\n\n /// }\n\n /// # }\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub fn running(&mut self) -> bool {\n\n self.settings.running\n\n }\n\n\n\n /// Unpause the render loop.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n", "file_path": "src/state/settings.rs", "rank": 72, "score": 21.237664238201823 }, 
{ "content": " #[inline]\n\n pub fn offset_radius(&mut self, offset: T) {\n\n self.0[2] += offset;\n\n self.0[3] += offset;\n\n }\n\n\n\n /// Returns the `size` of the ellipse as a `Point`.\n\n #[inline]\n\n pub fn size(&self) -> Point<T, 2> {\n\n point!(self.width(), self.height())\n\n }\n\n\n\n /// Returns the bounding [Rect] of the ellipse.\n\n #[inline]\n\n pub fn bounding_rect(&self) -> Rect<T> {\n\n rect![self.left(), self.top(), self.width(), self.height()]\n\n }\n\n\n\n /// Returns `Ellipse` as a [Vec].\n\n ///\n", "file_path": "src/shape/ellipse.rs", "rank": 73, "score": 21.219064384783543 }, { "content": "use pix_engine::prelude::*;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum Direction {\n\n North = 0,\n\n East,\n\n South,\n\n West,\n\n}\n\nuse Direction::*;\n\n\n\nuse crate::SIZE;\n\n\n\nimpl Direction {\n\n pub fn opposite(self) -> Self {\n\n match self {\n\n North => South,\n\n East => West,\n\n South => North,\n\n West => East,\n", "file_path": "examples/maze/cell.rs", "rank": 74, "score": 21.21241013686147 }, { "content": "use super::Renderer;\n\nuse crate::{prelude::*, renderer::TextureRenderer};\n\nuse anyhow::Context;\n\nuse sdl2::{\n\n rect::Rect as SdlRect,\n\n render::{Canvas, Texture as SdlTexture},\n\n};\n\nuse std::{\n\n cell::RefCell,\n\n ops::{Deref, DerefMut},\n\n};\n\n\n\npub(crate) struct RendererTexture {\n\n inner: Option<SdlTexture>,\n\n}\n\n\n\nimpl RendererTexture {\n\n pub(crate) const fn new(texture: SdlTexture) -> Self {\n\n Self {\n\n inner: Some(texture),\n", "file_path": "src/renderer/sdl/texture.rs", "rank": 75, "score": 21.076703398016882 }, { "content": " }\n\n .into());\n\n }\n\n let bytes: Vec<u8> = match format {\n\n PixelFormat::Rgb => pixels\n\n .iter()\n\n .flat_map(|p| [p.red(), p.green(), p.blue()])\n\n .collect(),\n\n PixelFormat::Rgba => pixels.iter().flat_map(Color::channels).collect(),\n\n };\n\n Ok(Self::from_vec(width, height, bytes, format))\n\n }\n\n\n\n /// Constructs an `Image` 
from a [`Vec<u8>`] representing RGB/A values.\n\n #[inline]\n\n pub fn from_vec(width: u32, height: u32, data: Vec<u8>, format: PixelFormat) -> Self {\n\n Self {\n\n width,\n\n height,\n\n data,\n", "file_path": "src/image.rs", "rank": 76, "score": 21.045099125319386 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let p = point!(1.0, 2.0);\n\n /// let v = Vector::from_point(p);\n\n /// assert_eq!(v.as_array(), [1.0, 2.0]);\n\n /// ```\n\n #[inline]\n\n pub fn from_point(p: Point<T, N>) -> Self {\n\n Self::new(p.as_array())\n\n }\n\n\n\n /// Returns the `x-coordinate`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If `Vector` has zero dimensions.\n\n ///\n\n /// # Example\n", "file_path": "src/vector.rs", "rank": 77, "score": 21.044044244237195 }, { "content": " /// assert_eq!(r.as_bytes(), &[5, 10, 100, 100]);\n\n /// ```\n\n #[inline]\n\n pub fn as_bytes(&self) -> &[T; 4] {\n\n &self.0\n\n }\n\n\n\n /// Returns `Rect` as a mutable byte slice `&mut [x, y, width, height]`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let mut r = rect!(5, 10, 100, 100);\n\n /// for p in r.as_bytes_mut() {\n\n /// *p += 5;\n\n /// }\n\n /// assert_eq!(r.as_bytes(), &[10, 15, 105, 105]);\n\n /// ```\n\n #[inline]\n", "file_path": "src/shape/rect.rs", "rank": 78, "score": 21.034582493613303 }, { "content": "\n\n /// Returns `Ellipse` values as a byte slice `&[x, y, width, height]`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let e = ellipse!(5, 10, 100, 100);\n\n /// assert_eq!(e.as_bytes(), &[5, 10, 100, 100]);\n\n /// ```\n\n #[inline]\n\n pub fn as_bytes(&self) -> &[T; 4] {\n\n &self.0\n\n }\n\n\n\n /// Returns `Ellipse` values as a mutable byte slice `&[x, y, width, height]`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n", "file_path": "src/shape/ellipse.rs", "rank": 79, "score": 20.895843647274468 }, { "content": " Self([x])\n\n 
}\n\n}\n\n\n\nimpl<T> Point<T, 2> {\n\n /// Constructs a `Point` from individual x/y coordinates.\n\n #[inline]\n\n pub const fn from_xy(x: T, y: T) -> Self {\n\n Self([x, y])\n\n }\n\n}\n\n\n\nimpl<T> Point<T, 3> {\n\n /// Constructs a `Point` from individual x/y/z coordinates.\n\n #[inline]\n\n pub const fn from_xyz(x: T, y: T, z: T) -> Self {\n\n Self([x, y, z])\n\n }\n\n}\n\n\n", "file_path": "src/shape/point.rs", "rank": 81, "score": 20.747532032690163 }, { "content": " ///\n\n /// let c = Color::rgba(240, 255, 0, 128);\n\n /// assert_eq!(c.as_hex_alpha(), 0xF0FF0080);\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub const fn as_hex_alpha(&self) -> u32 {\n\n u32::from_be_bytes(self.channels())\n\n }\n\n\n\n /// Returns a list of max values for each color channel based on [Mode].\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::rgb(0, 0, 0);\n\n /// assert_eq!(c.maxes(), [255.0, 255.0, 255.0, 255.0]);\n\n ///\n\n /// let c = Color::hsb(0.0, 0.0, 0.0);\n", "file_path": "src/color.rs", "rank": 82, "score": 20.735490247055555 }, { "content": " Self {\n\n mode,\n\n channels: calculate_channels(levels),\n\n }\n\n }\n\n\n\n /// Constructs a random `Color` with `red`, `green`, `blue` and max alpha.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::random();\n\n /// // `c.channels()` will return something like:\n\n /// // [207, 12, 217, 255]\n\n /// ```\n\n #[inline]\n\n pub fn random() -> Self {\n\n Self::rgb(random!(255), random!(255), random!(255))\n\n }\n", "file_path": "src/color.rs", "rank": 83, "score": 20.655279809695553 }, { "content": " /// If the window has been closed or is invalid, then an error is returned.\n\n #[inline]\n\n pub fn width(&self) -> PixResult<u32> {\n\n let (width, _) = self.dimensions()?;\n\n Ok(width)\n\n }\n\n\n\n /// The width of the current window.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If the window has been closed or 
is invalid, then an error is returned.\n\n #[inline]\n\n pub fn window_width(&self) -> PixResult<u32> {\n\n let (width, _) = self.window_dimensions()?;\n\n Ok(width)\n\n }\n\n\n\n /// Set the width of the current window.\n\n ///\n", "file_path": "src/window.rs", "rank": 84, "score": 20.593698490264842 }, { "content": "\n\n /// Constructs an `Ellipse` centered at position `(x, y)` with `width` and `height`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let e = Ellipse::from_center([50, 50], 100, 100);\n\n /// assert_eq!(e.as_array(), [0, 0, 100, 100]);\n\n /// ```\n\n pub fn from_center<P: Into<Point<T, 2>>>(p: P, width: T, height: T) -> Self {\n\n let p = p.into();\n\n let two = T::one() + T::one();\n\n Self::new(p.x() - width / two, p.y() - height / two, width, height)\n\n }\n\n\n\n /// Constructs a circle `Ellipse` centered at position `(x, y)` with `radius`.\n\n ///\n\n /// # Example\n\n ///\n", "file_path": "src/shape/ellipse.rs", "rank": 85, "score": 20.54684952328476 }, { "content": " /// # Examples\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let p = Point::new([1]);\n\n /// assert_eq!(p.as_array(), [1]);\n\n ///\n\n /// let p = Point::new([1, 2]);\n\n /// assert_eq!(p.as_array(), [1, 2]);\n\n ///\n\n /// let p = Point::new([1, -2, 1]);\n\n /// assert_eq!(p.as_array(), [1, -2, 1]);\n\n /// ```\n\n #[inline]\n\n pub const fn new(coords: [T; N]) -> Self {\n\n Self(coords)\n\n }\n\n\n\n /// Constructs a `Point` at the origin.\n\n ///\n", "file_path": "src/shape/point.rs", "rank": 86, "score": 20.435943893036786 }, { "content": " /// let v = Vector::new([2.1]);\n\n /// assert_eq!(v.as_array(), [2.1]);\n\n ///\n\n /// let v = Vector::new([2.1, 3.5]);\n\n /// assert_eq!(v.as_array(), [2.1, 3.5]);\n\n ///\n\n /// let v = Vector::new([2.1, 3.5, 1.0]);\n\n /// assert_eq!(v.as_array(), [2.1, 3.5, 1.0]);\n\n /// ```\n\n #[inline]\n\n pub const fn new(coords: [T; N]) -> Self {\n\n Self(coords)\n\n 
}\n\n\n\n /// Constructs a `Vector` at the origin.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n", "file_path": "src/vector.rs", "rank": 87, "score": 20.435943893036782 }, { "content": " pub fn hsb<T: Into<Scalar>>(h: T, s: T, b: T) -> Self {\n\n Self::with_mode(Hsb, h, s, b)\n\n }\n\n\n\n /// Constructs a `Color` with `hue`, `saturation`, `brightness` and `alpha`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::hsba(126.0, 80.0, 50.0, 0.5);\n\n /// assert_eq!(c.channels(), [25, 128, 36, 128]);\n\n /// ```\n\n #[inline]\n\n pub fn hsba<T: Into<Scalar>>(h: T, s: T, b: T, a: T) -> Self {\n\n Self::with_mode_alpha(Hsb, h, s, b, a)\n\n }\n\n\n\n /// Constructs a `Color` with `hue`, `saturation`, `lightness` and max `alpha`.\n\n ///\n", "file_path": "src/color.rs", "rank": 88, "score": 20.323609059500974 }, { "content": " /// ```\n\n #[doc(alias = \"new\")]\n\n #[inline]\n\n pub const fn rgb(r: u8, g: u8, b: u8) -> Self {\n\n Self {\n\n mode: Rgb,\n\n channels: [r, g, b, 255],\n\n }\n\n }\n\n\n\n /// Constructs a `Color` with `red`, `green`, `blue` and `alpha`.\n\n ///\n\n /// Alias for [Color::new_alpha].\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// let c = Color::rgba(128, 64, 128, 128);\n\n /// assert_eq!(c.channels(), [128, 64, 128, 128]);\n", "file_path": "src/color.rs", "rank": 89, "score": 20.28953012957487 }, { "content": "#[must_use]\n\n#[derive(Debug)]\n\npub struct WindowBuilder<'a> {\n\n state: &'a mut PixState,\n\n settings: RendererSettings,\n\n}\n\n\n\nimpl<'a> WindowBuilder<'a> {\n\n /// Creates a new `WindowBuilder` instance.\n\n #[inline]\n\n pub fn new(s: &'a mut PixState) -> Self {\n\n let vsync = s.renderer.vsync();\n\n Self {\n\n state: s,\n\n settings: RendererSettings {\n\n vsync,\n\n ..RendererSettings::default()\n\n },\n\n }\n\n }\n", "file_path": "src/window.rs", "rank": 90, "score": 
20.26378901489666 }, { "content": "use pix_engine::prelude::*;\n\n\n\nconst WIDTH: u32 = 800;\n\nconst HEIGHT: u32 = 800;\n\n\n", "file_path": "examples/textures.rs", "rank": 91, "score": 20.254976705061942 }, { "content": "use anyhow::anyhow;\n\nuse pix_engine::prelude::*;\n\nuse std::{env, path::Path};\n\n\n\nconst WIDTH: u32 = 800;\n\nconst HEIGHT: u32 = 600;\n\n\n", "file_path": "examples/image.rs", "rank": 92, "score": 20.24222578567781 }, { "content": "impl Default for Theme {\n\n fn default() -> Self {\n\n Self::dark()\n\n }\n\n}\n\n\n\nimpl Theme {\n\n /// Constructs a default [Builder] which can build a `Theme` instance.\n\n ///\n\n /// See [Builder] for examples.\n\n #[inline]\n\n pub fn builder() -> Builder {\n\n Builder::default()\n\n }\n\n\n\n /// Constructs a default dark `Theme`.\n\n #[inline]\n\n pub fn dark() -> Self {\n\n Self {\n\n name: \"Dark\".into(),\n", "file_path": "src/gui/theme.rs", "rank": 93, "score": 20.200817094804798 }, { "content": " };\n\n}\n\n\n\nimpl<T> Rect<T> {\n\n /// Constructs a `Rect` at position `(x, y)` with `width` and `height`.\n\n pub const fn new(x: T, y: T, width: T, height: T) -> Self {\n\n Self([x, y, width, height])\n\n }\n\n\n\n /// Constructs a square `Rect` at position `(x, y)` with `size`.\n\n pub fn square(x: T, y: T, size: T) -> Self\n\n where\n\n T: Copy,\n\n {\n\n Self::new(x, y, size, size)\n\n }\n\n}\n\n\n\nimpl<T: Copy> Rect<T> {\n\n /// Returns `Rect` as `[x, y, width, height]`.\n", "file_path": "src/shape/rect.rs", "rank": 94, "score": 20.182690966227035 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n\n /// if s.mouse_pressed() {\n\n /// s.background(Color::random());\n\n /// }\n\n /// Ok(())\n\n /// }\n\n /// # }\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub fn mouse_pressed(&self) -> bool {\n\n self.ui.mouse.is_pressed()\n\n 
}\n\n\n", "file_path": "src/state.rs", "rank": 95, "score": 20.167493082169713 }, { "content": " /// }\n\n /// Ok(())\n\n /// }\n\n /// # }\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub fn key_pressed(&self) -> bool {\n\n self.ui.keys.is_pressed()\n\n }\n\n\n\n /// Returns if a specific [Key] is currently being held.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use pix_engine::prelude::*;\n\n /// # struct App;\n\n /// # impl AppState for App {\n\n /// fn on_update(&mut self, s: &mut PixState) -> PixResult<()> {\n", "file_path": "src/state.rs", "rank": 96, "score": 20.167493082169713 }, { "content": "impl<T> Vector<T, 2> {\n\n /// Constructs a `Vector` from individual x/y coordinates.\n\n #[inline]\n\n pub const fn from_xy(x: T, y: T) -> Self {\n\n Self([x, y])\n\n }\n\n}\n\n\n\nimpl<T> Vector<T, 3> {\n\n /// Constructs a `Vector` from individual x/y/z coordinates.\n\n #[inline]\n\n pub const fn from_xyz(x: T, y: T, z: T) -> Self {\n\n Self([x, y, z])\n\n }\n\n}\n\n\n\nimpl<T: Num + Float> Vector<T, 2> {\n\n /// Constructs a `Vector` from another `Vector`, rotated by an `angle`.\n\n ///\n\n /// # Example\n", "file_path": "src/vector.rs", "rank": 97, "score": 20.064016559529737 }, { "content": " }\n\n\n\n /// Set window dimensions.\n\n pub fn with_dimensions(&mut self, width: u32, height: u32) -> &mut Self {\n\n self.settings.width = width;\n\n self.settings.height = height;\n\n self\n\n }\n\n\n\n /// Scales the window.\n\n pub fn scale(&mut self, x: f32, y: f32) -> &mut Self {\n\n self.settings.scale_x = x;\n\n self.settings.scale_y = y;\n\n self\n\n }\n\n\n\n /// Set audio sample rate.\n\n pub fn audio_sample_rate(&mut self, sample_rate: i32) -> &mut Self {\n\n self.settings.audio_sample_rate = sample_rate;\n\n self\n", "file_path": "src/engine.rs", "rank": 98, "score": 19.992916105506414 }, { "content": "use pix_engine::{math::map, prelude::*};\n\n\n\nconst WIDTH: u32 = 800;\n\nconst HEIGHT: u32 = 600;\n\nconst SIZE: u32 = 4;\n\n\n", "file_path": 
"examples/colors.rs", "rank": 99, "score": 19.964084066232445 } ]
Rust
PLs/Rust/Slns/Simple-Windows-RS-Window/src/main.rs
QubitTooLate/How-To-Make-A-Win32-Window
2593bf0192bc361490a957eb3b6b5d2c23bed11b
use bindings::{ Windows::Win32::{ Foundation::*, UI::WindowsAndMessaging::*, System::LibraryLoader::{ GetModuleHandleA, }, }, }; use windows::*; fn main() -> Result<()> { let mut window = Win3Window::new()?; window.run() } struct Win3Window { handle: HWND } impl Win3Window { fn new() -> Result<Self> { Ok(Win3Window { handle: HWND(0) }) } fn procedure(&mut self, window_handle: HWND, message: u32, w: WPARAM, l: LPARAM) -> LRESULT { unsafe { match message { WM_CLOSE => { DestroyWindow(window_handle); LRESULT(0) } WM_DESTROY => { PostQuitMessage(0); LRESULT(0) } _ => DefWindowProcA(window_handle, message, w, l), } } } fn run(&mut self) -> Result<()> { unsafe { let instance = GetModuleHandleA(None); debug_assert!(instance.0 != 0); let wc = WNDCLASSA { hCursor: LoadCursorW(None, IDC_ARROW), hInstance: instance, lpszClassName: PSTR(b"window\0".as_ptr() as _), lpfnWndProc: Some(Self::window_procedure), ..Default::default() }; let atom = RegisterClassA(&wc); debug_assert!(atom != 0); let handle = CreateWindowExA( Default::default(), "window", "Sample Window", WS_OVERLAPPEDWINDOW | WS_VISIBLE, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, None, None, instance, self as *mut _ as _, ); debug_assert!(handle.0 != 0); debug_assert!(handle == self.handle); let mut msg = MSG::default(); /*while msg.message != WM_QUIT { if PeekMessageA(&mut msg, None, 0, 0, PM_REMOVE).into() { DispatchMessageA(&msg); } else { let result = self.no_message_handler(); if result.is_err() { return result; } } }*/ while GetMessageA(&mut msg, None, 0, 0).into() { DispatchMessageA(&msg); } Ok(()) } } /*fn no_message_handler(&mut self) -> Result<()> { Ok(()) }*/ extern "system" fn window_procedure( window_handle: HWND, message: u32, w: WPARAM, l: LPARAM, ) -> LRESULT { unsafe { let this : *mut Self; if message == WM_NCCREATE { let create_struct = l.0 as *const CREATESTRUCTA; this = (*create_struct).lpCreateParams as *mut Self; (*this).handle = window_handle; SetWindowLong(window_handle, 
GWLP_USERDATA, this as _); } else { this = GetWindowLong(window_handle, GWLP_USERDATA) as *mut Self; } if !this.is_null() { return (*this).procedure(window_handle, message, w, l); } DefWindowProcA(window_handle, message, w, l) } } } #[allow(non_snake_case)] #[cfg(target_pointer_width = "32")] unsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize { SetWindowLongA(window, index, value as _) as _ } #[allow(non_snake_case)] #[cfg(target_pointer_width = "64")] unsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize { SetWindowLongPtrA(window, index, value) } #[allow(non_snake_case)] #[cfg(target_pointer_width = "32")] unsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize { GetWindowLongA(window, index) as _ } #[allow(non_snake_case)] #[cfg(target_pointer_width = "64")] unsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize { GetWindowLongPtrA(window, index) }
use bindings::{ Windows::Win32::{ Foundation::*, UI::WindowsAndMessaging::*, System::LibraryLoader::{ GetModuleHandleA, }, }, }; use windows::*; fn main() -> Result<()> { let mut window = Win3Window::new()?; window.run() } struct Win3Window { handle: HWND } impl Win3Window { fn new() -> Result<Self> { Ok(Win3Window { handle: HWND(0) }) } fn procedure(&mut self, window_handle: HWND, message: u32, w: WPARAM, l: LPARAM) -> LRESULT { unsafe { match message { WM_CLOSE => { DestroyWindow(window_handle);
handle, message, w, l); } DefWindowProcA(window_handle, message, w, l) } } } #[allow(non_snake_case)] #[cfg(target_pointer_width = "32")] unsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize { SetWindowLongA(window, index, value as _) as _ } #[allow(non_snake_case)] #[cfg(target_pointer_width = "64")] unsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize { SetWindowLongPtrA(window, index, value) } #[allow(non_snake_case)] #[cfg(target_pointer_width = "32")] unsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize { GetWindowLongA(window, index) as _ } #[allow(non_snake_case)] #[cfg(target_pointer_width = "64")] unsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize { GetWindowLongPtrA(window, index) }
LRESULT(0) } WM_DESTROY => { PostQuitMessage(0); LRESULT(0) } _ => DefWindowProcA(window_handle, message, w, l), } } } fn run(&mut self) -> Result<()> { unsafe { let instance = GetModuleHandleA(None); debug_assert!(instance.0 != 0); let wc = WNDCLASSA { hCursor: LoadCursorW(None, IDC_ARROW), hInstance: instance, lpszClassName: PSTR(b"window\0".as_ptr() as _), lpfnWndProc: Some(Self::window_procedure), ..Default::default() }; let atom = RegisterClassA(&wc); debug_assert!(atom != 0); let handle = CreateWindowExA( Default::default(), "window", "Sample Window", WS_OVERLAPPEDWINDOW | WS_VISIBLE, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, None, None, instance, self as *mut _ as _, ); debug_assert!(handle.0 != 0); debug_assert!(handle == self.handle); let mut msg = MSG::default(); /*while msg.message != WM_QUIT { if PeekMessageA(&mut msg, None, 0, 0, PM_REMOVE).into() { DispatchMessageA(&msg); } else { let result = self.no_message_handler(); if result.is_err() { return result; } } }*/ while GetMessageA(&mut msg, None, 0, 0).into() { DispatchMessageA(&msg); } Ok(()) } } /*fn no_message_handler(&mut self) -> Result<()> { Ok(()) }*/ extern "system" fn window_procedure( window_handle: HWND, message: u32, w: WPARAM, l: LPARAM, ) -> LRESULT { unsafe { let this : *mut Self; if message == WM_NCCREATE { let create_struct = l.0 as *const CREATESTRUCTA; this = (*create_struct).lpCreateParams as *mut Self; (*this).handle = window_handle; SetWindowLong(window_handle, GWLP_USERDATA, this as _); } else { this = GetWindowLong(window_handle, GWLP_USERDATA) as *mut Self; } if !this.is_null() { return (*this).procedure(window_
random
[]
Rust
lamp_asm_parser/src/lexer.rs
ElFamosoKilluaah/lamp
4a879129118a798de6d2ba05b5e39a0f0663649b
use lamp_common::op::{get_op, Opcode}; #[derive(Debug, Copy, Clone, PartialEq)] pub enum TokenType { Opcode(Opcode), Num8(u8), Ptr8(u8), } #[derive(Debug)] pub enum LexerError { UnexpectedToken(usize, String), InvalidMnemonic(usize, String), InvalidLine(usize), } impl std::fmt::Display for LexerError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { /* at + 1 because at is an array/vec index */ Self::InvalidLine(at) => { write!(f, "Line {} contains one or more invalid tokens", at + 1) } Self::UnexpectedToken(at, tkn) => { write!(f, "Unexpected token at line {}: \'{}\'", at + 1, tkn) } Self::InvalidMnemonic(at, mnemonic) => { write!(f, "Invalid mnemonic at line {}: \'{}\'", at + 1, mnemonic) } } } } pub struct Lexer; #[derive(PartialEq, Debug)] pub struct TokenizedLine { pub opcode: Opcode, pub operands: Vec<TokenType>, } impl TokenizedLine { pub fn empty() -> Self { TokenizedLine { opcode: Opcode::NOP, operands: vec![], } } } impl Lexer { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self {} } #[allow(clippy::ptr_arg)] pub fn tokenize(&self, content: &Vec<String>) -> Result<Vec<TokenizedLine>, Vec<LexerError>> { let mut tokenized = Vec::<TokenizedLine>::new(); let mut errors = Vec::<LexerError>::new(); #[allow(clippy::needless_range_loop)] for i in 0..content.len() { if content[i].starts_with(lamp_common::constants::COMMENT_MARKER) { continue; } match self.tokenize_line(&content[i], i) { Ok(tkn) => { if errors.is_empty() { tokenized.push(tkn); } } Err(e) => errors.push(e), } } if errors.is_empty() { return Ok(tokenized); } Err(errors) } pub fn tokenize_line(&self, line: &str, line_num: usize) -> Result<TokenizedLine, LexerError> { if let Some(index) = line.find(' ') { let to_tokenize = line.split_at(index); let mut tokenized = TokenizedLine::empty(); if let Ok(opcode) = get_op(to_tokenize.0.to_owned()) { tokenized.opcode = opcode; match self.tokenize_operands(to_tokenize.1.to_owned(), line_num) { Ok(tkns) => { 
tokenized.operands = tkns; Ok(tokenized) } Err(e) => Err(e), } } else { Err(LexerError::InvalidMnemonic( line_num, to_tokenize.0.to_owned(), )) } } else { Err(LexerError::InvalidLine(line_num)) } } pub fn tokenize_operands( &self, line: String, line_num: usize, ) -> Result<Vec<TokenType>, LexerError> { let to_tokenize: Vec<&str> = line.split(',').collect(); let mut tokens = Vec::<TokenType>::new(); for tkn in to_tokenize { let tkn = tkn.split_whitespace().next(); match tkn { Some(tkn) => match self.from_number_to_token(tkn.to_string(), line_num) { Ok(tok) => tokens.push(tok), Err(e) => return Err(e), }, None => break, } } Ok(tokens) } pub fn from_number_to_token(&self, tkn: String, line: usize) -> Result<TokenType, LexerError> { let split = &tkn.split_at(1); match split.1.parse::<u8>() { Ok(number) => match split.0 { "#" => Ok(TokenType::Num8(number)), "$" => Ok(TokenType::Ptr8(number)), other => Err(LexerError::UnexpectedToken(line, other.to_owned())), }, Err(_) => Err(LexerError::UnexpectedToken(line, tkn)), } } }
use lamp_common::op::{get_op, Opcode}; #[derive(Debug, Copy, Clone, PartialEq)] pub enum TokenType { Opcode(Opcode), Num8(u8), Ptr8(u8), } #[derive(Debug)] pub enum LexerError { UnexpectedToken(usize, String), InvalidMnemonic(usize, String), InvalidLine(usize), } impl std::fmt::Display for LexerError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { /* at + 1 because at is an array/vec index */ Self::InvalidLine(at) => { write!(f, "Line {} contains one or more invalid tokens", at + 1) } Self::UnexpectedToken(at, tkn) => { write!(f, "Unexpected token at line {}: \'{}\
e>::new(); let mut errors = Vec::<LexerError>::new(); #[allow(clippy::needless_range_loop)] for i in 0..content.len() { if content[i].starts_with(lamp_common::constants::COMMENT_MARKER) { continue; } match self.tokenize_line(&content[i], i) { Ok(tkn) => { if errors.is_empty() { tokenized.push(tkn); } } Err(e) => errors.push(e), } } if errors.is_empty() { return Ok(tokenized); } Err(errors) } pub fn tokenize_line(&self, line: &str, line_num: usize) -> Result<TokenizedLine, LexerError> { if let Some(index) = line.find(' ') { let to_tokenize = line.split_at(index); let mut tokenized = TokenizedLine::empty(); if let Ok(opcode) = get_op(to_tokenize.0.to_owned()) { tokenized.opcode = opcode; match self.tokenize_operands(to_tokenize.1.to_owned(), line_num) { Ok(tkns) => { tokenized.operands = tkns; Ok(tokenized) } Err(e) => Err(e), } } else { Err(LexerError::InvalidMnemonic( line_num, to_tokenize.0.to_owned(), )) } } else { Err(LexerError::InvalidLine(line_num)) } } pub fn tokenize_operands( &self, line: String, line_num: usize, ) -> Result<Vec<TokenType>, LexerError> { let to_tokenize: Vec<&str> = line.split(',').collect(); let mut tokens = Vec::<TokenType>::new(); for tkn in to_tokenize { let tkn = tkn.split_whitespace().next(); match tkn { Some(tkn) => match self.from_number_to_token(tkn.to_string(), line_num) { Ok(tok) => tokens.push(tok), Err(e) => return Err(e), }, None => break, } } Ok(tokens) } pub fn from_number_to_token(&self, tkn: String, line: usize) -> Result<TokenType, LexerError> { let split = &tkn.split_at(1); match split.1.parse::<u8>() { Ok(number) => match split.0 { "#" => Ok(TokenType::Num8(number)), "$" => Ok(TokenType::Ptr8(number)), other => Err(LexerError::UnexpectedToken(line, other.to_owned())), }, Err(_) => Err(LexerError::UnexpectedToken(line, tkn)), } } }
'", at + 1, tkn) } Self::InvalidMnemonic(at, mnemonic) => { write!(f, "Invalid mnemonic at line {}: \'{}\'", at + 1, mnemonic) } } } } pub struct Lexer; #[derive(PartialEq, Debug)] pub struct TokenizedLine { pub opcode: Opcode, pub operands: Vec<TokenType>, } impl TokenizedLine { pub fn empty() -> Self { TokenizedLine { opcode: Opcode::NOP, operands: vec![], } } } impl Lexer { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self {} } #[allow(clippy::ptr_arg)] pub fn tokenize(&self, content: &Vec<String>) -> Result<Vec<TokenizedLine>, Vec<LexerError>> { let mut tokenized = Vec::<TokenizedLin
random
[ { "content": "pub fn get_op<'a>(base: String) -> Result<Opcode, &'a str> {\n\n for (opcode, name) in OPCODES_STRINGS {\n\n if base.to_uppercase().contains(name) {\n\n return Ok(*opcode);\n\n }\n\n }\n\n\n\n Err(\"No opcode found.\")\n\n}\n", "file_path": "lamp_common/src/op.rs", "rank": 0, "score": 98568.56267563705 }, { "content": "pub fn decode_opcode(val: u8) -> Option<Opcode> {\n\n for code in OPCODES {\n\n if *code as u8 == val {\n\n return Some(*code);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "lamp_common/src/op.rs", "rank": 1, "score": 80349.9699243368 }, { "content": "pub fn decode_opcode(val: u8) -> Result<Opcode, VMError> {\n\n match get_op(val) {\n\n Some(opcode) => Ok(opcode),\n\n None => Err(VMError::InvalidOpcodeError),\n\n }\n\n}\n", "file_path": "lamp_vm/src/base/opcodes.rs", "rank": 2, "score": 78681.53566486659 }, { "content": "pub fn parse_cmd(string: &str) -> Option<Box<dyn DebugCommand>> {\n\n for command in get_cmds() {\n\n if string.contains(command.name()) {\n\n return Some(command);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "lamp_vm/src/debug/commands/mod.rs", "rank": 3, "score": 74522.69299403456 }, { "content": "#[test]\n\npub fn vm_add_test() {\n\n let bin = vec![\n\n // LOAD 13, 15, 15: Put in the register 13 the u16 represented by 15 and 15\n\n 15, 13, 15, 15,\n\n // LOAD 14, 16, 16: Put in the register 14 the u16 represented by 16 and 16\n\n 15, 14, 16, 16,\n\n // ADD 13, 14, 15: Put in the register 15 the result of the register 13 + the register 14\n\n 1, 13, 14, 15,\n\n ];\n\n\n\n let mut vm = VM::new(bin);\n\n let _ = vm.run();\n\n let expected_value = ((15 << 8) | 15) + ((16 << 8) | 16);\n\n assert_eq!(*vm.get_register(15), expected_value);\n\n}\n\n\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 4, "score": 59956.84941734788 }, { "content": "#[test]\n\npub fn vm_mul_test() {\n\n let bin = vec![\n\n // LOAD 13, 15, 15: Put in the register 13 the u16 represented by 15 and 15\n\n 15, 13, 15, 15,\n\n // LOAD 14, 16, 
16: Put in the register 14 the u16 represented by 16 and 16\n\n 15, 14, 16, 16,\n\n // MUL 13, 14, 15: Put in the register 15 the result of the register 13 * the register 14\n\n 3, 13, 14, 15,\n\n ];\n\n\n\n let mut vm = VM::new(bin);\n\n let _ = vm.run();\n\n let expected_value = ((15 << 8) | 15) * ((16 << 8) | 16);\n\n assert_eq!(*vm.get_register(15), expected_value);\n\n}\n\n\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 5, "score": 59956.84941734788 }, { "content": "#[test]\n\npub fn vm_load_test() {\n\n let bin = vec![\n\n // LOAD 13, 15, 15: Put in the register 13 the u16 represented by 15 and 15\n\n 15, 13, 15, 15,\n\n ];\n\n let mut vm = VM::new(bin);\n\n let _ = vm.run();\n\n let expected_value = (15 << 8) | 15;\n\n assert_eq!(*vm.get_register(13), expected_value);\n\n}\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 6, "score": 59956.84941734788 }, { "content": "#[test]\n\npub fn vm_mod_test() {\n\n let bin = vec![\n\n // LOAD 13, 15, 15: Put in the register 13 the u16 represented by 15 and 15\n\n 15, 13, 15, 15,\n\n // LOAD 14, 16, 16: Put in the register 14 the u16 represented by 16 and 16\n\n 15, 14, 16, 16,\n\n // MOD 13, 14, 15: Put in the register 15 the result of the register 13 / the register 14. 
The remainder goes into a special register\n\n 4, 13, 14, 15,\n\n ];\n\n\n\n let mut vm = VM::new(bin);\n\n let _ = vm.run();\n\n let expected_value = ((15 << 8) | 15) / ((16 << 8) | 16);\n\n assert_eq!(*vm.get_register(15), expected_value);\n\n}\n\n\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 7, "score": 59956.84941734788 }, { "content": "#[test]\n\npub fn vm_sub_test() {\n\n let bin = vec![\n\n // LOAD 13, 15, 15: Put in the register 13 the u16 represented by 15 and 15\n\n 15, 13, 15, 15,\n\n // LOAD 14, 16, 16: Put in the register 14 the u16 represented by 16 and 16\n\n 15, 14, 16, 16,\n\n // SUB 13, 14, 15: Put in the register 15 the result of the register 13 - the register 14\n\n 2, 13, 14, 15,\n\n ];\n\n\n\n let mut vm = VM::new(bin);\n\n let _ = vm.run();\n\n let expected_value = ((15 << 8) | 15) - ((16 << 8) | 16);\n\n assert_eq!(*vm.get_register(15), expected_value);\n\n}\n\n\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 8, "score": 59956.84941734788 }, { "content": "#[cfg(test)]\n\npub fn registers_dump(vm: &VM) {\n\n for i in 0..32 {\n\n println!(\"Register {}: {}\", &i, vm.get_register(i));\n\n }\n\n}\n\n\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 9, "score": 55543.20305803565 }, { "content": "pub fn get_cmds() -> Vec<Box<dyn DebugCommand>> {\n\n vec![\n\n Box::new(regdump::RegdumpCommand {}),\n\n Box::new(setpc::SetPcCommand {}),\n\n Box::new(step::StepCommand {}),\n\n ]\n\n}\n\n\n", "file_path": "lamp_vm/src/debug/commands/mod.rs", "rank": 10, "score": 50225.23291887318 }, { "content": "fn main() {\n\n let lamp = LampApp::from_args();\n\n simple_logger::init().unwrap();\n\n let bin = std::fs::read(&lamp.bin_path);\n\n\n\n match bin {\n\n Ok(v) => {\n\n if lamp.debug {\n\n info!(\"Debug session started.\");\n\n let mut debug_session = DebugSession::new(VM::new(v));\n\n debug_session.start_debug_session();\n\n info!(\"Debug session ended.\");\n\n return;\n\n }\n\n\n\n let mut lamp_vm = VM::new(v);\n\n let 
exit_status = lamp_vm.run();\n\n\n\n match exit_status {\n\n Ok(code) => info!(\"VM exited successfully (code {})\", code),\n\n Err(e) => error!(\"VM exited with an error.\\nReason: {:?}\", e),\n\n }\n\n }\n\n Err(e) => error!(\"Unable to read the binary's content: {:?}\", e),\n\n }\n\n\n\n info!(\"VM Shutdown.\");\n\n}\n", "file_path": "lamp_vm/src/main.rs", "rank": 11, "score": 32772.13810951068 }, { "content": "fn main() {\n\n let args = LampAsm::from_args();\n\n\n\n let mut file = File::open(&args.source);\n\n\n\n match file {\n\n Ok(ref mut file) => {\n\n let mut buffer = String::new();\n\n match file.read_to_string(&mut buffer) {\n\n Ok(k) => {\n\n println!(\"Source file size is {} bytes.\", k);\n\n let lines = buffer.split('\\n').map(|s| s.to_string()).collect();\n\n\n\n let mut compiler = Compiler::new(lines);\n\n match compiler.compile() {\n\n Ok(bin_size) => {\n\n println!(\"Output's size is {} bytes.\", bin_size);\n\n match write_output(&args.output, compiler.result_buffer) {\n\n Ok(_) => println!(\"Compilation successfully ended.\"),\n\n Err(e) => println!(\n", "file_path": "lamp_assembler/src/main.rs", "rank": 12, "score": 32772.13810951068 }, { "content": "pub trait DebugCommand {\n\n fn execute(&self, vm: &mut VM, args: Vec<&str>) -> usize;\n\n fn name(&self) -> &str;\n\n fn description(&self) -> &str;\n\n fn syntax(&self) -> &str;\n\n fn display_error(&self) {\n\n error!(\n\n \"Error: too much/not enough arguments.\\nUsage: {}\",\n\n self.syntax()\n\n );\n\n }\n\n}\n", "file_path": "lamp_vm/src/debug/commands/command_base.rs", "rank": 13, "score": 31391.543955042245 }, { "content": "#[test]\n\nfn test_parse() {\n\n let to_parse = \"LOAD #30, #20, #16\";\n\n let lexer = Lexer::new();\n\n\n\n let res = lexer.tokenize_line(to_parse, 1);\n\n\n\n assert_eq!(res.is_ok(), true);\n\n\n\n let expected = TokenizedLine {\n\n opcode: Opcode::LOAD,\n\n operands: vec![\n\n TokenType::Num8(30),\n\n TokenType::Num8(20),\n\n TokenType::Num8(16),\n\n ],\n\n 
};\n\n\n\n assert_eq!(res.unwrap(), expected);\n\n}\n\n\n", "file_path": "lamp_asm_parser/src/tests.rs", "rank": 14, "score": 31357.23488130705 }, { "content": "#[test]\n\nfn test_parse_error() {\n\n let to_parse = \"ZIZI #30, '0, #caca\";\n\n let lexer = Lexer::new();\n\n\n\n let res = lexer.tokenize_line(to_parse, 1);\n\n\n\n assert_eq!(res.is_err(), true);\n\n}\n", "file_path": "lamp_asm_parser/src/tests.rs", "rank": 15, "score": 30712.83341891786 }, { "content": "use super::vm::VMError;\n\nuse lamp_common::op::{decode_opcode as get_op, Opcode};\n\n\n", "file_path": "lamp_vm/src/base/opcodes.rs", "rank": 16, "score": 24321.019071703566 }, { "content": "use lamp_asm_parser::lexer::{Lexer, LexerError, TokenType, TokenizedLine};\n\n\n\npub struct Compiler {\n\n origin: Vec<String>,\n\n pub result_buffer: Vec<u8>,\n\n}\n\n\n\nimpl Compiler {\n\n pub fn new(origin: Vec<String>) -> Self {\n\n Self {\n\n origin,\n\n result_buffer: vec![],\n\n }\n\n }\n\n\n\n pub fn compile(&mut self) -> Result<usize, Vec<LexerError>> {\n\n let tokenization = Lexer::new().tokenize(&self.origin);\n\n\n\n match tokenization {\n\n Ok(tokenized_lines) => {\n", "file_path": "lamp_assembler/src/compiler.rs", "rank": 19, "score": 15.920766295847113 }, { "content": "pub enum VMError {\n\n InvalidOpcodeError,\n\n UnexpectedTokenError,\n\n // More errors will be added\n\n}\n\n\n\nimpl VM {\n\n pub fn new(binary: Vec<u8>) -> Self {\n\n Self {\n\n bin: binary,\n\n pc: 0,\n\n registers: [0; 32],\n\n modulo_remainder: 0,\n\n eq_flag: false,\n\n }\n\n }\n\n\n\n // Runs the full binary.\n\n pub fn run(&mut self) -> VMResult {\n\n let mut result;\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 22, "score": 13.892310805408497 }, { "content": " for line in tokenized_lines {\n\n let buff = self.compile_token(line);\n\n\n\n for byte in buff {\n\n self.result_buffer.push(byte);\n\n }\n\n }\n\n }\n\n Err(e) => return Err(e),\n\n }\n\n Ok(self.result_buffer.len())\n\n }\n\n\n\n pub fn compile_token(&mut 
self, token: TokenizedLine) -> Vec<u8> {\n\n let mut returned_vec = Vec::<u8>::new();\n\n\n\n returned_vec.push(token.opcode as u8);\n\n // Should be changed later\n\n for operand in token.operands {\n\n match operand {\n\n TokenType::Num8(n) => returned_vec.push(n),\n\n TokenType::Ptr8(n) => returned_vec.push(n),\n\n TokenType::Opcode(_) => panic!(\"Should never happen: Unexpected opcode found\"),\n\n }\n\n }\n\n returned_vec\n\n }\n\n}\n", "file_path": "lamp_assembler/src/compiler.rs", "rank": 24, "score": 12.090504720564759 }, { "content": "use crate::lexer::{Lexer, TokenType, TokenizedLine};\n\nuse lamp_common::op::Opcode;\n\n\n\n#[test]\n", "file_path": "lamp_asm_parser/src/tests.rs", "rank": 27, "score": 9.755983189795867 }, { "content": "use super::commands;\n\nuse crate::base::vm::VM;\n\nuse log::{error, info};\n\nuse std::io;\n\nuse std::io::Write;\n\n\n\npub struct DebugSession {\n\n vm: VM,\n\n}\n\n\n\nimpl DebugSession {\n\n pub fn new(vm: VM) -> Self {\n\n Self { vm }\n\n }\n\n\n\n pub fn start_debug_session(&mut self) {\n\n info!(\"Starting debug session.\");\n\n loop {\n\n print!(\"\\n>>> \");\n\n let _ = io::stdout().flush();\n", "file_path": "lamp_vm/src/debug/session.rs", "rank": 28, "score": 9.500998141163112 }, { "content": " loop {\n\n result = self.cycle();\n\n\n\n if self.pc >= self.bin.len() {\n\n break;\n\n }\n\n }\n\n result\n\n }\n\n\n\n // One VM's cycle.\n\n // It does:\n\n // -The opcode decoding\n\n // -The opcode execution\n\n // - Error handling\n\n pub fn cycle(&mut self) -> VMResult {\n\n let opcode = self.next_8_bits();\n\n let outcoming_result: VMResult;\n\n\n\n match decode_opcode(opcode) {\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 29, "score": 9.077156438262511 }, { "content": "macro_rules! opcodes {\n\n { $($variant: ident = $value: expr, op = $operands: expr),+$(,)? 
} =>\n\n {\n\n #[repr(u8)]\n\n #[derive(Debug, Copy, Clone, PartialEq)]\n\n pub enum Opcode {\n\n $($variant = $value),+,\n\n }\n\n pub const OPCODES: &'static [Opcode] = &[$(Opcode::$variant),*];\n\n pub const OPCODES_OPERANDS: &'static [(Opcode, usize)] = &[$((Opcode::$variant , $operands)),*];\n\n pub const OPCODES_STRINGS: &'static [(Opcode, &str)] = &[$((Opcode::$variant , stringify!(Opcode::$variant))),*];\n\n }\n\n}\n\n// First number is the mnemonic's ID.\n\n// op is the number of operands the mnemonic needs\n\nopcodes! {\n\n // Arithmetical instructions\n\n ADD = 1, op = 3,\n\n SUB = 2, op = 3,\n\n MUL = 3, op = 3,\n", "file_path": "lamp_common/src/op.rs", "rank": 30, "score": 8.670641829823012 }, { "content": " let mut command_line = String::new();\n\n\n\n io::stdin()\n\n .read_line(&mut command_line)\n\n .map_err(|e| {\n\n error!(\"Unable to read line: {:?}\", e);\n\n })\n\n .unwrap();\n\n\n\n command_line = String::from(command_line.trim());\n\n match commands::parse_cmd(&command_line) {\n\n Some(cmd) => {\n\n let res = cmd.execute(&mut self.vm, command_line.split(' ').collect());\n\n info!(\"Command reported the {} result.\", res);\n\n },\n\n None => error!(\"This command doesn\\'t exist. 
type \\\"help\\\" for an exhaustive list of the available commands.\"),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "lamp_vm/src/debug/session.rs", "rank": 31, "score": 8.293365861643398 }, { "content": "use super::opcodes::decode_opcode;\n\nuse lamp_common::op::Opcode;\n\nuse log::{error, info};\n\n\n\npub struct VM {\n\n // The binary VM has to execute\n\n bin: Vec<u8>,\n\n // The program counter, it's utility is to remind where we are in the program\n\n pc: usize,\n\n // Registers used to store i32 values the program needs\n\n registers: [i32; 32],\n\n // When modulo operation is done, the remainder is pushed here\n\n modulo_remainder: i32,\n\n // When an eq test is done, the result is pushed here\n\n eq_flag: bool,\n\n}\n\n\n\npub type VMResult = Result<i32, VMError>;\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 32, "score": 8.092192115360842 }, { "content": "use super::command_base::DebugCommand;\n\nuse crate::base::vm::VM;\n\nuse log::{error, info};\n\n\n\npub struct RegdumpCommand;\n\n\n\nimpl DebugCommand for RegdumpCommand {\n\n fn execute(&self, vm: &mut VM, args: Vec<&str>) -> usize {\n\n match args.get(1) {\n\n Some(arg) => {\n\n if arg == &\"all\" {\n\n for i in 0..32 {\n\n print!(\"|| {}: {}\", i, vm.get_register(i));\n\n }\n\n 0\n\n } else {\n\n match arg.parse::<u8>() {\n\n Ok(num) => {\n\n if num > 31 {\n\n error!(\"Error: wrong arg 2.\\nUsage: {}\", self.syntax());\n", "file_path": "lamp_vm/src/debug/commands/regdump.rs", "rank": 33, "score": 7.750795133172865 }, { "content": " }\n\n Ok(0)\n\n }\n\n\n\n // Sets the value of a register.\n\n pub fn set_register_value(&mut self, index: u8, val: i32) {\n\n let ptr = self.get_register_mut(index);\n\n *ptr = val;\n\n }\n\n\n\n // Gives a mutable register's reference, and verifies if the given register's index is valid.\n\n pub fn get_register_mut(&mut self, index: u8) -> &mut i32 {\n\n if index < 32 {\n\n &mut self.registers[index as usize]\n\n } 
else {\n\n error!(\n\n \"Register out of bounds. Expected 0 <= register_index < 32; got {}\",\n\n index\n\n );\n\n panic!(\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 34, "score": 7.611230979921988 }, { "content": "use super::command_base::DebugCommand;\n\nuse crate::base::vm::VM;\n\n\n\npub struct SetPcCommand;\n\n\n\nimpl DebugCommand for SetPcCommand {\n\n fn execute(&self, vm: &mut VM, args: Vec<&str>) -> usize {\n\n match args.get(1) {\n\n Some(arg) => match arg.parse::<usize>() {\n\n Ok(new_pc) => {\n\n vm.set_pc(new_pc);\n\n 0\n\n }\n\n Err(_) => {\n\n self.display_error();\n\n 1\n\n }\n\n },\n\n None => {\n\n self.display_error();\n", "file_path": "lamp_vm/src/debug/commands/setpc.rs", "rank": 35, "score": 7.519856188210014 }, { "content": "use super::command_base::DebugCommand;\n\nuse crate::base::vm::VM;\n\n\n\npub struct StepCommand;\n\n\n\nimpl DebugCommand for StepCommand {\n\n #[allow(unused_variables)]\n\n fn execute(&self, vm: &mut VM, args: Vec<&str>) -> usize {\n\n match vm.cycle() {\n\n Ok(_) => 0,\n\n Err(_) => 1,\n\n }\n\n }\n\n fn name(&self) -> &str {\n\n \"step\"\n\n }\n\n fn description(&self) -> &str {\n\n \"Makes 1 VM's cycle.\"\n\n }\n\n fn syntax(&self) -> &str {\n\n \"step\"\n\n }\n\n}\n", "file_path": "lamp_vm/src/debug/commands/step.rs", "rank": 36, "score": 7.255133802244771 }, { "content": " Ok(opcode) => outcoming_result = self.execute_instruction(opcode),\n\n Err(e) => {\n\n error!(\"VM's error happened. Aborting. 
\\n {:?}\", e);\n\n return Err(e);\n\n }\n\n }\n\n outcoming_result\n\n }\n\n\n\n // Executes the given opcode.\n\n // This function is just a giant match.\n\n pub fn execute_instruction(&mut self, opcode: Opcode) -> VMResult {\n\n match opcode {\n\n // Arithmetical instructions\n\n Opcode::ADD => {\n\n let val_1 = self.registers[self.next_8_bits() as usize];\n\n let val_2 = self.registers[self.next_8_bits() as usize];\n\n let result_register = self.next_8_bits();\n\n self.set_register_value(result_register, val_1 + val_2);\n\n }\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 37, "score": 6.860280078069135 }, { "content": "pub mod opcodes;\n\npub mod vm;\n", "file_path": "lamp_vm/src/base/mod.rs", "rank": 38, "score": 5.173928118479095 }, { "content": " \"Register out of bounds. Expected 0 <= register_index < 32; got {}\",\n\n index\n\n );\n\n }\n\n }\n\n\n\n pub fn get_register(&self, index: u8) -> &i32 {\n\n if index < 32 {\n\n &self.registers[index as usize]\n\n } else {\n\n error!(\n\n \"Register out of bounds. Expected 0 <= register_index < 32; got {}\",\n\n index\n\n );\n\n panic!(\n\n \"Register out of bounds. 
Expected 0 <= register_index < 32; got {}\",\n\n index\n\n );\n\n }\n\n }\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 39, "score": 4.844287377348658 }, { "content": "use base::vm::VM;\n\nuse debug::session::DebugSession;\n\nuse log::{error, info};\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\npub mod base;\n\npub mod debug;\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"lamp\")]\n", "file_path": "lamp_vm/src/main.rs", "rank": 40, "score": 4.730143070392978 }, { "content": "pub mod command_base;\n\n\n\n// Commands modules declarations\n\npub mod regdump;\n\npub mod setpc;\n\npub mod step;\n\n\n\nuse command_base::DebugCommand;\n\n\n", "file_path": "lamp_vm/src/debug/commands/mod.rs", "rank": 41, "score": 4.6379293154321495 }, { "content": " // Grabs next 8 bits of the VM's binary\n\n // Stupid out of bounds error detected; should be fixed.\n\n pub fn next_8_bits(&mut self) -> u8 {\n\n self.pc += 1;\n\n self.bin[self.pc - 1]\n\n }\n\n\n\n // Grabs next 16 bytes of the VM's binary\n\n fn next_16_bits(&mut self) -> u16 {\n\n self.pc += 2;\n\n ((u16::from(self.bin[self.pc - 2])) << 8) | u16::from(self.bin[self.pc - 1])\n\n }\n\n\n\n pub fn set_pc(&mut self, new_pc: usize) -> usize {\n\n if new_pc > self.bin.len() {\n\n self.pc = new_pc;\n\n 0\n\n } else {\n\n error!(\n\n \"Tried to set the program counter to {} where the binary\\'s total size is {} bytes\",\n\n new_pc,\n\n self.bin.len()\n\n );\n\n 1\n\n }\n\n }\n\n}\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 42, "score": 3.6469902502493037 }, { "content": "use crate::compiler::Compiler;\n\nuse std::fs::File;\n\nuse std::io::BufWriter;\n\nuse std::io::Read;\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\nmod compiler;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"lamp_asm\")]\n", "file_path": "lamp_assembler/src/main.rs", "rank": 43, "score": 3.0286398800139653 }, { "content": "use 
crate::base::vm::VM;\n\nuse log::error;\n\n\n", "file_path": "lamp_vm/src/debug/commands/command_base.rs", "rank": 44, "score": 2.9052744761761256 }, { "content": "pub mod op;\n\n\n\npub mod constants {\n\n // Do not ask me from where those values come from\n\n pub const LAMP_BIN_HEADER: &[u8] = &[69, 31, 17, 72];\n\n pub const COMMENT_MARKER: &str = \";\";\n\n}\n", "file_path": "lamp_common/src/lib.rs", "rank": 45, "score": 2.761613193816819 }, { "content": " }\n\n Opcode::NOP => {\n\n info!(\"NOP Opcode encountered, doing nothing.\");\n\n self.pc += 1;\n\n }\n\n Opcode::LOAD => {\n\n let register = self.next_8_bits();\n\n let value = self.next_16_bits() as i32;\n\n self.set_register_value(register, value);\n\n }\n\n Opcode::JMP => {\n\n let register = self.next_8_bits();\n\n let addr = *self.get_register(register);\n\n self.pc = addr as usize;\n\n }\n\n\n\n Opcode::MODR => {\n\n let register = self.next_8_bits();\n\n self.set_register_value(register, self.modulo_remainder);\n\n }\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 46, "score": 2.721929770807549 }, { "content": "pub mod commands;\n\npub mod session;\n", "file_path": "lamp_vm/src/debug/mod.rs", "rank": 47, "score": 2.7021939007470355 }, { "content": "use crate::base::vm::VM;\n\n\n\n#[test]\n", "file_path": "lamp_vm/src/tests/vm_test.rs", "rank": 48, "score": 2.4882956611043747 }, { "content": " let to_inc = self.next_8_bits() as usize;\n\n self.registers[to_inc] += 1;\n\n }\n\n Opcode::DEC => {\n\n let to_dec = self.next_8_bits() as usize;\n\n self.registers[to_dec] -= 1;\n\n }\n\n // Control Flow instructions\n\n Opcode::EQ => {\n\n let reg_1 = self.next_8_bits() as usize;\n\n let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] == self.registers[reg_2];\n\n }\n\n Opcode::NEQ => {\n\n let reg_1 = self.next_8_bits() as usize;\n\n let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] != self.registers[reg_2];\n\n }\n\n Opcode::GT => {\n\n let 
reg_1 = self.next_8_bits() as usize;\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 49, "score": 2.4327101750167115 }, { "content": "# Lamp\n\nLamp is yet another registers-based VM.\n\n\n\n# Status\n\nThis project is still a work-in-place and will not be achieved before a long moment.\n\n\n\n# Used resources\n\nThis project was supposed to be written using the subnetzero's tutorial, but I finally decided to directly use the documentation of his project: https://gitlab.com/subnetzero/iridium/-/blob/master/docs/manual.adoc\n\n\n\n# TODO\n\n-Everything (lol)\n\n\n", "file_path": "lamp_vm/README.md", "rank": 50, "score": 2.380365345270895 }, { "content": " let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] > self.registers[reg_2];\n\n }\n\n Opcode::GTE => {\n\n let reg_1 = self.next_8_bits() as usize;\n\n let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] >= self.registers[reg_2];\n\n }\n\n Opcode::LT => {\n\n let reg_1 = self.next_8_bits() as usize;\n\n let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] < self.registers[reg_2];\n\n }\n\n Opcode::LTE => {\n\n let reg_1 = self.next_8_bits() as usize;\n\n let reg_2 = self.next_8_bits() as usize;\n\n self.eq_flag = self.registers[reg_1] <= self.registers[reg_2];\n\n }\n\n Opcode::HLT => {\n\n // I literally don't know what should I do here\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 51, "score": 2.34576762804619 }, { "content": "pub mod lexer;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "lamp_asm_parser/src/lib.rs", "rank": 52, "score": 2.208685298948759 }, { "content": " 1\n\n }\n\n }\n\n }\n\n\n\n fn name(&self) -> &str {\n\n \"setpc\"\n\n }\n\n fn description(&self) -> &str {\n\n \"Sets the program counter to the given index.\"\n\n }\n\n fn syntax(&self) -> &str {\n\n \"setpc <unsigned value>\"\n\n }\n\n}\n", "file_path": "lamp_vm/src/debug/commands/setpc.rs", "rank": 53, "score": 2.2076217260193345 }, { 
"content": " Opcode::SUB => {\n\n let val_1 = self.registers[self.next_8_bits() as usize];\n\n let val_2 = self.registers[self.next_8_bits() as usize];\n\n let result_register = self.next_8_bits();\n\n self.set_register_value(result_register, val_1 - val_2);\n\n }\n\n Opcode::MUL => {\n\n let val_1 = self.registers[self.next_8_bits() as usize];\n\n let val_2 = self.registers[self.next_8_bits() as usize];\n\n let result_register = self.next_8_bits();\n\n self.set_register_value(result_register, val_1 * val_2);\n\n }\n\n Opcode::MOD => {\n\n let val_1 = self.registers[self.next_8_bits() as usize];\n\n let val_2 = self.registers[self.next_8_bits() as usize];\n\n let result_register = self.next_8_bits();\n\n self.set_register_value(result_register, val_1 / val_2);\n\n self.modulo_remainder = val_1 % val_2;\n\n }\n\n Opcode::INC => {\n", "file_path": "lamp_vm/src/base/vm.rs", "rank": 54, "score": 2.199764081343698 }, { "content": " \"Compilation failed: Cannot write the output. Error: {:?}\",\n\n e\n\n ),\n\n }\n\n }\n\n Err(errors) => {\n\n eprintln!(\"Can\\'t compile the source file: {} problems found. But don\\'t worry ! Here are the listed mistakes: \", errors.len());\n\n for error in errors {\n\n eprintln!(\"{}\", error);\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n eprintln!(\"Error: cannot read the file.\\nDetails: {:?}\", e);\n\n }\n\n }\n\n }\n\n Err(e) => eprintln!(\"Error: cannot open {:?}: {:?}\", args.source, e),\n\n }\n\n\n\n fn write_output(path: &PathBuf, bin: Vec<u8>) -> Result<usize, std::io::Error> {\n\n let buff = bin.as_slice();\n\n let file = File::create(path)?;\n\n let mut writer = BufWriter::new(file);\n\n writer.write(&buff)\n\n }\n\n}\n", "file_path": "lamp_assembler/src/main.rs", "rank": 55, "score": 1.0919260188127309 } ]